2020-09-27 14:00:20 +02:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
2023-10-13 20:32:40 +02:00
|
|
|
import argparse
|
2020-09-27 14:00:20 +02:00
|
|
|
from http.server import BaseHTTPRequestHandler, HTTPServer
|
2021-05-31 14:13:01 +02:00
|
|
|
from urllib.parse import urlparse
|
2020-09-27 14:37:52 +02:00
|
|
|
import threading
|
2020-10-21 21:41:06 +02:00
|
|
|
import requests
|
2020-09-27 14:37:52 +02:00
|
|
|
import sys
|
2021-12-08 17:50:48 +01:00
|
|
|
import time
|
2020-10-15 14:35:16 +02:00
|
|
|
import os
|
|
|
|
import ssl
|
2021-05-12 15:58:19 +02:00
|
|
|
import util
|
|
|
|
import xss_handler
|
2020-09-27 14:00:20 +02:00
|
|
|
|
2020-09-27 14:37:52 +02:00
|
|
|
class FileServerRequestHandler(BaseHTTPRequestHandler):
    """Request handler for HttpFileServer.

    All HTTP verbs funnel into do_GET, which looks up a handler in the owning
    server's exact-match routes and prefix routes, sends the route's result,
    and optionally dumps the raw request (for exfiltration-style usage).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def do_HEAD(self):
        # HEAD is served like GET; the body write is suppressed in do_GET.
        self.do_GET()

    def do_POST(self):
        # POST bodies are read lazily via read_body() / onForward().
        self.do_GET()

    def onForward(self, base_path, target, **kwargs):
        """Proxy the current request to ``target`` and return the upstream
        result as a ``(status_code, body_bytes, headers)`` tuple.

        ``base_path`` is the registered route prefix; the remainder of
        ``self.path`` (including the query string) is appended to ``target``.
        Extra ``kwargs`` are passed through to ``requests.request``
        (e.g. ``verify=False``).
        """
        # Strip the registered prefix but keep the leading "/" of the rest.
        path = self.path[max(0, len(base_path) - 1):]

        # If the target URL already carries a path component, avoid
        # duplicating it in the rewritten URL.
        parts = urlparse(target)
        if path.startswith(parts.path):
            path = path[len(parts.path):]

        target_rewrite = target + path

        # Forward the request body, if any was sent.
        content_length = self.headers.get('Content-Length')
        data = None
        if content_length and int(content_length) > 0:
            data = self.rfile.read(int(content_length))

        # Drop the incoming Host header so `requests` generates one that
        # matches the forward target instead of this proxy.
        if "Host" in self.headers:
            del self.headers["Host"]

        method = self.command
        print(target, "=>", method, target_rewrite)
        res = requests.request(method, target_rewrite, headers=self.headers, data=data, **kwargs)
        return res.status_code, res.content, res.headers

    def read_body(self):
        """Read and cache the request body.

        The body can only be read from the socket once; caching it on ``self``
        lets both a route handler and the request-dump code see it.
        Returns the raw bytes, or None if no body was sent.
        """
        if not hasattr(self, "body"):
            content_length = self.headers.get('Content-Length')
            if content_length and int(content_length) > 0:
                self.body = self.rfile.read(int(content_length))
            else:
                self.body = None

        return self.body

    def find_route(self, path):
        """Resolve ``path`` to a handler function.

        Exact-match routes win over prefix routes; if nothing matches, a
        handler returning 404 with an empty body is returned so do_GET can
        treat every lookup uniformly.
        """
        if path in self.server.routes:
            return self.server.routes[path]

        for p, route in self.server.prefix_routes.items():
            if path.startswith(p):
                return route

        def not_found(req):
            return 404, b"", {}

        return not_found

    def do_OPTIONS(self):
        self.do_GET()

    def do_GET(self):
        try:
            if not self.server.is_running:
                # Shutdown in progress: answer the wake-up request and bail.
                self.send_response(200)
                self.end_headers()
                return

            path = self.server.cleanPath(self.path)
            route = self.find_route(path)
            result = route(self)

            # Hop-by-hop / auto-generated headers must not be copied verbatim
            # from a proxied upstream response (requests already decoded the
            # body, so upstream lengths/encodings would be wrong).
            blacklist_headers = ["transfer-encoding", "content-length", "content-encoding", "allow", "connection"]

            # A route may return status, (status,), (status, body) or
            # (status, body, headers).
            if isinstance(result, tuple):
                status_code = 200 if len(result) < 1 else result[0]
                data = b"" if len(result) < 2 else result[1]
                headers = { } if len(result) < 3 else result[2]
            else:
                status_code = result
                data = b""
                headers = {}

            # Encode before measuring: Content-Length must be the byte count,
            # and len(str) differs from len(bytes) for non-ASCII payloads.
            if isinstance(data, str):
                data = data.encode()

            if path in self.server.dumpRequests:
                headers["Access-Control-Allow-Origin"] = "*"

            headers["Content-Length"] = len(data)

            if len(headers) == 0:
                self.send_response(status_code)
            else:
                # send_response() would log and emit default headers itself;
                # log manually (except for the internal /dummy wake-up) and
                # send only the status line.
                if path != "/dummy":
                    self.log_request(status_code)
                self.send_response_only(status_code)

            for key, value in headers.items():
                if key.lower() not in blacklist_headers:
                    self.send_header(key, value)

            # The blacklist above filtered out any Content-Length (including
            # our own); send the accurate byte count explicitly so the
            # response is properly framed.
            self.send_header("Content-Length", len(data))

            if self.command.upper() == "OPTIONS":
                self.send_header("Allow", "OPTIONS, GET, HEAD, POST")

            self.end_headers()

            if data and self.command.upper() not in ["HEAD","OPTIONS"]:
                self.wfile.write(data)

            # Dump the raw request if this path (or everything via "/") was
            # registered for dumping.
            if (path in self.server.dumpRequests or "/" in self.server.dumpRequests) and path != "/dummy":
                body = self.read_body()
                print("===== Connection from:",self.client_address[0])
                print("%s %s %s" % (self.command, self.path, self.request_version))
                print(str(self.headers).strip())
                if body:
                    print()
                    print(body)
                print("==========")
        except Exception as e:
            print("Exception on handling http", str(e))
            # Bare raise preserves the original traceback (raise e would not).
            raise

    def log_message(self, format, *args):
        # Request logging is opt-in via HttpFileServer.enableLogging().
        if self.server.logRequests:
            super().log_message(format, *args)
|
2020-09-27 14:37:52 +02:00
|
|
|
|
|
|
|
class HttpFileServer(HTTPServer):
    """Ad-hoc HTTP(S) server for serving in-memory files, dumping incoming
    requests, or forwarding requests to another host.

    Routes are plain callables taking the request handler and returning
    ``status``, ``(status, body)`` or ``(status, body, headers)``.
    """

    def __init__(self, addr, port):
        super().__init__((addr, port), FileServerRequestHandler)
        self.logRequests = False     # opt-in request logging (enableLogging)
        self.routes = { }            # exact path -> handler callable
        self.dumpRequests = []       # paths whose raw requests are printed
        self.prefix_routes = { }     # path prefix -> handler callable
        self.is_running = True       # cleared by stop() to drain handlers
        self.listen_thread = None    # set by startBackground()

    def cleanPath(self, path):
        """Normalize a path: strip the query string, ensure a leading '/'."""
        if "?" in path:
            path = path[0:path.find("?")]

        if not path.startswith("/"):
            path = "/" + path

        return path.strip()

    def addFile(self, name, data, mimeType=None):
        """Serve ``data`` at ``/name``.

        ``data`` may be str, bytes, or a readable file-like object (which is
        consumed and closed here). The content is captured once and served
        from memory with a permissive CORS header.
        """
        if hasattr(data, "read"):
            fd = data
            data = data.read()
            fd.close()

        if isinstance(data, str):
            data = data.encode("UTF-8")

        headers = {
            "Access-Control-Allow-Origin": "*",
        }

        if mimeType:
            headers["Content-Type"] = mimeType

        # return 200 - OK and data
        self.addRoute(name, lambda req: (200, data, headers))

    def add_file_path(self, path, name=None):
        """Serve the file at ``path`` under ``/name`` (default: basename).

        The file is re-read on every request, so on-disk changes are picked up.
        """
        def readfile():
            with open(path, "rb") as f:
                return f.read()

        if name is None:
            name = os.path.basename(path)
        self.addRoute(name, lambda req: (200, readfile()))

    def load_directory(self, path, recursive=True, exclude_ext=None):
        """Register every file below ``path`` under its relative path.

        ``exclude_ext``: optional iterable of extensions (e.g. ['.pyc']) to
        skip. (Avoids the shared-mutable-default pitfall of ``[]``.)
        """
        if not os.path.isdir(path):
            print("Not a directory:", path)
            return

        for dp, dn, filenames in os.walk(path):
            for f in filenames:
                file_path = os.path.join(dp, f)
                if not exclude_ext or os.path.splitext(file_path)[1] not in exclude_ext:
                    relative_path = file_path[len(path):]
                    self.add_file_path(file_path, relative_path)

    def dumpRequest(self, name):
        """Print full incoming requests for ``name`` ('/' means every path)."""
        self.dumpRequests.append(self.cleanPath(name))

    def addRoute(self, path, func):
        """Register ``func`` for exact matches of ``path``."""
        self.routes[self.cleanPath(path)] = func

    def addPrefixRoute(self, path, func):
        """Register ``func`` for every path starting with ``path``."""
        self.prefix_routes[self.cleanPath(path)] = func

    def forwardRequest(self, path, target, **kwargs):
        """Proxy every request under ``path`` to ``target``; extra kwargs are
        passed to requests.request (e.g. verify=False)."""
        self.addPrefixRoute(path, lambda req: req.onForward(path, target, **kwargs))

    def enableLogging(self):
        """Turn on per-request logging in the handler."""
        self.logRequests = True

    def enableSSL(self, keyFile="private.key", certFile="server.crt"):
        """Wrap the listening socket in TLS.

        Generates a self-signed key/certificate with openssl if the given
        files do not exist yet.
        """
        if not os.path.isfile(keyFile):
            print("Generating private key and certificate…")
            os.system("openssl req -new -x509 -keyout '%s' -out '%s' -days 365 -nodes" % (keyFile, certFile))
        elif not os.path.isfile(certFile):
            print("Generating certificate…")
            # -key (not the invalid -keyin) reuses the existing private key.
            os.system("openssl req -new -x509 -key '%s' -out '%s' -days 365 -nodes" % (keyFile, certFile))

        # ssl.wrap_socket() was deprecated in 3.7 and removed in Python 3.12;
        # SSLContext is the supported API. Client certs are not requested
        # (matches the former cert_reqs=ssl.CERT_NONE).
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        context.load_cert_chain(certfile=certFile, keyfile=keyFile)
        self.socket = context.wrap_socket(self.socket, server_side=True)

    def startBackground(self):
        """Serve forever on a background thread; returns the thread."""
        self.listen_thread = threading.Thread(target=self.serve_forever)
        self.listen_thread.start()
        return self.listen_thread

    def get_base_url(self, ip_addr=None):
        """Return e.g. 'http://1.2.3.4:9000' for this server.

        ``ip_addr`` overrides the bound address (useful when bound to
        0.0.0.0). Default ports (80/http, 443/https) are omitted.
        """
        addr, port = self.server_address

        if ip_addr is not None:
            addr = ip_addr

        # isinstance (not type ==) also covers SSLSocket subclasses.
        protocol = "https" if isinstance(self.socket, ssl.SSLSocket) else "http"
        if (int(port) == 80 and protocol == "http") or (int(port) == 443 and protocol == "https"):
            port = ""
        else:
            port = f":{port}"

        return f"{protocol}://{addr}{port}"

    def get_full_url(self, uri, ip_addr=None):
        """Return the absolute URL for ``uri`` on this server."""
        if not uri.startswith("/"):
            uri = "/" + uri
        return self.get_base_url(ip_addr) + uri

    def stop(self):
        """Stop the server: drain in-flight handlers, shut down the accept
        loop, and join the background thread if one exists."""
        self.is_running = False
        time.sleep(1)
        self.shutdown()
        # listen_thread is None when serve_forever() ran on this thread;
        # threading.currentThread() is deprecated in favor of current_thread().
        if self.listen_thread and self.listen_thread != threading.current_thread():
            self.listen_thread.join()
|
2021-06-05 14:24:35 +02:00
|
|
|
|
2020-09-27 14:37:52 +02:00
|
|
|
if __name__ == "__main__":
    # Fast pre-check kept for the short usage line; argparse validates again.
    if len(sys.argv) < 2 or sys.argv[1] not in ["shell","dump","proxy","xss"]:
        print("Usage: %s [shell,dump,proxy,xss]" % sys.argv[0])
        exit(1)

    parser = argparse.ArgumentParser(description="Spawn a temporary http server")
    parser.add_argument(
        "action",
        choices=["shell", "dump", "proxy", "xss"],
        help="Choose one of these actions: shell, dump, proxy, xss"
    )

    parser.add_argument(
        "--bind-address",
        type=str,
        default="0.0.0.0",
        dest="bind_addr",
        help="Address to bind on (default: 0.0.0.0)"
    )

    # Optional argument: port
    parser.add_argument(
        "--port",
        type=int,
        default=9000,
        help="Port to bind on (default: 9000)"
    )

    parser.add_argument(
        "--payload",
        type=str,
        default=None,
        help="Payload for xss / shell"
    )

    args = parser.parse_args()

    file_server = HttpFileServer(args.bind_addr, args.port)
    ip_address = util.get_address()

    if args.action == "shell":
        # Local import: only the shell action needs this module, and it was
        # missing from the top-level imports entirely.
        import rev_shell
        payload_type = args.payload if args.payload else "bash"
        # Pass the defaulted payload_type (previously args.payload, which may
        # be None) and serve the generated payload string — not the module.
        shell_payload = rev_shell.generate_payload(payload_type, ip_address, 4444)
        file_server.addFile("/shell", shell_payload)
        print("Reverse Shell URL:", file_server.get_full_url("/shell", ip_address))
    elif args.action == "dump":
        file_server.dumpRequest("/")
        print("Exfiltrate data using:", file_server.get_full_url("/", ip_address))
    elif args.action == "proxy":
        url = "https://google.com"
        file_server.forwardRequest("/proxy", url)
        print("Exfiltrate data using:", file_server.get_full_url("/proxy", ip_address))
    elif args.action == "xss":
        payload_type = args.payload if args.payload else "img"
        # Previously referenced the undefined name ip_addr.
        xss = xss_handler.generatePayload(payload_type, ip_address, args.port)
        print("Exfiltrate data using:")
        print(xss)

    file_server.serve_forever()
|