#!/usr/bin/python3
"""Download every URI from a wordlist under a URL prefix into a local directory tree."""
import argparse
import os
import pathlib
import sys

import requests
from urllib3.exceptions import InsecureRequestWarning

# TLS verification is intentionally disabled below (recon-style tooling);
# suppress the per-request InsecureRequestWarning noise.
requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)


def download_files(url, root_directory, wordlist, verbose=False):
    """Fetch ``url + uri`` for each uri in *wordlist* and mirror it below *root_directory*.

    Only HTTP 200 responses are saved; redirects are not followed.  URIs whose
    directory component would resolve outside *root_directory* are skipped.

    :param url: URL prefix, expected to end with "/" (caller ensures this).
    :param root_directory: local directory the tree is mirrored into.
    :param wordlist: iterable of relative URIs (no leading "/").
    :param verbose: also report non-200 responses when True.
    """
    root_directory = str(pathlib.Path(root_directory).resolve())
    for uri in wordlist:
        dest_directory = str(pathlib.Path(root_directory, os.path.dirname(uri)).resolve())
        # Path-traversal guard (e.g. "../../etc"): the resolved target must be
        # the root itself or live strictly below it.  Comparing against
        # root + os.sep avoids the "/root" vs "/root2" prefix false match that
        # a bare startswith(root) allows.
        if dest_directory != root_directory and not dest_directory.startswith(root_directory + os.sep):
            print("[!] Path outside the root directory:", uri)
            continue
        res = requests.get(url + uri, verify=False, allow_redirects=False)
        if res.status_code == 200:
            # exist_ok makes the isdir pre-check unnecessary and race-free.
            os.makedirs(dest_directory, exist_ok=True)
            with open(os.path.join(root_directory, uri), "wb") as f:
                f.write(res.content)
            print(f"[+] {url}{uri}: {len(res.content)} Bytes")
        elif verbose:
            print(f"[-] {url}{uri}: {res.status_code} {res.reason}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(dest="dir", help="The destination directory")
    parser.add_argument(dest="url", help="The URL prefix")
    parser.add_argument(dest="wordlist", help="The wordlist containing all uris")
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="Verbose mode", default=False
    )
    args = parser.parse_args()

    # Collect unique, non-empty URIs with any leading slashes stripped.
    uris = set()
    with open(args.wordlist, "r") as f:
        for line in f:
            w = line.strip().lstrip("/")
            if w:
                uris.add(w)
    if not uris:
        print("[!] List does not contain any URIs")
        sys.exit(1)

    # BUG FIX: the original tested "if not os.mkdir(...)".  os.mkdir() returns
    # None, so a *successful* mkdir was reported as a failure and the script
    # exited, while a real failure raised an unhandled OSError.  Use makedirs
    # with exception handling instead, and exit non-zero on error.
    dest_directory = args.dir
    try:
        os.makedirs(dest_directory, exist_ok=True)
    except OSError:
        print("[!] Destination directory does not exist and could not be created")
        sys.exit(1)

    url = args.url
    if not url.endswith("/"):
        url += "/"
    download_files(url, dest_directory, uris, args.verbose)