"""Multi-domain vulnerability scanner.

Enumerates subdomains of a base domain with sublist3r, then probes each
host (over both HTTP and HTTPS) for GraphQL introspection, exposed
sensitive files, and open debug endpoints.
"""
import argparse
import os
import subprocess
import threading
from queue import Queue
from urllib.parse import urljoin

import requests
from termcolor import colored

# Browser-like request headers so probes look like ordinary traffic.
HEADERS = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0",
    # BUG FIX: original had "/", which is not a valid Accept value —
    # presumably a mangled "*/*".
    "Accept": "*/*",
    "Connection": "keep-alive",
}

DOMAIN_QUEUE = Queue()  # target URLs waiting to be scanned
RESULTS = []            # colored finding strings, appended by worker threads
THREADS = 10            # adjust based on your network capacity
def enumerate_subdomains(domain):
    """Use sublist3r to find subdomains of *domain*.

    Args:
        domain: Base domain to enumerate (e.g. "example.com").

    Returns:
        A list of subdomain strings (blank lines filtered out), or []
        when sublist3r is missing, fails, or finds nothing. Failures are
        deliberately swallowed so the scan stays best-effort.
    """
    output_file = f"{domain}_subdomains.txt"
    try:
        subprocess.check_output(
            ["sublist3r", "-d", domain, "-o", output_file],
            stderr=subprocess.DEVNULL,
        )
        with open(output_file, "r") as f:
            # Drop empty lines so callers never build URLs like "http://".
            return [line for line in f.read().splitlines() if line.strip()]
    except (OSError, subprocess.SubprocessError):
        # sublist3r not installed, crashed, or output file unreadable.
        return []
    finally:
        # BUG FIX: original only removed the file on the success path,
        # leaking it when the read failed after sublist3r succeeded.
        if os.path.exists(output_file):
            os.remove(output_file)
def scan_domain(url):
    """Run all vulnerability checks against a single base URL.

    Appends colored findings to the module-level RESULTS list. Network
    errors are swallowed so one unreachable host does not abort the scan.

    Args:
        url: Base URL including scheme, e.g. "https://sub.example.com".
    """
    try:
        # --- GraphQL introspection ---
        endpoint = urljoin(url, "/graphql")
        # BUG FIX: the original query was missing its final closing brace
        # ("... { name } }"), making the introspection query invalid GraphQL.
        payload = {"query": "query { __schema { types { name } } }"}
        response = requests.post(endpoint, json=payload, headers=HEADERS, timeout=15)
        if "__schema" in response.text:
            RESULTS.append(colored(f"[+] GraphQL Introspection Enabled: {endpoint}", "red"))

        # --- Sensitive files ---
        for file in [".env", "config.js"]:
            test_url = urljoin(url, file)
            response = requests.get(test_url, headers=HEADERS, timeout=15)
            # Require a credential-looking marker, not just a 200, to cut
            # down on false positives from catch-all pages.
            if response.status_code == 200 and (
                "DB_PASSWORD" in response.text or "API_KEY" in response.text
            ):
                RESULTS.append(colored(f"[+] Sensitive File Exposed: {test_url}", "red"))

        # --- Debug endpoints ---
        for endpoint in ["/actuator", "/debug"]:
            test_url = urljoin(url, endpoint)
            response = requests.get(test_url, headers=HEADERS, timeout=15)
            if response.status_code == 200:
                RESULTS.append(colored(f"[+] Debug Endpoint Exposed: {test_url}", "red"))
    except requests.RequestException:
        # Narrowed from bare `except Exception`: only network-level errors
        # (timeouts, refused connections, DNS failures) are expected here;
        # anything else should surface as a bug rather than vanish.
        pass
def worker():
    """Drain DOMAIN_QUEUE forever, scanning each target URL it yields.

    Runs as a daemon thread; exits only when the process does.
    """
    while True:
        target = DOMAIN_QUEUE.get()
        scan_domain(target)
        DOMAIN_QUEUE.task_done()
def main():
    """Parse arguments, enumerate subdomains, and scan every target.

    Workflow: enumerate subdomains for the -d/--domain argument, build
    http:// and https:// targets for each, fan the work out to THREADS
    daemon workers via DOMAIN_QUEUE, then print de-duplicated findings.
    """
    parser = argparse.ArgumentParser(description="Multi-Domain Vulnerability Scanner")
    parser.add_argument("-d", "--domain", help="Base domain (e.g., example.com)", required=True)
    args = parser.parse_args()

    # Step 1: Enumerate subdomains.
    print(colored(f"[*] Enumerating subdomains for {args.domain}...", "blue"))
    # BUG FIX: de-duplicate (order-preserving) — sublist3r output can
    # repeat hosts, and the original scanned each duplicate twice.
    subdomains = list(dict.fromkeys(enumerate_subdomains(args.domain)))
    # Probe every host over both plain HTTP and HTTPS.
    targets = [f"http://{sub}" for sub in subdomains] + [f"https://{sub}" for sub in subdomains]
    if not targets:
        print(colored("[-] No subdomains found!", "yellow"))
        return

    # Step 2: Add targets to the queue.
    for target in targets:
        DOMAIN_QUEUE.put(target)

    # Step 3: Start daemon workers and block until the queue drains.
    print(colored(f"[*] Scanning {len(targets)} targets with {THREADS} threads...", "blue"))
    for _ in range(THREADS):
        threading.Thread(target=worker, daemon=True).start()
    DOMAIN_QUEUE.join()

    # Step 4: Print results (set() collapses duplicate findings).
    print(colored("\n[+] Scan Results:", "green"))
    for result in set(RESULTS):
        print(result)
# BUG FIX: the original `if name == "main":` raises NameError (`name` is
# undefined); the standard dunder entry guard is required.
if __name__ == "__main__":
    main()