import asyncio
import logging
import time
from contextlib import suppress

from cachetools import LFUCache, LRUCache
from radixtarget import RadixTarget

from blastdns import Client, ClientConfig, DNSError, DNSResult, MockClient, get_system_resolvers
from blastdns.exceptions import BlastDNSError

from bbot.core.helpers.async_helpers import NamedLock, async_cachedmethod
from .helpers import all_rdtypes, extract_targets, record_to_text
from ..misc import clean_dns_record, domain_parents, is_dns_name, is_domain, is_ip, parent_domain, rand_string

log = logging.getLogger("bbot.core.helpers.dns")


class DNSHelper:
    """Helper class for DNS-related operations within BBOT.

    Wraps the blastdns ``Client`` (a Rust-backed async DNS engine) and adds the
    BBOT-specific concerns that live above raw resolution: wildcard detection,
    per-zone error tracking, connectivity checks, and ``dns_omit_queries``
    filtering.

    Attributes:
        parent_helper: A reference to the instantiated `ConfigAwareHelper` (typically `scan.helpers`).
        blastdns (blastdns.Client): The underlying Rust DNS client.
        timeout (int): Per-query timeout in seconds. Defaults to 5.
        retries (int): Number of retries for failed DNS queries. Defaults to 5.
        abort_threshold (int): Consecutive failed queries per parent before aborting. Defaults to 50.
        wildcard_ignore (RadixTarget): Domains to skip during wildcard detection.
        wildcard_tests (int): Random subdomains generated per wildcard check. Defaults to 5.
        resolver_file (Path): File containing the system's resolver IPs (for tools that need it).
    """

    def __init__(self, parent_helper):
        self.log = log
        self.parent_helper = parent_helper
        self.config = self.parent_helper.config
        self.dns_config = self.config.get("dns", {})
        # config
        self.timeout = self.dns_config.get("timeout", 5)
        self.retries = self.dns_config.get("retries", 5)
        self.threads = self.dns_config.get("threads", 5)
        self.cache_size = self.dns_config.get("cache_size", 10000)
        self.abort_threshold = self.dns_config.get("abort_threshold", 50)
        # how many consecutive DNS resolution hops we allow before tagging an event as runaway
        self.runaway_limit = self.dns_config.get("runaway_limit", 5)

        # blastdns client
        self.system_resolvers = get_system_resolvers()
        self.log.debug(
            f"Starting BlastDNS client with {self.threads} threads per resolver, "
            f"{self.retries} retries, {self.cache_size} cache size, "
            f"and {self.timeout} second timeout"
        )
        self.blastdns = Client(
            self.system_resolvers,
            ClientConfig(
                request_timeout_ms=self.timeout * 1000,
                max_retries=self.retries,
                threads_per_resolver=self.threads,
                cache_capacity=self.cache_size,
            ),
        )

        # parse dns.omit_queries (e.g. "A:internal.bad.com") into {rdtype: {host, ...}}
        self.dns_omit_queries = {}
        for entry in self.dns_config.get("omit_queries", None) or []:
            parts = entry.split(":")
            if len(parts) == 2:
                rdtype, host = parts
                self.dns_omit_queries.setdefault(rdtype.upper(), set()).add(host.lower())

        # wildcard handling
        self.wildcard_disable = self.dns_config.get("wildcard_disable", False)
        self.wildcard_tests = self.dns_config.get("wildcard_tests", 5)
        self.wildcard_ignore = RadixTarget()
        for d in self.dns_config.get("wildcard_ignore", []):
            self.wildcard_ignore.insert(d)
        self._wildcard_cache = LRUCache(maxsize=10000)
        self._wildcard_lock = NamedLock()

        # error tracking + connectivity
        self._errors = LRUCache(maxsize=10000)
        self._dns_warnings = LRUCache(maxsize=10000)
        self._dns_connectivity_lock = None
        self._last_dns_success = None
        self._last_connectivity_warning = time.time()

        # copy the system's current resolvers to a text file for tool use
        self.resolver_file = self.parent_helper.tempfile(self.system_resolvers, pipe=False)

        # brute force helper
        self._brute = None

        # method-level dedup caches for is_wildcard / is_wildcard_domain
        self._is_wildcard_cache = LFUCache(maxsize=1000)
        self._is_wildcard_domain_cache = LFUCache(maxsize=1000)

    # ------------------------------------------------------------------
    # Resolution -- thin pass-throughs to blastdns. Anything more complex
    # belongs in a caller that knows the exact shape it wants.
    # ------------------------------------------------------------------

    async def resolve(self, query, rdtype="A"):
        """Resolve to a set of rdata strings (e.g. IPs).

        Returns an empty set on DNS failure (timeout, SERVFAIL, etc).
        """
        try:
            return set(await self.blastdns.resolve(query, rdtype))
        except BlastDNSError as e:
            self.log.debug(f"DNS error resolving {query}/{rdtype}: {e}")
            return set()

    async def resolve_full(self, query, rdtype="A"):
        """Return blastdns ``DNSResult`` (full response with Record objects).

        Returns an empty-answer DNSResult on DNS failure so callers can
        unconditionally iterate ``.response.answers`` without try/except.
        """
        try:
            return await self.blastdns.resolve_full(query, rdtype)
        except BlastDNSError as e:
            self.log.debug(f"DNS error resolving {query}/{rdtype}: {e}")
            return self._empty_result(query)

    async def resolve_multi_full(self, query, rdtypes):
        """Resolve many rdtypes for one host concurrently in Rust.

        Skips rdtypes listed in ``dns_omit_queries`` and rdtypes whose parent
        zone has exceeded ``abort_threshold`` consecutive errors.
        Returns ``dict[rdtype, DNSResult | DNSError]``.
        """
        # single filtering pass: config omissions first, then circuit-breaker
        filtered = []
        for rdtype in rdtypes:
            if self._is_omitted(query, rdtype):
                continue
            if await self._is_aborted(query, rdtype):
                continue
            filtered.append(rdtype)
        if not filtered:
            return {}
        results = await self.blastdns.resolve_multi_full(query, filtered)
        # Track per-zone errors so we can circuit-break dead zones
        for rdtype, response in results.items():
            if isinstance(response, DNSError):
                self.record_dns_error(query, rdtype)
            elif isinstance(response, DNSResult) and response.response.answers:
                self.reset_dns_errors(query, rdtype)
        return results

    async def resolve_batch_full(self, hosts, rdtype="A", skip_empty=False, skip_errors=False):
        """Resolve many hosts for one rdtype concurrently in Rust.

        Yields ``(host, DNSResult | DNSError)``.
        """
        async for host, result in self.blastdns.resolve_batch_full(
            hosts, rdtype, skip_empty=skip_empty, skip_errors=skip_errors
        ):
            yield host, result

    def _is_omitted(self, query, rdtype):
        """True if ``rdtype:query`` matches a ``dns.omit_queries`` config entry (exact host or subdomain)."""
        omit_hosts = self.dns_omit_queries.get(rdtype.upper())
        if not omit_hosts:
            return False
        q = str(query).lower()
        return any(q == h or q.endswith(f".{h}") for h in omit_hosts)

    # ------------------------------------------------------------------
    # Wildcard detection
    # ------------------------------------------------------------------

    @async_cachedmethod(
        lambda self: self._is_wildcard_cache,
        key=lambda query, rdtypes, raw_dns_records=None: (query, tuple(sorted(rdtypes)), bool(raw_dns_records)),
    )
    async def is_wildcard(self, query, rdtypes, raw_dns_records=None):
        """
        Check whether ``query`` is a wildcard hit within a wildcard domain.

        Args:
            query (str): The hostname to check.
            rdtypes (list): DNS record types to consider.
            raw_dns_records (dict, optional): ``{rdtype: [Record, ...]}`` already
                resolved for this query. If omitted, the records are fetched.

        Returns:
            dict: ``{rdtype: (is_wildcard, parent)}`` for each rdtype that resolved.
                ``is_wildcard`` may be ``True``, ``False``, ``None``, ``"POSSIBLE"``, or ``"ERROR"``.
        """
        query = self._wildcard_prevalidation(query)
        if not query:
            return {}
        # skip check if the query is itself a domain
        if is_domain(query):
            return {}

        if isinstance(rdtypes, str):
            rdtypes = [rdtypes]

        result = {}

        # if the work of resolving hasn't been done yet, do it
        if raw_dns_records is None:
            raw_dns_records = {}
            multi = await self.resolve_multi_full(query, list(rdtypes))
            for rdtype, response in multi.items():
                if isinstance(response, DNSResult) and response.response.answers:
                    raw_dns_records[rdtype] = response.response.answers
                elif isinstance(response, DNSError):
                    self.log.debug(f"Failed to resolve {query} ({rdtype}) during wildcard detection: {response.error}")
                    result[rdtype] = ("ERROR", query)

        # build the baseline (the IPs/hosts we actually got back for this query)
        baseline = {}
        baseline_raw = {}
        for rdtype, answers in raw_dns_records.items():
            for answer in answers:
                text_answer = record_to_text(answer)
                baseline_raw.setdefault(rdtype, set()).add(text_answer)
                for _, host in extract_targets(answer):
                    baseline.setdefault(rdtype, set()).add(host)

        if not raw_dns_records:
            return result

        rdtypes_to_check = set(raw_dns_records)

        # walk parent domains shortest-first, comparing baseline against any wildcard pool
        parents = list(domain_parents(query))
        for parent in parents[::-1]:
            wildcard_results = await self.is_wildcard_domain(parent, rdtypes_to_check)

            for rdtype in list(baseline_raw):
                if rdtype in result:
                    continue

                _baseline = baseline.get(rdtype, set())
                _baseline_raw = baseline_raw.get(rdtype, set())

                wildcard_rdtypes = wildcard_results.get(parent, {})
                wildcards = wildcard_rdtypes.get(rdtype)
                if wildcards is None:
                    continue
                wildcards, wildcard_raw = wildcards

                if wildcard_raw:
                    # a wildcard pool exists at this parent; no need to check deeper parents
                    rdtypes_to_check.discard(rdtype)
                    is_wc = any(r in wildcards for r in _baseline)
                    is_wc_raw = any(r in wildcard_raw for r in _baseline_raw)
                    if is_wc or is_wc_raw:
                        result[rdtype] = (True, parent)
                    else:
                        # parent is a wildcard zone but our answers don't match its pool
                        result[rdtype] = ("POSSIBLE", parent)

        # anything that resolved but never matched a wildcard pool is a real record
        for rdtype, answers in baseline_raw.items():
            if answers and rdtype not in result:
                result[rdtype] = (False, query)

        return result

    @async_cachedmethod(
        lambda self: self._is_wildcard_domain_cache,
        key=lambda domain, rdtypes: (domain, tuple(sorted(rdtypes))),
    )
    async def is_wildcard_domain(self, domain, rdtypes):
        """For each parent of ``domain``, return the wildcard pool per rdtype.

        Returns ``{parent: {rdtype: (hosts_set, raw_text_set)}}``.
        """
        domain = self._wildcard_prevalidation(domain)
        if not domain:
            return {}

        if isinstance(rdtypes, str):
            rdtypes = [rdtypes]
        rdtypes = set(rdtypes)

        wildcard_results = {}
        # walk parents from shortest (root) to longest, narrowing rdtypes as we find wildcards
        for host in list(domain_parents(domain, include_self=True))[::-1]:
            host_results = {}
            # snapshot the pending rdtypes ONCE: ``rdtypes`` is mutated below, and
            # zip() must pair gather() results with the exact order tasks were built in
            pending = list(rdtypes)
            if not pending:
                break
            # check each rdtype concurrently for this parent
            tasks = [self._is_wildcard_zone(host, rdtype) for rdtype in pending]
            for rdtype, (results, results_raw) in zip(pending, await asyncio.gather(*tasks)):
                if results_raw:
                    rdtypes.discard(rdtype)
                    host_results[rdtype] = (results, results_raw)
            if host_results:
                wildcard_results[host] = host_results

        return wildcard_results

    async def _is_wildcard_zone(self, host, rdtype):
        """Test one (host, rdtype) for wildcard configuration. Cached per-pair."""
        rdtype = rdtype.upper()
        host_hash = hash((host, rdtype))

        # lock per (host, rdtype) so concurrent modules don't duplicate the probe
        async with self._wildcard_lock.lock(host_hash):
            try:
                cached = self._wildcard_cache[host_hash]
                self.log.debug(f"Got {host}:{rdtype} from wildcard cache")
                return cached
            except KeyError:
                pass

            self.log.debug(f"Checking if {host}:{rdtype} is a wildcard")
            results = set()
            results_raw = set()

            # resolve several random subdomains; any answers imply a wildcard
            rand_hosts = [f"{rand_string(digits=False, length=10)}.{host}" for _ in range(self.wildcard_tests)]
            async for _, response in self.resolve_batch_full(rand_hosts, rdtype):
                if not isinstance(response, DNSResult):
                    continue
                for answer in response.response.answers:
                    results_raw.add(record_to_text(answer))
                    for _, t in extract_targets(answer):
                        results.add(t)

            if results:
                self.log.info(f"Encountered domain with wildcard DNS ({rdtype}): *.{host}")
            else:
                self.log.debug(f"Finished checking {host}:{rdtype}, it is not a wildcard")

            self._wildcard_cache[host_hash] = (results, results_raw)
            return results, results_raw

    def _wildcard_prevalidation(self, host):
        """Return the cleaned host if wildcard detection should run on it, else False."""
        if self.wildcard_disable:
            return False

        host = clean_dns_record(host)
        # skip check if it's an IP or a plain hostname
        if is_ip(host) or "." not in host:
            return False
        if not is_dns_name(host):
            return False

        # skip check if the query's parent domain is excluded in the config
        wildcard_ignore = self.wildcard_ignore.search(host)
        if wildcard_ignore:
            self.log.debug(
                f"Skipping wildcard detection on {host} because {wildcard_ignore} is excluded in the config"
            )
            return False

        return host

    # ------------------------------------------------------------------
    # Error tracking + connectivity
    # ------------------------------------------------------------------

    async def _is_aborted(self, query, rdtype):
        """Check if queries for this parent zone + rdtype have been circuit-broken.

        Only triggers on sustained timeouts (DNSError), not instant failures
        like NXDOMAIN or SERVFAIL. When the threshold is hit, verifies
        network connectivity first — if the network is down, clears error
        counters instead of aborting (the zone might be fine).
        """
        parent = parent_domain(str(query))
        parent_hash = hash((parent, rdtype))
        error_count = self._errors.get(parent_hash, 0)
        if error_count >= self.abort_threshold:
            # before aborting, make sure our network is actually up
            connectivity = await self._connectivity_check()
            if not connectivity:
                # network is down — don't blame the zone
                self._errors.clear()
                return False
            if parent_hash not in self._dns_warnings:
                self.log.info(
                    f'Aborting {rdtype} queries to "{parent}" — '
                    f"{error_count} consecutive errors exceeded threshold ({self.abort_threshold})"
                )
                self._dns_warnings[parent_hash] = True
            return True
        return False

    def record_dns_error(self, query, rdtype):
        """Bump the error counter for ``query``'s parent zone. Returns the new count."""
        parent_hash = hash((parent_domain(str(query)), rdtype))
        self._errors[parent_hash] = self._errors.get(parent_hash, 0) + 1
        return self._errors[parent_hash]

    def reset_dns_errors(self, query, rdtype):
        """Zero the error counter for ``query``'s parent zone after a successful answer."""
        parent_hash = hash((parent_domain(str(query)), rdtype))
        if parent_hash in self._errors:
            self._errors[parent_hash] = 0

    @property
    def dns_connectivity_lock(self):
        # created lazily so it binds to the running event loop
        if self._dns_connectivity_lock is None:
            self._dns_connectivity_lock = asyncio.Lock()
        return self._dns_connectivity_lock

    async def _connectivity_check(self, interval=5):
        """Confirm the network can reach DNS. Cached for ``interval`` seconds."""
        if self._last_dns_success is not None and time.time() - self._last_dns_success < interval:
            return True

        async with self.dns_connectivity_lock:
            # double-check under the lock: a coroutine that was queued behind a
            # successful probe shouldn't fire a redundant query of its own
            if self._last_dns_success is not None and time.time() - self._last_dns_success < interval:
                return True
            with suppress(Exception):
                answers = await self.blastdns.resolve("www.google.com", "A")
                if answers:
                    self._last_dns_success = time.time()
                    return True

            if time.time() - self._last_connectivity_warning > interval:
                self.log.warning("DNS queries are failing, please check your internet connection")
                self._last_connectivity_warning = time.time()
            self._errors.clear()
            return False

    # ------------------------------------------------------------------
    # Brute / mock helpers
    # ------------------------------------------------------------------

    @property
    def brute(self):
        # lazy import/instantiation to avoid a circular import at module load
        if self._brute is None:
            from .brute import DNSBrute

            self._brute = DNSBrute(self.parent_helper)
        return self._brute

    async def _mock_dns(self, mock_data):
        """Swap the underlying client for a ``MockClient`` configured with ``mock_data``."""
        mock_client = MockClient()
        mock_client.mock_dns(mock_data)
        self.blastdns = mock_client

    @staticmethod
    def _empty_result(host=""):
        """Build a minimal ``DNSResult`` with no answers, for use as a safe fallback."""
        from blastdns.models import Header, Response

        header = Header(
            id=0,
            message_type="Response",
            op_code="Query",
            authoritative=False,
            truncation=False,
            recursion_desired=True,
            recursion_available=True,
            authentic_data=False,
            checking_disabled=False,
            response_code="NoError",
            query_count=0,
            answer_count=0,
            name_server_count=0,
            additional_count=0,
        )
        response = Response(header=header, queries=[], answers=[], name_servers=[], additionals=[])
        return DNSResult(host=host, response=response)

    async def shutdown(self):
        """No-op kept for API compatibility -- blastdns runs in-process, nothing to tear down."""
        return None


# Re-export for convenience
__all__ = ["DNSHelper", "all_rdtypes", "extract_targets", "record_to_text"]
self.dns_config.get("timeout", 5) - self.retries = self.dns_config.get("retries", 1) - self.abort_threshold = self.dns_config.get("abort_threshold", 50) - - # resolver - self.resolver = dns.asyncresolver.Resolver() - self.resolver.rotate = True - self.resolver.timeout = self.timeout - self.resolver.lifetime = self.timeout - - # skip certain queries - dns_omit_queries = self.dns_config.get("omit_queries", None) - if not dns_omit_queries: - dns_omit_queries = [] - self.dns_omit_queries = {} - for d in dns_omit_queries: - d = d.split(":") - if len(d) == 2: - rdtype, query = d - rdtype = rdtype.upper() - query = query.lower() - try: - self.dns_omit_queries[rdtype].add(query) - except KeyError: - self.dns_omit_queries[rdtype] = {query} - - # wildcard handling - self.wildcard_ignore = self.dns_config.get("wildcard_ignore", None) - if not self.wildcard_ignore: - self.wildcard_ignore = [] - self.wildcard_ignore = tuple([str(d).strip().lower() for d in self.wildcard_ignore]) - self.wildcard_tests = self.dns_config.get("wildcard_tests", 5) - self._wildcard_cache = LRUCache(maxsize=10000) - # since wildcard detection takes some time, This is to prevent multiple - # modules from kicking off wildcard detection for the same domain at the same time - self._wildcard_lock = NamedLock() - - self._dns_connectivity_lock = None - self._last_dns_success = None - self._last_connectivity_warning = time.time() - # keeps track of warnings issued for wildcard detection to prevent duplicate warnings - self._dns_warnings = LRUCache(maxsize=10000) - self._errors = LRUCache(maxsize=10000) - self._debug = self.dns_config.get("debug", False) - self._dns_cache = LRUCache(maxsize=100000) - - async def resolve(self, query, **kwargs): - """Resolve DNS names and IP addresses to their corresponding results. - - This is a high-level function that can translate a given domain name to its associated IP addresses - or an IP address to its corresponding domain names. 
It's structured for ease of use within modules - and will abstract away most of the complexity of DNS resolution, returning a simple set of results. - - Args: - query (str): The domain name or IP address to resolve. - **kwargs: Additional arguments to be passed to the resolution process. - - Returns: - set: A set containing resolved domain names or IP addresses. - - Examples: - >>> results = await resolve("1.2.3.4") - {"evilcorp.com"} - - >>> results = await resolve("evilcorp.com") - {"1.2.3.4", "dead::beef"} - """ - results = set() - try: - answers, errors = await self.resolve_raw(query, **kwargs) - for answer in answers: - for _, host in extract_targets(answer): - results.add(host) - except BaseException: - self.log.trace(f"Caught exception in resolve({query}, {kwargs}):") - self.log.trace(traceback.format_exc()) - raise - - self.debug(f"Results for {query} with kwargs={kwargs}: {results}") - return results - - async def resolve_raw(self, query, **kwargs): - """Resolves the given query to its associated DNS records. - - This function is a foundational method for DNS resolution in this class. It understands both IP addresses and - hostnames and returns their associated records in a raw format provided by the dnspython library. - - Args: - query (str): The IP address or hostname to resolve. - type (str or list[str], optional): Specifies the DNS record type(s) to fetch. Can be a single type like 'A' - or a list like ['A', 'AAAA']. If set to 'any', 'all', or '*', it fetches all supported types. If not - specified, the function defaults to fetching 'A' and 'AAAA' records. - **kwargs: Additional arguments that might be passed to the resolver. - - Returns: - tuple: A tuple containing two lists: - - list: A list of tuples where each tuple consists of a record type string (like 'A') and the associated - raw dnspython answer. - - list: A list of tuples where each tuple consists of a record type string and the associated error if - there was an issue fetching the record. 
- - Examples: - >>> await resolve_raw("8.8.8.8") - ([('PTR', )], []) - - >>> await resolve_raw("dns.google") - (, []) - """ - # DNS over TCP is more reliable - # But setting this breaks DNS resolution on Ubuntu because systemd-resolve doesn't support TCP - # kwargs["tcp"] = True - try: - query = str(query).strip() - kwargs.pop("rdtype", None) - rdtype = kwargs.pop("type", "A") - if is_ip(query): - return await self._resolve_ip(query, **kwargs) - else: - return await self._resolve_hostname(query, rdtype=rdtype, **kwargs) - except BaseException: - self.log.trace(f"Caught exception in resolve_raw({query}, {kwargs}):") - self.log.trace(traceback.format_exc()) - raise - - async def _resolve_hostname(self, query, **kwargs): - """Translate a hostname into its corresponding IP addresses. - - This is the foundational function for converting a domain name into its associated IP addresses. It's designed - for internal use within the class and handles retries, caching, and a variety of error/timeout scenarios. - It also respects certain configurations that might ask to skip certain types of queries. Results are returned - in the default dnspython answer object format. - - Args: - query (str): The hostname to resolve. - rdtype (str, optional): The type of DNS record to query (e.g., 'A', 'AAAA'). Defaults to 'A'. - retries (int, optional): The number of times to retry on failure. Defaults to class-wide `retries`. - use_cache (bool, optional): Whether to check the cache before trying a fresh resolution. Defaults to True. - **kwargs: Additional arguments that might be passed to the resolver. - - Returns: - tuple: A tuple containing: - - list: A list of resolved IP addresses. - - list: A list of errors encountered during the resolution process. 
- - Examples: - >>> results, errors = await _resolve_hostname("google.com") - (, []) - """ - self.debug(f"Resolving {query} with kwargs={kwargs}") - results = [] - errors = [] - rdtype = kwargs.get("rdtype", "A") - - # skip certain queries if requested - if rdtype in self.dns_omit_queries: - if any(h == query or query.endswith(f".{h}") for h in self.dns_omit_queries[rdtype]): - self.debug(f"Skipping {rdtype}:{query} because it's omitted in the config") - return results, errors - - parent = parent_domain(query) - retries = kwargs.pop("retries", self.retries) - use_cache = kwargs.pop("use_cache", True) - tries_left = int(retries) + 1 - parent_hash = hash((parent, rdtype)) - dns_cache_hash = hash((query, rdtype)) - while tries_left > 0: - try: - if use_cache: - results = self._dns_cache.get(dns_cache_hash, []) - if not results: - error_count = self._errors.get(parent_hash, 0) - if error_count >= self.abort_threshold: - connectivity = await self._connectivity_check() - if connectivity: - self.log.verbose( - f'Aborting query "{query}" because failed {rdtype} queries for "{parent}" ({error_count:,}) exceeded abort threshold ({self.abort_threshold:,})' - ) - if parent_hash not in self._dns_warnings: - self.log.verbose( - f'Aborting future {rdtype} queries to "{parent}" because error count ({error_count:,}) exceeded abort threshold ({self.abort_threshold:,})' - ) - self._dns_warnings[parent_hash] = True - return results, errors - results = await self._catch(self.resolver.resolve, query, **kwargs) - if use_cache: - self._dns_cache[dns_cache_hash] = results - if parent_hash in self._errors: - self._errors[parent_hash] = 0 - break - except ( - dns.resolver.NoNameservers, - dns.exception.Timeout, - dns.resolver.LifetimeTimeout, - TimeoutError, - asyncio.exceptions.TimeoutError, - ) as e: - try: - self._errors[parent_hash] += 1 - except KeyError: - self._errors[parent_hash] = 1 - errors.append(e) - # don't retry if we get a SERVFAIL - if isinstance(e, 
dns.resolver.NoNameservers): - break - tries_left -= 1 - err_msg = ( - f'DNS error or timeout for {rdtype} query "{query}" ({self._errors[parent_hash]:,} so far): {e}' - ) - if tries_left > 0: - retry_num = (retries + 1) - tries_left - self.debug(err_msg) - self.debug(f"Retry (#{retry_num}) resolving {query} with kwargs={kwargs}") - else: - self.log.verbose(err_msg) - - if results: - self._last_dns_success = time.time() - self.debug(f"Answers for {query} with kwargs={kwargs}: {list(results)}") - - if errors: - self.debug(f"Errors for {query} with kwargs={kwargs}: {errors}") - - return results, errors - - async def _resolve_ip(self, query, **kwargs): - """Translate an IP address into a corresponding DNS name. - - This is the most basic function that will convert an IP address into its associated domain name. It handles - retries, caching, and multiple types of timeout/error scenarios internally. The function is intended for - internal use and should not be directly called by modules without understanding its intricacies. - - Args: - query (str): The IP address to be reverse-resolved. - retries (int, optional): The number of times to retry on failure. Defaults to 0. - use_cache (bool, optional): Whether to check the cache for the result before attempting resolution. Defaults to True. - **kwargs: Additional arguments to be passed to the resolution process. - - Returns: - tuple: A tuple containing: - - list: A list of resolved domain names (in default dnspython answer format). - - list: A list of errors encountered during resolution. 
- - Examples: - >>> results, errors = await _resolve_ip("8.8.8.8") - (, []) - """ - self.debug(f"Reverse-resolving {query} with kwargs={kwargs}") - retries = kwargs.pop("retries", 0) - use_cache = kwargs.pop("use_cache", True) - tries_left = int(retries) + 1 - results = [] - errors = [] - dns_cache_hash = hash((query, "PTR")) - while tries_left > 0: - try: - if use_cache: - results = self._dns_cache.get(dns_cache_hash, []) - if not results: - results = await self._catch(self.resolver.resolve_address, query, **kwargs) - if use_cache: - self._dns_cache[dns_cache_hash] = results - break - except ( - dns.resolver.NoNameservers, - dns.exception.Timeout, - dns.resolver.LifetimeTimeout, - TimeoutError, - asyncio.exceptions.TimeoutError, - ) as e: - errors.append(e) - # don't retry if we get a SERVFAIL - if isinstance(e, dns.resolver.NoNameservers): - self.debug(f"{e} (query={query}, kwargs={kwargs})") - break - else: - tries_left -= 1 - if tries_left > 0: - retry_num = (retries + 2) - tries_left - self.debug(f"Retrying (#{retry_num}) {query} with kwargs={kwargs}") - - if results: - self._last_dns_success = time.time() - - return results, errors - - async def resolve_batch(self, queries, threads=10, **kwargs): - """ - A helper to execute a bunch of DNS requests. - - Args: - queries (list): List of queries to resolve. - **kwargs: Additional keyword arguments to pass to `resolve()`. - - Yields: - tuple: A tuple containing the original query and its resolved value. - - Examples: - >>> import asyncio - >>> async def example_usage(): - ... async for result in resolve_batch(['www.evilcorp.com', 'evilcorp.com']): - ... 
print(result) - ('www.evilcorp.com', {'1.1.1.1'}) - ('evilcorp.com', {'2.2.2.2'}) - """ - async for (args, _, _), responses in self.task_pool( - self.resolve, args_kwargs=queries, threads=threads, global_kwargs=kwargs - ): - yield args[0], responses - - async def resolve_raw_batch(self, queries, threads=10, **kwargs): - queries_kwargs = [[q[0], {"type": q[1]}] for q in queries] - async for (args, kwargs, _), (answers, errors) in self.task_pool( - self.resolve_raw, args_kwargs=queries_kwargs, threads=threads, global_kwargs=kwargs - ): - query = args[0] - rdtype = kwargs["type"] - yield ((query, rdtype), (answers, errors)) - - async def _catch(self, callback, *args, **kwargs): - """ - Asynchronously catches exceptions thrown during DNS resolution and logs them. - - This method wraps around a given asynchronous callback function to handle different - types of DNS exceptions and general exceptions. It logs the exceptions for debugging - and, in some cases, re-raises them. - - Args: - callback (callable): The asynchronous function to be executed. - *args: Positional arguments to pass to the callback. - **kwargs: Keyword arguments to pass to the callback. - - Returns: - Any: The return value of the callback function, or an empty list if an exception is caught. - - Raises: - dns.resolver.NoNameservers: When no nameservers could be reached. 
- """ - try: - return await callback(*args, **kwargs) - except dns.resolver.NoNameservers: - raise - except (dns.exception.Timeout, dns.resolver.LifetimeTimeout, TimeoutError): - self.log.debug(f"DNS query with args={args}, kwargs={kwargs} timed out after {self.timeout} seconds") - raise - except dns.exception.DNSException as e: - self.debug(f"{e} (args={args}, kwargs={kwargs})") - except Exception as e: - self.log.warning(f"Error in {callback.__qualname__}() with args={args}, kwargs={kwargs}: {e}") - self.log.trace(traceback.format_exc()) - return [] - - async def is_wildcard(self, query, rdtypes, raw_dns_records=None): - """ - Use this method to check whether a *host* is a wildcard entry - - This can reliably tell the difference between a valid DNS record and a wildcard within a wildcard domain. - - It works by making a bunch of random DNS queries to the parent domain, compiling a list of wildcard IPs, - then comparing those to the IPs of the host in question. If the host's IP matches the wildcard ones, it's a wildcard. - - If you want to know whether a domain is using wildcard DNS, use `is_wildcard_domain()` instead. - - Args: - query (str): The hostname to check for a wildcard entry. - rdtypes (list): The DNS record type (e.g., "A", "AAAA") to consider during the check. - raw_dns_records (dict, optional): Dictionary of {rdtype: [answer1, answer2, ...], ...} containing raw dnspython answers for the query. - - Returns: - dict: A dictionary indicating if the query is a wildcard for each checked DNS record type. - Keys are DNS record types like "A", "AAAA", etc. - Values are tuples where the first element is a boolean indicating if the query is a wildcard, - and the second element is the wildcard parent if it's a wildcard. 
- - Examples: - >>> is_wildcard("www.github.io", rdtypes=["A", "AAAA", "MX"]) - {"A": (True, "github.io"), "AAAA": (True, "github.io"), "MX": (False, "github.io")} - - >>> is_wildcard("www.evilcorp.com", rdtypes=["A"]) - {"A": (False, "evilcorp.com")} - - Note: - `is_wildcard` can be True, False, or None (indicating that wildcard detection was inconclusive) - """ - if isinstance(rdtypes, str): - rdtypes = [rdtypes] - - result = {} - - # if the work of resolving hasn't been done yet, do it - if raw_dns_records is None: - raw_dns_records = {} - queries = [(query, rdtype) for rdtype in rdtypes] - async for (_, rdtype), (answers, errors) in self.resolve_raw_batch(queries): - if answers: - for answer in answers: - try: - raw_dns_records[rdtype].add(answer) - except KeyError: - raw_dns_records[rdtype] = {answer} - else: - if errors: - self.debug(f"Failed to resolve {query} ({rdtype}) during wildcard detection") - result[rdtype] = ("ERROR", query) - - # clean + process the raw records into a baseline - baseline = {} - baseline_raw = {} - for rdtype, answers in raw_dns_records.items(): - for answer in answers: - text_answer = answer.to_text() - try: - baseline_raw[rdtype].add(text_answer) - except KeyError: - baseline_raw[rdtype] = {text_answer} - for _, host in extract_targets(answer): - try: - baseline[rdtype].add(host) - except KeyError: - baseline[rdtype] = {host} - - # if it's unresolved, it's a big nope - if not raw_dns_records: - return result - - # once we've resolved the base query and have IP addresses to work with - # we can compare the IPs to the ones we have on file for wildcards - - # only bother to check the rdypes that actually resolve - rdtypes_to_check = set(raw_dns_records) - - # for every parent domain, starting with the shortest - parents = list(domain_parents(query)) - for parent in parents[::-1]: - # check if the parent domain is set up with wildcards - wildcard_results = await self.is_wildcard_domain(parent, rdtypes_to_check) - - # for every rdtype 
- for rdtype in list(baseline_raw): - # skip if we already found a wildcard for this rdtype - if rdtype in result: - continue - - # get our baseline IPs from above - _baseline = baseline.get(rdtype, set()) - _baseline_raw = baseline_raw.get(rdtype, set()) - - wildcard_rdtypes = wildcard_results.get(parent, {}) - wildcards = wildcard_rdtypes.get(rdtype, None) - if wildcards is None: - continue - wildcards, wildcard_raw = wildcards - - if wildcard_raw: - # skip this rdtype from now on - rdtypes_to_check.remove(rdtype) - - # check if any of our baseline IPs are in the wildcard results - is_wildcard = any(r in wildcards for r in _baseline) - is_wildcard_raw = any(r in wildcard_raw for r in _baseline_raw) - - # if there are any matches, we have a wildcard - if is_wildcard or is_wildcard_raw: - result[rdtype] = (True, parent) - else: - # otherwise, it's still suspicious, because we had random stuff resolve at this level - result[rdtype] = ("POSSIBLE", parent) - - # any rdtype that wasn't a wildcard, mark it as False - for rdtype, answers in baseline_raw.items(): - if answers and rdtype not in result: - result[rdtype] = (False, query) - - return result - - async def is_wildcard_domain(self, domain, rdtypes): - """ - Check whether a given host or its children make use of wildcard DNS entries. Wildcard DNS can have - various implications, particularly in subdomain enumeration and subdomain takeovers. - - Args: - domain (str): The domain to check for wildcard DNS entries. - rdtypes (list): Which DNS record types to check. - - Returns: - dict: A dictionary where the keys are the parent domains that have wildcard DNS entries, - and the values are another dictionary of DNS record types ("A", "AAAA", etc.) mapped to - sets of their resolved IP addresses. 
- - Examples: - >>> is_wildcard_domain("github.io") - {"github.io": {"A": {"1.2.3.4"}, "AAAA": {"dead::beef"}}} - - >>> is_wildcard_domain("example.com") - {} - """ - if isinstance(rdtypes, str): - rdtypes = [rdtypes] - rdtypes = set(rdtypes) - - wildcard_results = {} - # make a list of its parents - parents = list(domain_parents(domain, include_self=True)) - # and check each of them, beginning with the highest parent (i.e. the root domain) - for i, host in enumerate(parents[::-1]): - host_results = {} - queries = [((host, rdtype), {}) for rdtype in rdtypes] - async for ((_, rdtype), _, _), (results, results_raw) in self.task_pool( - self._is_wildcard_zone, args_kwargs=queries - ): - # if we hit a wildcard, we can skip this rdtype from now on - if results_raw: - rdtypes.remove(rdtype) - host_results[rdtype] = results, results_raw - - if host_results: - wildcard_results[host] = host_results - - return wildcard_results - - async def _is_wildcard_zone(self, host, rdtype): - """ - Check whether a specific DNS zone+rdtype has a wildcard configuration - """ - rdtype = rdtype.upper() - - # have we checked this host before? 
- host_hash = hash((host, rdtype)) - async with self._wildcard_lock.lock(host_hash): - # if we've seen this host before - try: - wildcard_results, wildcard_results_raw = self._wildcard_cache[host_hash] - self.debug(f"Got {host}:{rdtype} from cache") - except KeyError: - wildcard_results = set() - wildcard_results_raw = set() - self.debug(f"Checking if {host}:{rdtype} is a wildcard") - - # determine if this is a wildcard domain - # resolve a bunch of random subdomains of the same parent - rand_queries = [] - for _ in range(self.wildcard_tests): - rand_query = f"{rand_string(digits=False, length=10)}.{host}" - rand_queries.append((rand_query, rdtype)) - - async for (query, rdtype), (answers, errors) in self.resolve_raw_batch(rand_queries, use_cache=False): - for answer in answers: - # consider both the raw record - wildcard_results_raw.add(answer.to_text()) - # and all the extracted hosts - for _, t in extract_targets(answer): - wildcard_results.add(t) - - if wildcard_results: - self.log.info(f"Encountered domain with wildcard DNS ({rdtype}): *.{host}") - else: - self.debug(f"Finished checking {host}:{rdtype}, it is not a wildcard") - self._wildcard_cache[host_hash] = wildcard_results, wildcard_results_raw - - return wildcard_results, wildcard_results_raw - - async def _is_wildcard(self, query, rdtypes, dns_children): - if isinstance(rdtypes, str): - rdtypes = [rdtypes] - - @property - def dns_connectivity_lock(self): - if self._dns_connectivity_lock is None: - self._dns_connectivity_lock = asyncio.Lock() - return self._dns_connectivity_lock - - async def _connectivity_check(self, interval=5): - """ - Periodically checks for an active internet connection by attempting DNS resolution. - - Args: - interval (int, optional): The time interval, in seconds, at which to perform the check. - Defaults to 5 seconds. - - Returns: - bool: True if there is an active internet connection, False otherwise. 
- - Examples: - >>> await _connectivity_check() - True - """ - if self._last_dns_success is not None: - if time.time() - self._last_dns_success < interval: - return True - dns_server_working = [] - async with self.dns_connectivity_lock: - with suppress(Exception): - dns_server_working = await self._catch(self.resolver.resolve, "www.google.com", rdtype="A") - if dns_server_working: - self._last_dns_success = time.time() - return True - if time.time() - self._last_connectivity_warning > interval: - self.log.warning("DNS queries are failing, please check your internet connection") - self._last_connectivity_warning = time.time() - self._errors.clear() - return False - - def debug(self, *args, **kwargs): - if self._debug: - self.log.trace(*args, **kwargs) - - @property - def in_tests(self): - return os.getenv("BBOT_TESTING", "") == "True" - - async def _mock_dns(self, mock_data, custom_lookup_fn=None): - from .mock import MockResolver - - def deserialize_function(func_source): - assert self.in_tests, "Can only mock when BBOT_TESTING=True" - if func_source is None: - return None - namespace = {} - exec(func_source, {}, namespace) - return namespace["custom_lookup"] - - self.resolver = MockResolver(mock_data, custom_lookup_fn=deserialize_function(custom_lookup_fn)) diff --git a/bbot/core/helpers/dns/helpers.py b/bbot/core/helpers/dns/helpers.py index 340af5a425..78303cdb2c 100644 --- a/bbot/core/helpers/dns/helpers.py +++ b/bbot/core/helpers/dns/helpers.py @@ -1,11 +1,52 @@ import logging from bbot.core.helpers.regexes import dns_name_extraction_regex -from bbot.core.helpers.misc import clean_dns_record, smart_decode +from bbot.core.helpers.misc import clean_dns_record log = logging.getLogger("bbot.core.helpers.dns") +# Default rdtypes BBOT cares about during recursive resolution +all_rdtypes = ["A", "AAAA", "SRV", "MX", "NS", "SOA", "CNAME", "TXT"] + + +def extract_targets(record): + """Hostnames/IPs worth following from a blastdns ``Record``. 
+ + For structured rdata (A/AAAA/CNAME/NS/PTR/MX/SOA/SRV/etc), blastdns has + already extracted the embedded names in Rust -- we just hand those back. + + For TXT records we additionally apply a hostname regex to the text content, + since SPF / DKIM / similar TXT payloads commonly embed hostnames worth + pivoting on. That regex extraction is BBOT-specific and stays here. + """ + results = set() + for rdtype, host in record.extract_targets(): + cleaned = clean_dns_record(host) + if cleaned: + results.add((rdtype, cleaned)) + + # TXT: pull additional hostnames out of the free-form text content + is_txt = "TXT" in record.rdata + if is_txt: + text = record.to_text() + for match in dns_name_extraction_regex.finditer(text): + cleaned = clean_dns_record(text[match.start() : match.end()]) + if cleaned: + results.add(("TXT", cleaned)) + + return results + + +def record_to_text(record): + """Presentation-format text for a blastdns ``Record``. + + Equivalent to dnspython's ``answer.to_text()``. blastdns produces this on + the Rust side via hickory's ``Display`` impl, so this is a thin pass-through. + """ + return record.to_text() + + # the following are the result of a 1-day internet survey to find the top SRV records # the scan resulted in 36,282 SRV records. the count for each one is shown. common_srvs = [ @@ -154,61 +195,6 @@ ] -def extract_targets(record): - """ - Extracts hostnames or IP addresses from a given DNS record. - - This method reads the DNS record's type and based on that, extracts the target - hostnames or IP addresses it points to. The type of DNS record - (e.g., "A", "MX", "CNAME", etc.) determines which fields are used for extraction. - - Args: - record (dns.rdata.Rdata): The DNS record to extract information from. - - Returns: - set: A set of tuples, each containing the DNS record type and the extracted value. 
- - Examples: - >>> from dns.rrset import from_text - >>> record = from_text('www.example.com', 3600, 'IN', 'A', '192.0.2.1') - >>> extract_targets(record[0]) - {('A', '192.0.2.1')} - - >>> record = from_text('example.com', 3600, 'IN', 'MX', '10 mail.example.com.') - >>> extract_targets(record[0]) - {('MX', 'mail.example.com')} - - """ - results = set() - - def add_result(rdtype, _record): - cleaned = clean_dns_record(_record) - if cleaned: - results.add((rdtype, cleaned)) - - rdtype = str(record.rdtype.name).upper() - if rdtype in ("A", "AAAA", "NS", "CNAME", "PTR"): - add_result(rdtype, record) - elif rdtype == "SOA": - add_result(rdtype, record.mname) - elif rdtype == "MX": - add_result(rdtype, record.exchange) - elif rdtype == "SRV": - add_result(rdtype, record.target) - elif rdtype == "TXT": - for s in record.strings: - s = smart_decode(s) - for match in dns_name_extraction_regex.finditer(s): - start, end = match.span() - host = s[start:end] - add_result(rdtype, host) - elif rdtype == "NSEC": - add_result(rdtype, record.next) - else: - log.warning(f'Unknown DNS record type "{rdtype}"') - return results - - def service_record(host, rdtype=None): """ Indicates that the provided host name and optional rdtype is an SRV or related service record. 
diff --git a/bbot/core/helpers/dns/mock.py b/bbot/core/helpers/dns/mock.py deleted file mode 100644 index 3f6fd83ea5..0000000000 --- a/bbot/core/helpers/dns/mock.py +++ /dev/null @@ -1,74 +0,0 @@ -import dns -import logging - -log = logging.getLogger("bbot.core.helpers.dns.mock") - - -class MockResolver: - def __init__(self, mock_data=None, custom_lookup_fn=None): - self.mock_data = mock_data if mock_data else {} - self._custom_lookup_fn = custom_lookup_fn - self.nameservers = ["127.0.0.1"] - - async def resolve_address(self, ipaddr, *args, **kwargs): - modified_kwargs = {} - modified_kwargs.update(kwargs) - modified_kwargs["rdtype"] = "PTR" - return await self.resolve(str(dns.reversename.from_address(ipaddr)), *args, **modified_kwargs) - - def _lookup(self, query, rdtype): - query = query.strip(".") - ret = [] - if self._custom_lookup_fn is not None: - answers = self._custom_lookup_fn(query, rdtype) - if answers is not None: - ret.extend(list(answers)) - answers = self.mock_data.get(query, {}).get(rdtype, []) - if answers: - ret.extend(list(answers)) - if not ret: - raise dns.resolver.NXDOMAIN(f"No answer found for {query} {rdtype}") - return ret - - def create_dns_response(self, query_name, answers, rdtype): - query_name = query_name.strip(".") - message_text = f"""id 1234 -opcode QUERY -rcode NOERROR -flags QR AA RD -;QUESTION -{query_name}. IN {rdtype} -;ANSWER""" - for answer in answers: - if answer == "": - answer = '""' - message_text += f"\n{query_name}. 
1 IN {rdtype} {answer}" - - message_text += "\n;AUTHORITY\n;ADDITIONAL\n" - message = dns.message.from_text(message_text) - # log.verbose(message_text) - return message - - async def resolve(self, query_name, rdtype=None): - if rdtype is None: - rdtype = "A" - elif isinstance(rdtype, str): - rdtype = rdtype.upper() - else: - rdtype = str(rdtype.name).upper() - - domain_name = dns.name.from_text(query_name) - rdtype_obj = dns.rdatatype.from_text(rdtype) - - if "_NXDOMAIN" in self.mock_data and query_name in self.mock_data["_NXDOMAIN"]: - # Simulate the NXDOMAIN exception - raise dns.resolver.NXDOMAIN - - try: - answers = self._lookup(query_name, rdtype) - log.verbose(f"Answers for {query_name}:{rdtype}: {answers}") - response = self.create_dns_response(query_name, answers, rdtype) - answer = dns.resolver.Answer(domain_name, rdtype_obj, dns.rdataclass.IN, response) - return answer - except dns.resolver.NXDOMAIN: - return [] diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index cd2011abcc..8e5bb23a16 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -2743,14 +2743,11 @@ def clean_dns_record(record): >>> clean_dns_record('www.evilcorp.com.') 'www.evilcorp.com' - >>> from dns.rrset import from_text - >>> record = from_text('www.evilcorp.com', 3600, 'IN', 'A', '1.2.3.4')[0] - >>> clean_dns_record(record) - '1.2.3.4' - """ - if not isinstance(record, str): - record = str(record.to_text()) - return str(record).rstrip(".").lower() + >>> clean_dns_record('*.evilcorp.com.') + 'evilcorp.com' + """ + record = str(record).strip("*.").lower() + return record def truncate_filename(file_path, max_length=255): diff --git a/bbot/defaults.yml b/bbot/defaults.yml index a00adad9d9..09b5adf89f 100644 --- a/bbot/defaults.yml +++ b/bbot/defaults.yml @@ -41,8 +41,10 @@ dns: disable: false # Speed up scan by not creating any new DNS events, and only resolving A and AAAA records minimal: false - # How many instances of the dns module to run 
concurrently - threads: 25 + # How many threads to use per resolver (best way to increase speed is to put more resolvers in /etc/resolv.conf) + threads: 10 + # How many DNS records to cache + cache_size: 100000 # How many concurrent DNS resolvers to use when brute-forcing # (under the hood this is passed through directly to massdns -s) brute_threads: 1000 @@ -67,7 +69,7 @@ dns: wildcard_tests: 10 # Skip DNS requests for a certain domain and rdtype after encountering this many timeouts or SERVFAILs # This helps prevent faulty DNS servers from hanging up the scan - abort_threshold: 50 + abort_threshold: 20 # Treat hostnames discovered via PTR records as affiliates instead of in-scope # This prevents rDNS results (e.g. 1-2-3-4.ptr.example.com) from triggering # subdomain enumeration against unrelated domains when scanning IP ranges diff --git a/bbot/modules/baddns.py b/bbot/modules/baddns.py index ace72c9c68..515b54b221 100644 --- a/bbot/modules/baddns.py +++ b/bbot/modules/baddns.py @@ -5,7 +5,7 @@ import logging SEVERITY_LEVELS = ("INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL") -CONFIDENCE_LEVELS = ("UNKNOWN", "LOW", "MODERATE", "HIGH", "CONFIRMED") +CONFIDENCE_LEVELS = ("UNKNOWN", "LOW", "MEDIUM", "HIGH", "CONFIRMED") SUBMODULE_MAX_SEVERITY = { "CNAME": "MEDIUM", @@ -45,15 +45,15 @@ class baddns(BaseModule): "created_date": "2024-01-18", "author": "@liquidsec", } - options = {"custom_nameservers": [], "min_severity": "LOW", "min_confidence": "MODERATE", "enabled_submodules": []} + options = {"custom_nameservers": [], "min_severity": "LOW", "min_confidence": "MEDIUM", "enabled_submodules": []} options_desc = { "custom_nameservers": "Force BadDNS to use a list of custom nameservers", "min_severity": "Minimum severity to emit (INFO, LOW, MEDIUM, HIGH, CRITICAL)", - "min_confidence": "Minimum confidence to emit (UNKNOWN, LOW, MODERATE, HIGH, CONFIRMED)", + "min_confidence": "Minimum confidence to emit (UNKNOWN, LOW, MEDIUM, HIGH, CONFIRMED)", "enabled_submodules": "A list 
of submodules to enable. Empty list (default) enables CNAME, TXT and MX Only", } module_threads = 8 - deps_pip = ["baddns~=2.0.0"] + deps_pip = ["baddns~=2.1.0"] def select_modules(self): selected_submodules = [] @@ -96,13 +96,13 @@ async def setup(self): if self.custom_nameservers: self.custom_nameservers = self.helpers.chain_lists(self.custom_nameservers) min_severity = self.config.get("min_severity", "LOW").upper() - min_confidence = self.config.get("min_confidence", "MODERATE").upper() + min_confidence = self.config.get("min_confidence", "MEDIUM").upper() if min_severity not in SEVERITY_LEVELS: self.warning(f"Invalid min_severity: {min_severity}, defaulting to LOW") min_severity = "LOW" if min_confidence not in CONFIDENCE_LEVELS: - self.warning(f"Invalid min_confidence: {min_confidence}, defaulting to MODERATE") - min_confidence = "MODERATE" + self.warning(f"Invalid min_confidence: {min_confidence}, defaulting to MEDIUM") + min_confidence = "MEDIUM" self._min_sev_idx = SEVERITY_LEVELS.index(min_severity) self._min_conf_idx = CONFIDENCE_LEVELS.index(min_confidence) self.signatures = load_signatures() @@ -149,7 +149,7 @@ async def handle_event(self, event): for ModuleClass in self.select_modules(): kwargs = { "http_client_class": self._new_http_client, - "dns_client": self.scan.helpers.dns.resolver, + "dns_client": self.scan.helpers.dns.blastdns, "custom_nameservers": self.custom_nameservers, "signatures": self.signatures, } diff --git a/bbot/modules/baddns_direct.py b/bbot/modules/baddns_direct.py index f4093fa8cc..243f224a11 100644 --- a/bbot/modules/baddns_direct.py +++ b/bbot/modules/baddns_direct.py @@ -10,14 +10,14 @@ class baddns_direct(baddns_module): "created_date": "2024-01-29", "author": "@liquidsec", } - options = {"custom_nameservers": [], "min_severity": "LOW", "min_confidence": "MODERATE"} + options = {"custom_nameservers": [], "min_severity": "LOW", "min_confidence": "MEDIUM"} options_desc = { "custom_nameservers": "Force BadDNS to use a list of 
custom nameservers", "min_severity": "Minimum severity to emit (INFO, LOW, MEDIUM, HIGH, CRITICAL)", - "min_confidence": "Minimum confidence to emit (UNKNOWN, LOW, MODERATE, HIGH, CONFIRMED)", + "min_confidence": "Minimum confidence to emit (UNKNOWN, LOW, MEDIUM, HIGH, CONFIRMED)", } module_threads = 8 - deps_pip = ["baddns~=2.0.0"] + deps_pip = ["baddns~=2.1.0"] scope_distance_modifier = 1 @@ -28,7 +28,7 @@ async def handle_event(self, event): CNAME_direct_module = self.select_modules()[0] kwargs = { "http_client_class": self.scan.helpers.web.AsyncClient, - "dns_client": self.scan.helpers.dns.resolver, + "dns_client": self.scan.helpers.dns.blastdns, "custom_nameservers": self.custom_nameservers, "signatures": self.signatures, "direct_mode": True, diff --git a/bbot/modules/baddns_zone.py b/bbot/modules/baddns_zone.py index 3f81906395..e13140c70a 100644 --- a/bbot/modules/baddns_zone.py +++ b/bbot/modules/baddns_zone.py @@ -10,14 +10,14 @@ class baddns_zone(baddns_module): "created_date": "2024-01-29", "author": "@liquidsec", } - options = {"custom_nameservers": [], "min_severity": "INFO", "min_confidence": "MODERATE"} + options = {"custom_nameservers": [], "min_severity": "INFO", "min_confidence": "MEDIUM"} options_desc = { "custom_nameservers": "Force BadDNS to use a list of custom nameservers", "min_severity": "Minimum severity to emit (INFO, LOW, MEDIUM, HIGH, CRITICAL)", - "min_confidence": "Minimum confidence to emit (UNKNOWN, LOW, MODERATE, HIGH, CONFIRMED)", + "min_confidence": "Minimum confidence to emit (UNKNOWN, LOW, MEDIUM, HIGH, CONFIRMED)", } module_threads = 8 - deps_pip = ["baddns~=2.0.0"] + deps_pip = ["baddns~=2.1.0"] def set_modules(self): self.enabled_submodules = ["NSEC", "zonetransfer"] diff --git a/bbot/modules/base.py b/bbot/modules/base.py index d7356bd809..f561fdcf19 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -1876,7 +1876,7 @@ async def _worker(self): forward_event_reason = "" if acceptable: - context = 
f"{self.name}.handle_event({event, kwargs})" + context = f"{self.name}.handle_event({event})" self.scan.stats.event_consumed(event, self) self.debug(f"Intercepting {event}") try: diff --git a/bbot/modules/dnsbimi.py b/bbot/modules/dnsbimi.py index 4148dae509..d301857d30 100644 --- a/bbot/modules/dnsbimi.py +++ b/bbot/modules/dnsbimi.py @@ -25,7 +25,7 @@ # from bbot.modules.base import BaseModule -from bbot.core.helpers.dns.helpers import service_record +from bbot.core.helpers.dns.helpers import record_to_text, service_record import re @@ -93,50 +93,46 @@ async def inspectBIMI(self, event, domain): tags = ["bimi-record", f"bimi-{selector}"] hostname = f"{selector}._bimi.{parent_domain}" - r = await self.helpers.resolve_raw(hostname, type=rdtype) - - if r: - raw_results, errors = r - - for answer in raw_results: - if self.emit_raw_dns_records: - await self.emit_event( - { - "host": hostname, - "type": rdtype, - "answer": answer.to_text(), - }, - "RAW_DNS_RECORD", - parent=event, - tags=tags.append(f"{rdtype.lower()}-record"), - context=f"{rdtype} lookup on {hostname} produced {{event.type}}", - ) - - # we need to strip surrounding quotes and whitespace, as well as fix TXT data that may have been split across two different rdata's - # e.g. 
we will get a single string, but within that string we may have two parts such as: - # answer = '"part 1 that was really long" "part 2 that did not fit in part 1"' - s = answer.to_text().strip('"').strip().replace('" "', "") - - bimi_match = bimi_regex.search(s) - - if bimi_match and bimi_match.group("v") and "bimi" in bimi_match.group("v").lower(): - if bimi_match.group("l") and bimi_match.group("l") != "": - if self.emit_urls: - await self.emit_event( - bimi_match.group("l"), - "URL_UNVERIFIED", - parent=event, - tags=tags.append("bimi-location"), - ) - - if bimi_match.group("a") and bimi_match.group("a") != "": - if self.emit_urls: - await self.emit_event( - bimi_match.group("a"), - "URL_UNVERIFIED", - parent=event, - tags=tags.append("bimi-authority"), - ) + response = await self.helpers.dns.resolve_full(hostname, rdtype) + + for answer in response.response.answers: + text_answer = record_to_text(answer) + if self.emit_raw_dns_records: + await self.emit_event( + { + "host": hostname, + "type": rdtype, + "answer": text_answer, + }, + "RAW_DNS_RECORD", + parent=event, + tags=tags.append(f"{rdtype.lower()}-record"), + context=f"{rdtype} lookup on {hostname} produced {{event.type}}", + ) + + # record_to_text already joins multi-string TXT records and omits dnspython-style quoting + s = text_answer.strip() + + bimi_match = bimi_regex.search(s) + + if bimi_match and bimi_match.group("v") and "bimi" in bimi_match.group("v").lower(): + if bimi_match.group("l") and bimi_match.group("l") != "": + if self.emit_urls: + await self.emit_event( + bimi_match.group("l"), + "URL_UNVERIFIED", + parent=event, + tags=tags.append("bimi-location"), + ) + + if bimi_match.group("a") and bimi_match.group("a") != "": + if self.emit_urls: + await self.emit_event( + bimi_match.group("a"), + "URL_UNVERIFIED", + parent=event, + tags=tags.append("bimi-authority"), + ) async def handle_event(self, event): await self.inspectBIMI(event, event.host) diff --git a/bbot/modules/dnscaa.py 
b/bbot/modules/dnscaa.py index fad74bd947..0f8fa9ee96 100644 --- a/bbot/modules/dnscaa.py +++ b/bbot/modules/dnscaa.py @@ -21,22 +21,8 @@ from bbot.modules.base import BaseModule -import re - from bbot.core.helpers.regexes import dns_name_extraction_regex, email_regex, url_regexes -# Handle '0 iodef "mailto:support@hcaptcha.com"' -# Handle '1 iodef "https://some.host.tld/caa;"' -# Handle '0 issue "pki.goog; cansignhttpexchanges=yes; somethingelse=1"' -# Handle '1 issue ";"' == explicit denial for any wildcard issuance. -# Handle '128 issuewild "comodoca.com"' -# Handle '128 issuewild ";"' == explicit denial for any wildcard issuance. -_caa_regex = r"^(?P[0-9]+) +(?P\w+) +\"(?P[^;\"]*);* *(?P[^\"]*)\"$" -caa_regex = re.compile(_caa_regex) - -_caa_extensions_kvp_regex = r"(?P\w+)=(?P[^;]+)" -caa_extensions_kvp_regex = re.compile(_caa_extensions_kvp_regex) - class dnscaa(BaseModule): watched_events = ["DNS_NAME"] @@ -78,42 +64,49 @@ async def filter_event(self, event): async def handle_event(self, event): tags = ["caa-record"] - r = await self.helpers.resolve_raw(event.host, type="caa") - - if r: - raw_results, errors = r - - for answer in raw_results: - s = answer.to_text().strip().replace('" "', "") - - # validate CAA record vi regex so that we can determine what to do with it. - caa_match = caa_regex.search(s) - - if caa_match and caa_match.group("flags") and caa_match.group("property") and caa_match.group("text"): - # it's legit. 
- if caa_match.group("property").lower() == "iodef": - if self._emails: - for match in email_regex.finditer(caa_match.group("text")): - start, end = match.span() - email = caa_match.group("text")[start:end] - - await self.emit_event(email, "EMAIL_ADDRESS", tags=tags, parent=event) - - if self._urls: - for url_regex in url_regexes: - for match in url_regex.finditer(caa_match.group("text")): - start, end = match.span() - url = caa_match.group("text")[start:end].strip('"').strip() - - await self.emit_event(url, "URL_UNVERIFIED", tags=tags, parent=event) - - elif caa_match.group("property").lower().startswith("issue"): - if self._dns_names: - for match in dns_name_extraction_regex.finditer(caa_match.group("text")): - start, end = match.span() - name = caa_match.group("text")[start:end] - - await self.emit_event(name, "DNS_NAME", tags=tags, parent=event) + response = await self.helpers.dns.resolve_full(event.host, "CAA") + + for answer in response.response.answers: + caa = answer.rdata.get("CAA") + if not isinstance(caa, dict): + continue + + tag = (caa.get("tag") or "").lower() + value = caa.get("value") or {} + + # iodef -> "Url" containing mailto: or https:// for incident reporting + if tag == "iodef": + target = value.get("Url") if isinstance(value, dict) else None + if not target: + continue + if self._emails: + for match in email_regex.finditer(target): + await self.emit_event( + target[match.start() : match.end()], "EMAIL_ADDRESS", tags=tags, parent=event + ) + if self._urls: + for url_regex in url_regexes: + for match in url_regex.finditer(target): + await self.emit_event( + target[match.start() : match.end()].strip('"').strip(), + "URL_UNVERIFIED", + tags=tags, + parent=event, + ) + + # issue / issuewild -> "Issuer" containing the CA's domain + elif tag.startswith("issue"): + if not self._dns_names: + continue + issuer = value.get("Issuer") if isinstance(value, dict) else None + # blastdns models this as ["domain", [extensions]]; an empty issuer + # ("explicit 
denial") resolves to None or [None, []] + if isinstance(issuer, (list, tuple)) and issuer and issuer[0]: + name_text = str(issuer[0]) + for match in dns_name_extraction_regex.finditer(name_text): + await self.emit_event( + name_text[match.start() : match.end()], "DNS_NAME", tags=tags, parent=event + ) # EOF diff --git a/bbot/modules/dnstlsrpt.py b/bbot/modules/dnstlsrpt.py index 8d2976e93c..7c45759478 100644 --- a/bbot/modules/dnstlsrpt.py +++ b/bbot/modules/dnstlsrpt.py @@ -15,7 +15,7 @@ # e.g. tlsrpt@%{UNIQUE_ID}%.hosted.service.provider is usually a tenant specific ID. from bbot.modules.base import BaseModule -from bbot.core.helpers.dns.helpers import service_record +from bbot.core.helpers.dns.helpers import record_to_text, service_record import re @@ -77,62 +77,58 @@ async def handle_event(self, event): tags = ["tlsrpt-record"] hostname = f"_smtp._tls.{event.host}" - r = await self.helpers.resolve_raw(hostname, type=rdtype) - - if r: - raw_results, errors = r - for answer in raw_results: - if self.emit_raw_dns_records: - await self.emit_event( - {"host": hostname, "type": rdtype, "answer": answer.to_text()}, - "RAW_DNS_RECORD", - parent=event, - tags=tags.append(f"{rdtype.lower()}-record"), - context=f"{rdtype} lookup on {hostname} produced {{event.type}}", - ) - - # we need to fix TXT data that may have been split across two different rdata's - # e.g. we will get a single string, but within that string we may have two parts such as: - # answer = '"part 1 that was really long" "part 2 that did not fit in part 1"' - # NOTE: the leading and trailing double quotes are essential as part of a raw DNS TXT record, or another record type that contains a free form text string as a component. 
- s = answer.to_text().strip('"').replace('" "', "") - - # validate TLSRPT record, tag appropriately - tlsrpt_match = tlsrpt_regex.search(s) - - if ( - tlsrpt_match - and tlsrpt_match.group("v") - and tlsrpt_match.group("kvps") - and tlsrpt_match.group("kvps") != "" - ): - for kvp_match in tlsrpt_kvp_regex.finditer(tlsrpt_match.group("kvps")): - key = kvp_match.group("k").lower() - - if key == "rua": - for csul_match in csul.finditer(kvp_match.group("v")): - if csul_match.group("uri"): - for match in email_regex.finditer(csul_match.group("uri")): + response = await self.helpers.dns.resolve_full(hostname, rdtype) + + for answer in response.response.answers: + text_answer = record_to_text(answer) + if self.emit_raw_dns_records: + await self.emit_event( + {"host": hostname, "type": rdtype, "answer": text_answer}, + "RAW_DNS_RECORD", + parent=event, + tags=tags.append(f"{rdtype.lower()}-record"), + context=f"{rdtype} lookup on {hostname} produced {{event.type}}", + ) + + # record_to_text already joins multi-string TXT records and omits dnspython-style quoting + s = text_answer + + # validate TLSRPT record, tag appropriately + tlsrpt_match = tlsrpt_regex.search(s) + + if ( + tlsrpt_match + and tlsrpt_match.group("v") + and tlsrpt_match.group("kvps") + and tlsrpt_match.group("kvps") != "" + ): + for kvp_match in tlsrpt_kvp_regex.finditer(tlsrpt_match.group("kvps")): + key = kvp_match.group("k").lower() + + if key == "rua": + for csul_match in csul.finditer(kvp_match.group("v")): + if csul_match.group("uri"): + for match in email_regex.finditer(csul_match.group("uri")): + start, end = match.span() + email = csul_match.group("uri")[start:end] + + if self.emit_emails: + await self.emit_event( + email, + "EMAIL_ADDRESS", + tags=tags.append(f"tlsrpt-record-{key}"), + parent=event, + ) + + for url_regex in url_regexes: + for match in url_regex.finditer(csul_match.group("uri")): start, end = match.span() - email = csul_match.group("uri")[start:end] + url = 
csul_match.group("uri")[start:end] - if self.emit_emails: + if self.emit_urls: await self.emit_event( - email, - "EMAIL_ADDRESS", + url, + "URL_UNVERIFIED", tags=tags.append(f"tlsrpt-record-{key}"), parent=event, ) - - for url_regex in url_regexes: - for match in url_regex.finditer(csul_match.group("uri")): - start, end = match.span() - url = csul_match.group("uri")[start:end] - - if self.emit_urls: - await self.emit_event( - url, - "URL_UNVERIFIED", - tags=tags.append(f"tlsrpt-record-{key}"), - parent=event, - ) diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index f2472025c9..2e90c5f9f8 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -2,9 +2,10 @@ import ipaddress from contextlib import suppress +from blastdns import DNSResult + from bbot.errors import ValidationError -from bbot.core.helpers.dns.engine import all_rdtypes -from bbot.core.helpers.dns.helpers import extract_targets +from bbot.core.helpers.dns.helpers import all_rdtypes, extract_targets, record_to_text from bbot.modules.base import BaseInterceptModule, BaseModule @@ -202,7 +203,7 @@ async def emit_dns_children(self, event): context=f"{rdtype} record for {event.host} contains {{event.type}}: {{event.host}}", ) except ValidationError as e: - self.warning(f'Event validation failed for DNS child of {event}: "{child_host}" ({rdtype}): {e}') + self.trace(f'Event validation failed for DNS child of {event}: "{child_host}" ({rdtype}): {e}') continue # tag PTR-derived children so downstream logic can identify them @@ -227,7 +228,7 @@ async def emit_dns_children_raw(self, event, dns_tags): tags = {t for t in dns_tags if rdtype_lower in t.split("-")} if self.emit_raw_records and rdtype not in ("A", "AAAA", "CNAME", "PTR"): for answer in answers: - text_answer = answer.to_text() + text_answer = record_to_text(answer) child_hash = hash(f"{event.host}:{rdtype}:{text_answer}") if child_hash not in self.children_emitted_raw: 
self.children_emitted_raw.add(child_hash) @@ -270,23 +271,23 @@ async def resolve_event(self, event, types): if not types: return event_host = str(event.host) - queries = [(event_host, rdtype) for rdtype in types] - dns_errors = {} - async for (query, rdtype), (answers, errors) in self.helpers.dns.resolve_raw_batch(queries): + results = await self.helpers.dns.resolve_multi_full(event_host, list(types)) + for rdtype, response in results.items(): rdtype = sys.intern(rdtype) - # errors - try: - dns_errors[rdtype].update(errors) - except KeyError: - dns_errors[rdtype] = set(errors) + if not isinstance(response, DNSResult): + # blastdns returns a DNSError for this rdtype; tag and move on + if rdtype not in event.dns_children: + event.add_tag(f"{rdtype}-error") + continue + + answers = response.response.answers + if not answers: + continue + + event.add_tag(f"{rdtype}-record") + # blastdns hands us an already-unique list[Record] -- store as-is, no copy + event.raw_dns_records[rdtype] = answers for answer in answers: - event.add_tag(f"{rdtype}-record") - # raw dnspython answers - try: - event.raw_dns_records[rdtype].add(answer) - except KeyError: - event.raw_dns_records[rdtype] = {answer} - # hosts for _rdtype, host in extract_targets(answer): _rdtype = sys.intern(_rdtype) host = sys.intern(host) @@ -302,12 +303,6 @@ async def resolve_event(self, event, types): except ValueError: continue - # tag event with errors - for rdtype, errors in dns_errors.items(): - # only consider it an error if there weren't any results for that rdtype - if errors and rdtype not in event.dns_children: - event.add_tag(f"{rdtype}-error") - def get_dns_parent(self, event): """ Get the first parent DNS_NAME / IP_ADDRESS of an event. If one isn't found, create it. 
diff --git a/bbot/presets/baddns.yml b/bbot/presets/baddns.yml index c737257011..d6e0cf76aa 100644 --- a/bbot/presets/baddns.yml +++ b/bbot/presets/baddns.yml @@ -8,4 +8,4 @@ config: baddns: enabled_submodules: [CNAME, MX, TXT] min_severity: LOW - min_confidence: MODERATE + min_confidence: MEDIUM diff --git a/bbot/scanner/target.py b/bbot/scanner/target.py index d029b6f567..90f6f3f1eb 100644 --- a/bbot/scanner/target.py +++ b/bbot/scanner/target.py @@ -141,11 +141,14 @@ def _make_event_seed(self, target, raise_error=False): try: return EventSeed(target) except ValidationError: + import traceback + msg = f"Invalid target: '{target}'" if raise_error: raise KeyError(msg) else: log.warning(msg) + log.trace("".join(traceback.format_stack())) def __contains__(self, other): if isinstance(other, BaseTarget): @@ -272,8 +275,11 @@ def get(self, host, **kwargs): # first, check event's host against blacklist try: event_seed = self._make_event_seed(host, raise_error=raise_error) - host = event_seed.host - to_match = event_seed.data + if event_seed is not None: + host = event_seed.host + to_match = event_seed.data + else: + to_match = str(host) except ValidationError: to_match = str(host) event_result = super().get(host) diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index 9bb89c7070..aab330aeb9 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -1,8 +1,10 @@ from ..bbot_fixtures import * -from bbot.core.helpers.dns.helpers import extract_targets, service_record, common_srvs +from bbot.core.helpers.dns.helpers import all_rdtypes, common_srvs, extract_targets, record_to_text, service_record +# Common mock dataset (zone-file format -- TXT character-strings must be quoted +# to keep whitespace inside the payload from being tokenized). 
mock_records = { "one.one.one.one": { "A": ["1.1.1.1", "1.0.0.1"], @@ -16,688 +18,99 @@ @pytest.mark.asyncio -async def test_dns_engine(bbot_scanner): +async def test_dns_helper(bbot_scanner): scan = bbot_scanner() - await scan._prep() - await scan.helpers._mock_dns( - {"one.one.one.one": {"A": ["1.1.1.1"]}, "1.1.1.1.in-addr.arpa": {"PTR": ["one.one.one.one"]}} - ) - result = await scan.helpers.resolve("one.one.one.one") - assert "1.1.1.1" in result - assert "2606:4700:4700::1111" not in result - - results = [_ async for _ in scan.helpers.resolve_batch(("one.one.one.one", "1.1.1.1"))] - pass_1 = False - pass_2 = False - for query, result in results: - if query == "one.one.one.one" and "1.1.1.1" in result: - pass_1 = True - elif query == "1.1.1.1" and "one.one.one.one" in result: - pass_2 = True - assert pass_1 and pass_2 - - results = [_ async for _ in scan.helpers.resolve_raw_batch((("one.one.one.one", "A"), ("1.1.1.1", "PTR")))] - pass_1 = False - pass_2 = False - for (query, rdtype), (answers, errors) in results: - results = [] - for answer in answers: - for t in extract_targets(answer): - results.append(t[1]) - if query == "one.one.one.one" and "1.1.1.1" in results: - pass_1 = True - elif query == "1.1.1.1" and "one.one.one.one" in results: - pass_2 = True - assert pass_1 and pass_2 - - from bbot.core.helpers.dns.mock import MockResolver - - # ensure dns records are being properly cleaned - mockresolver = MockResolver({"evilcorp.com": {"MX": ["0 ."]}}) - mx_records = await mockresolver.resolve("evilcorp.com", rdtype="MX") - results = set() - for r in mx_records: - results.update(extract_targets(r)) - assert not results - - await scan._cleanup() + await scan.helpers.dns._mock_dns({"asdf.example.com": {"A": ["1.2.3.4"]}}) + result = await scan.helpers.dns.resolve("asdf.example.com") + assert "1.2.3.4" in result @pytest.mark.asyncio -async def test_dns_resolution(bbot_scanner): - scan = bbot_scanner("1.1.1.1") - - from bbot.core.helpers.dns.engine import DNSEngine 
- - dnsengine = DNSEngine(None) - await dnsengine._mock_dns(mock_records) - - # lowest level functions - a_responses = await dnsengine._resolve_hostname("one.one.one.one") - aaaa_responses = await dnsengine._resolve_hostname("one.one.one.one", rdtype="AAAA") - ip_responses = await dnsengine._resolve_ip("1.1.1.1") - assert a_responses[0].response.answer[0][0].address in ("1.1.1.1", "1.0.0.1") - assert aaaa_responses[0].response.answer[0][0].address in ("2606:4700:4700::1111", "2606:4700:4700::1001") - assert ip_responses[0].response.answer[0][0].target.to_text() in ("one.one.one.one.",) - - # mid level functions - answers, errors = await dnsengine.resolve_raw("one.one.one.one", type="A") - responses = [] - for answer in answers: - responses += list(extract_targets(answer)) - assert ("A", "1.1.1.1") in responses - assert ("AAAA", "2606:4700:4700::1111") not in responses - answers, errors = await dnsengine.resolve_raw("one.one.one.one", type="AAAA") - responses = [] - for answer in answers: - responses += list(extract_targets(answer)) - assert ("A", "1.1.1.1") not in responses - assert ("AAAA", "2606:4700:4700::1111") in responses - answers, errors = await dnsengine.resolve_raw("1.1.1.1") - responses = [] - for answer in answers: - responses += list(extract_targets(answer)) - assert ("PTR", "one.one.one.one") in responses - - await dnsengine._shutdown() - - # high level functions - dnsengine = DNSEngine(None) - assert "1.1.1.1" in await dnsengine.resolve("one.one.one.one") - assert "2606:4700:4700::1111" in await dnsengine.resolve("one.one.one.one", type="AAAA") - assert "one.one.one.one" in await dnsengine.resolve("1.1.1.1") - for rdtype in ("NS", "SOA", "MX", "TXT"): - results = await dnsengine.resolve("google.com", type=rdtype) - assert len(results) > 0 - - # batch resolution - batch_results = [r async for r in dnsengine.resolve_batch(["1.1.1.1", "one.one.one.one"])] - assert len(batch_results) == 2 - batch_results = dict(batch_results) - assert any(x in 
batch_results["one.one.one.one"] for x in ("1.1.1.1", "1.0.0.1")) - assert "one.one.one.one" in batch_results["1.1.1.1"] - - # custom batch resolution - batch_results = [r async for r in dnsengine.resolve_raw_batch([("1.1.1.1", "PTR"), ("one.one.one.one", "A")])] - batch_results_new = [] - for query, (answers, errors) in batch_results: - for answer in answers: - batch_results_new.append((answer.to_text(), answer.rdtype.name)) - assert len(batch_results_new) == 3 - assert any(answer == "1.0.0.1" and rdtype == "A" for answer, rdtype in batch_results_new) - assert any(answer == "one.one.one.one." and rdtype == "PTR" for answer, rdtype in batch_results_new) - - # dns cache - dnsengine._dns_cache.clear() - assert hash(("1.1.1.1", "PTR")) not in dnsengine._dns_cache - assert hash(("one.one.one.one", "A")) not in dnsengine._dns_cache - assert hash(("one.one.one.one", "AAAA")) not in dnsengine._dns_cache - await dnsengine.resolve("1.1.1.1", use_cache=False) - await dnsengine.resolve("one.one.one.one", use_cache=False) - assert hash(("1.1.1.1", "PTR")) not in dnsengine._dns_cache - assert hash(("one.one.one.one", "A")) not in dnsengine._dns_cache - assert hash(("one.one.one.one", "AAAA")) not in dnsengine._dns_cache - - await dnsengine.resolve("1.1.1.1") - assert hash(("1.1.1.1", "PTR")) in dnsengine._dns_cache - await dnsengine.resolve("one.one.one.one", type="A") - assert hash(("one.one.one.one", "A")) in dnsengine._dns_cache - assert hash(("one.one.one.one", "AAAA")) not in dnsengine._dns_cache - dnsengine._dns_cache.clear() - await dnsengine.resolve("one.one.one.one", type="AAAA") - assert hash(("one.one.one.one", "AAAA")) in dnsengine._dns_cache - assert hash(("one.one.one.one", "A")) not in dnsengine._dns_cache - - await dnsengine._shutdown() - - # Ensure events with hosts have resolved_hosts attribute populated - await scan._prep() - resolved_hosts_event1 = scan.make_event("one.one.one.one", "DNS_NAME", parent=scan.root_event) - resolved_hosts_event2 = 
scan.make_event("http://one.one.one.one/", "URL_UNVERIFIED", parent=scan.root_event) - dnsresolve = scan.modules["dnsresolve"] - await dnsresolve.handle_event(resolved_hosts_event1) - await dnsresolve.handle_event(resolved_hosts_event2) - assert "1.1.1.1" in resolved_hosts_event2.resolved_hosts - # URL event should not have dns_children - assert not resolved_hosts_event2.dns_children - assert resolved_hosts_event1.resolved_hosts == resolved_hosts_event2.resolved_hosts - # DNS_NAME event should have dns_children - assert "1.1.1.1" in resolved_hosts_event1.dns_children["A"] - assert "A" in resolved_hosts_event1.raw_dns_records - assert "AAAA" in resolved_hosts_event1.raw_dns_records - assert "a-record" in resolved_hosts_event1.tags - assert "a-record" not in resolved_hosts_event2.tags - - scan2 = bbot_scanner("evilcorp.com", config={"dns": {"minimal": False}}) - await scan2._prep() - await scan2.helpers.dns._mock_dns( +async def test_resolve_full_and_extract(bbot_scanner): + """resolve_full + extract_targets + record_to_text on the new blastdns Records.""" + scan = bbot_scanner() + await scan.helpers.dns._mock_dns( { - "evilcorp.com": {"TXT": ['"v=spf1 include:cloudprovider.com ~all"']}, - "cloudprovider.com": {"A": ["1.2.3.4"]}, - }, - ) - events = [e async for e in scan2.async_start()] - assert 1 == len( - [e for e in events if e.type == "DNS_NAME" and e.data == "cloudprovider.com" and "affiliate" in e.tags] - ) - - await scan._cleanup() - await scan2._cleanup() - - -@pytest.mark.asyncio -async def test_wildcards(bbot_scanner): - scan = bbot_scanner("1.1.1.1") - await scan._prep() - helpers = scan.helpers - - from bbot.core.helpers.dns.engine import DNSEngine, all_rdtypes - - dnsengine = DNSEngine(None, debug=True) - - # is_wildcard_domain - wildcard_domains = await dnsengine.is_wildcard_domain("asdf.github.io", all_rdtypes) - assert len(dnsengine._wildcard_cache) == len(all_rdtypes) + (len(all_rdtypes) - 2) - for rdtype in all_rdtypes: - assert hash(("github.io", 
rdtype)) in dnsengine._wildcard_cache - if rdtype not in ("A", "AAAA"): - assert hash(("asdf.github.io", rdtype)) in dnsengine._wildcard_cache - assert "github.io" in wildcard_domains - assert "A" in wildcard_domains["github.io"] - assert "SRV" not in wildcard_domains["github.io"] - assert wildcard_domains["github.io"]["A"] and all(helpers.is_ip(r) for r in wildcard_domains["github.io"]["A"][0]) - dnsengine._wildcard_cache.clear() - - # is_wildcard - for test_domain in ("blacklanternsecurity.github.io", "asdf.asdf.asdf.github.io"): - wildcard_rdtypes = await dnsengine.is_wildcard(test_domain, all_rdtypes) - assert "A" in wildcard_rdtypes - assert "SRV" not in wildcard_rdtypes - assert wildcard_rdtypes["A"] == (True, "github.io") - assert wildcard_rdtypes["AAAA"] == (True, "github.io") - assert len(dnsengine._wildcard_cache) == 2 - for rdtype in ("A", "AAAA"): - assert hash(("github.io", rdtype)) in dnsengine._wildcard_cache - assert len(dnsengine._wildcard_cache[hash(("github.io", rdtype))]) == 2 - assert len(dnsengine._wildcard_cache[hash(("github.io", rdtype))][0]) > 0 - assert len(dnsengine._wildcard_cache[hash(("github.io", rdtype))][1]) > 0 - dnsengine._wildcard_cache.clear() - - ### wildcard TXT record ### - - custom_lookup = """ -def custom_lookup(query, rdtype): - if rdtype == "TXT" and query.strip(".").endswith("test.evilcorp.com"): - return {""} -""" - - mock_data = { - "evilcorp.com": {"A": ["127.0.0.1"]}, - "test.evilcorp.com": {"A": ["127.0.0.2"]}, - "www.test.evilcorp.com": {"AAAA": ["dead::beef"]}, - } - - # basic sanity checks - - await dnsengine._mock_dns(mock_data, custom_lookup_fn=custom_lookup) - - a_result = await dnsengine.resolve("evilcorp.com") - assert a_result == {"127.0.0.1"} - aaaa_result = await dnsengine.resolve("www.test.evilcorp.com", type="AAAA") - assert aaaa_result == {"dead::beef"} - txt_result = await dnsengine.resolve("asdf.www.test.evilcorp.com", type="TXT") - assert txt_result == set() - txt_result_raw, errors = await 
dnsengine.resolve_raw("asdf.www.test.evilcorp.com", type="TXT") - txt_result_raw = list(txt_result_raw) - assert txt_result_raw - - await dnsengine._shutdown() - - # first, we check with wildcard detection disabled - - scan = bbot_scanner( - "evilcorp.com", - seeds=["bbot.fdsa.www.test.evilcorp.com"], - config={ - "dns": {"minimal": False, "disable": False, "search_distance": 5, "wildcard_ignore": ["evilcorp.com"]}, - "speculate": True, - }, - ) - await scan._prep() - await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup) - - events = [e async for e in scan.async_start()] - - assert len(events) == 12 - assert len([e for e in events if e.type == "DNS_NAME"]) == 5 - assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4 - assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [ - "bbot.fdsa.www.test.evilcorp.com", - "evilcorp.com", - "fdsa.www.test.evilcorp.com", - "test.evilcorp.com", - "www.test.evilcorp.com", - ] - - dns_names_by_host = {e.host: e for e in events if e.type == "DNS_NAME"} - assert dns_names_by_host["evilcorp.com"].tags == { - "domain", - "private-ip", - "in-scope", - "a-record", - } - assert dns_names_by_host["evilcorp.com"].resolved_hosts == {"127.0.0.1"} - assert dns_names_by_host["test.evilcorp.com"].tags == { - "subdomain", - "private-ip", - "in-scope", - "a-record", - "txt-record", - } - assert dns_names_by_host["test.evilcorp.com"].resolved_hosts == {"127.0.0.2"} - assert dns_names_by_host["www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "aaaa-record", "txt-record"} - assert dns_names_by_host["www.test.evilcorp.com"].resolved_hosts == {"dead::beef"} - assert dns_names_by_host["fdsa.www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} - assert dns_names_by_host["fdsa.www.test.evilcorp.com"].resolved_hosts == set() - assert dns_names_by_host["bbot.fdsa.www.test.evilcorp.com"].tags == { - "target", - "subdomain", - "in-scope", - "txt-record", - "seed", - } - assert 
dns_names_by_host["bbot.fdsa.www.test.evilcorp.com"].resolved_hosts == set() - - raw_records_by_host = {e.host: e for e in events if e.type == "RAW_DNS_RECORD"} - assert raw_records_by_host["test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} - assert raw_records_by_host["test.evilcorp.com"].resolved_hosts == {"127.0.0.2"} - assert raw_records_by_host["www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} - assert raw_records_by_host["www.test.evilcorp.com"].resolved_hosts == {"dead::beef"} - assert raw_records_by_host["fdsa.www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} - assert raw_records_by_host["fdsa.www.test.evilcorp.com"].resolved_hosts == set() - assert raw_records_by_host["bbot.fdsa.www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} - assert raw_records_by_host["bbot.fdsa.www.test.evilcorp.com"].resolved_hosts == set() - - # then we run it again with wildcard detection enabled - - scan = bbot_scanner( - "evilcorp.com", - seeds=["bbot.fdsa.www.test.evilcorp.com"], - config={ - "dns": {"minimal": False, "disable": False, "search_distance": 5, "wildcard_ignore": []}, - "speculate": True, - }, + "one.one.one.one": { + "A": ["1.1.1.1"], + "AAAA": ["2606:4700:4700::1111"], + "MX": ["10 mail.one.one.one.one."], + }, + } ) - await scan._prep() - await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup) - - events = [e async for e in scan.async_start()] - - assert len(events) == 12 - assert len([e for e in events if e.type == "DNS_NAME"]) == 5 - assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4 - assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [ - "_wildcard.test.evilcorp.com", - "bbot.fdsa.www.test.evilcorp.com", - "evilcorp.com", - "test.evilcorp.com", - "www.test.evilcorp.com", - ] - - dns_names_by_host = {e.host: e for e in events if e.type == "DNS_NAME"} - assert dns_names_by_host["evilcorp.com"].tags == { - "domain", - 
"private-ip", - "in-scope", - "a-record", - } - assert dns_names_by_host["evilcorp.com"].resolved_hosts == {"127.0.0.1"} - assert dns_names_by_host["test.evilcorp.com"].tags == { - "subdomain", - "private-ip", - "in-scope", - "a-record", - "txt-record", - } - assert dns_names_by_host["test.evilcorp.com"].resolved_hosts == {"127.0.0.2"} - assert dns_names_by_host["_wildcard.test.evilcorp.com"].tags == { - "subdomain", - "in-scope", - "txt-record", - "txt-wildcard", - "wildcard", - } - assert dns_names_by_host["_wildcard.test.evilcorp.com"].resolved_hosts == set() - assert dns_names_by_host["www.test.evilcorp.com"].tags == { - "subdomain", - "in-scope", - "aaaa-record", - "txt-record", - "txt-wildcard", - "wildcard", - } - assert dns_names_by_host["www.test.evilcorp.com"].resolved_hosts == {"dead::beef"} - assert dns_names_by_host["bbot.fdsa.www.test.evilcorp.com"].tags == { - "target", - "subdomain", - "in-scope", - "txt-record", - "txt-wildcard", - "wildcard", - "seed", - } - assert dns_names_by_host["bbot.fdsa.www.test.evilcorp.com"].resolved_hosts == set() - - raw_records_by_host = {e.host: e for e in events if e.type == "RAW_DNS_RECORD"} - assert raw_records_by_host["test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record"} - assert raw_records_by_host["test.evilcorp.com"].resolved_hosts == {"127.0.0.2"} - assert raw_records_by_host["www.test.evilcorp.com"].tags == {"subdomain", "in-scope", "txt-record", "txt-wildcard"} - assert raw_records_by_host["www.test.evilcorp.com"].resolved_hosts == {"dead::beef"} - assert raw_records_by_host["_wildcard.test.evilcorp.com"].tags == { - "subdomain", - "in-scope", - "txt-record", - "txt-wildcard", - } - assert raw_records_by_host["_wildcard.test.evilcorp.com"].resolved_hosts == set() - assert raw_records_by_host["bbot.fdsa.www.test.evilcorp.com"].tags == { - "subdomain", - "in-scope", - "txt-record", - "txt-wildcard", - } - assert raw_records_by_host["bbot.fdsa.www.test.evilcorp.com"].resolved_hosts == set() - ### 
runaway SRV wildcard ### + a_response = await scan.helpers.dns.resolve_full("one.one.one.one", "A") + a_targets = set() + for ans in a_response.response.answers: + a_targets.update(extract_targets(ans)) + assert ("A", "1.1.1.1") in a_targets - custom_lookup = """ -def custom_lookup(query, rdtype): - if rdtype == "SRV" and query.strip(".").endswith("evilcorp.com"): - return {f"0 100 389 test.{query}"} -""" + mx_response = await scan.helpers.dns.resolve_full("one.one.one.one", "MX") + mx_targets = set() + for ans in mx_response.response.answers: + mx_targets.update(extract_targets(ans)) + # the MX record_to_text matches dnspython's "preference exchange" format + assert record_to_text(ans).endswith("mail.one.one.one.one.") + assert ("MX", "mail.one.one.one.one") in mx_targets - mock_data = { - "evilcorp.com": {"A": ["127.0.0.1"]}, - "test.evilcorp.com": {"AAAA": ["dead::beef"]}, - } - scan = bbot_scanner( - "evilcorp.com", - config={ - "dns": { - "minimal": False, - "disable": False, - "search_distance": 5, - "wildcard_ignore": [], - "runaway_limit": 3, +@pytest.mark.asyncio +async def test_resolve_multi_full(bbot_scanner): + """resolve_multi_full does many rdtypes for one host in a single Rust call.""" + scan = bbot_scanner() + await scan.helpers.dns._mock_dns( + { + "one.one.one.one": { + "A": ["1.1.1.1"], + "AAAA": ["2606:4700:4700::1111"], }, - }, + } ) - await scan._prep() - await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup) - events = [e async for e in scan.async_start()] - - assert len(events) == 11 - assert len([e for e in events if e.type == "DNS_NAME"]) == 5 - assert len([e for e in events if e.type == "RAW_DNS_RECORD"]) == 4 - assert sorted([e.data for e in events if e.type == "DNS_NAME"]) == [ - "evilcorp.com", - "test.evilcorp.com", - "test.test.evilcorp.com", - "test.test.test.evilcorp.com", - "test.test.test.test.evilcorp.com", - ] - - dns_names_by_host = {e.host: e for e in events if e.type == "DNS_NAME"} - assert 
dns_names_by_host["evilcorp.com"].tags == { - "target", - "a-record", - "in-scope", - "domain", - "srv-record", - "private-ip", - "seed", - } - assert dns_names_by_host["test.evilcorp.com"].tags == { - "in-scope", - "srv-record", - "aaaa-record", - "srv-wildcard-possible", - "wildcard-possible", - "subdomain", - } - assert dns_names_by_host["test.test.evilcorp.com"].tags == { - "in-scope", - "srv-record", - "srv-wildcard-possible", - "wildcard-possible", - "subdomain", - } - assert dns_names_by_host["test.test.test.evilcorp.com"].tags == { - "in-scope", - "srv-record", - "srv-wildcard-possible", - "wildcard-possible", - "subdomain", - } - assert dns_names_by_host["test.test.test.test.evilcorp.com"].tags == { - "in-scope", - "srv-record", - "srv-wildcard-possible", - "wildcard-possible", - "subdomain", - "runaway-dns-3", - } + results = await scan.helpers.dns.resolve_multi_full("one.one.one.one", ["A", "AAAA"]) + assert "A" in results and "AAAA" in results + a_addrs = {ans.rdata["A"] for ans in results["A"].response.answers} + aaaa_addrs = {ans.rdata["AAAA"] for ans in results["AAAA"].response.answers} + assert "1.1.1.1" in a_addrs + assert "2606:4700:4700::1111" in aaaa_addrs - raw_records_by_host = {e.host: e for e in events if e.type == "RAW_DNS_RECORD"} - assert raw_records_by_host["evilcorp.com"].tags == {"in-scope", "srv-record", "domain"} - assert raw_records_by_host["test.evilcorp.com"].tags == { - "in-scope", - "srv-record", - "srv-wildcard-possible", - "subdomain", - } - assert raw_records_by_host["test.test.evilcorp.com"].tags == { - "in-scope", - "srv-record", - "srv-wildcard-possible", - "subdomain", - } - assert raw_records_by_host["test.test.test.evilcorp.com"].tags == { - "in-scope", - "srv-record", - "srv-wildcard-possible", - "subdomain", - } - scan = bbot_scanner("1.1.1.1") +@pytest.mark.asyncio +async def test_resolve_event(bbot_scanner): + """end-to-end: dnsresolve uses resolve_multi_full and populates raw_dns_records.""" + scan = 
bbot_scanner("one.one.one.one", "1.1.1.1", config={"dns": {"minimal": False}}) await scan._prep() - helpers = scan.helpers - - # event resolution - wildcard_event1 = scan.make_event("wat.asdf.fdsa.github.io", "DNS_NAME", parent=scan.root_event) - wildcard_event1.scope_distance = 0 - wildcard_event2 = scan.make_event("wats.asd.fdsa.github.io", "DNS_NAME", parent=scan.root_event) - wildcard_event2.scope_distance = 0 - wildcard_event3 = scan.make_event("github.io", "DNS_NAME", parent=scan.root_event) - wildcard_event3.scope_distance = 0 + await scan.helpers.dns._mock_dns(mock_records) - await scan._prep() + resolved_event = scan.make_event("one.one.one.one", "DNS_NAME", parent=scan.root_event) + url_event = scan.make_event("http://one.one.one.one/", "URL_UNVERIFIED", parent=scan.root_event) dnsresolve = scan.modules["dnsresolve"] - await dnsresolve.handle_event(wildcard_event1) - await dnsresolve.handle_event(wildcard_event2) - await dnsresolve.handle_event(wildcard_event3) - assert "wildcard" in wildcard_event1.tags - assert "a-wildcard" in wildcard_event1.tags - assert "srv-wildcard" not in wildcard_event1.tags - assert "wildcard" in wildcard_event2.tags - assert "a-wildcard" in wildcard_event2.tags - assert "srv-wildcard" not in wildcard_event2.tags - assert wildcard_event1.data == "_wildcard.github.io" - assert wildcard_event2.data == "_wildcard.github.io" - assert wildcard_event3.data == "github.io" - - # dns resolve distance - event_distance_0 = scan.make_event( - "8.8.8.8", module=scan.modules["dnsresolve"]._make_dummy_module("PTR"), parent=scan.root_event - ) - assert event_distance_0.dns_resolve_distance == 0 - event_distance_1 = scan.make_event( - "evilcorp.com", module=scan.modules["dnsresolve"]._make_dummy_module("A"), parent=event_distance_0 - ) - assert event_distance_1.dns_resolve_distance == 1 - event_distance_2 = scan.make_event( - "1.2.3.4", module=scan.modules["dnsresolve"]._make_dummy_module("PTR"), parent=event_distance_1 - ) - assert 
event_distance_2.dns_resolve_distance == 1 - event_distance_3 = scan.make_event( - "evilcorp.org", module=scan.modules["dnsresolve"]._make_dummy_module("A"), parent=event_distance_2 - ) - assert event_distance_3.dns_resolve_distance == 2 + await dnsresolve.handle_event(resolved_event) + await dnsresolve.handle_event(url_event) + + assert "1.1.1.1" in url_event.resolved_hosts + # URL events don't get dns_children populated + assert not url_event.dns_children + assert resolved_event.resolved_hosts == url_event.resolved_hosts + # DNS_NAME events do + assert "1.1.1.1" in resolved_event.dns_children["A"] + assert "A" in resolved_event.raw_dns_records + assert "AAAA" in resolved_event.raw_dns_records + assert "a-record" in resolved_event.tags + assert "a-record" not in url_event.tags await scan._cleanup() - from bbot.scanner import Scanner - - # test with full scan - - scan2 = Scanner( - "github.io", - seeds=["asdfl.gashdgkjsadgsdf.github.io"], - config={"dns": {"minimal": False}}, - ) - await scan2._prep() - other_event = scan2.make_event( - "lkjg.sdfgsg.jgkhajshdsadf.github.io", module=scan2.modules["dnsresolve"], parent=scan2.root_event - ) - await scan2.ingress_module.queue_event(other_event, {}) - events = [e async for e in scan2.async_start()] - - assert len(events) == 4 - assert 2 == len([e for e in events if e.type == "SCAN"]) - unmodified_wildcard_events = [ - e for e in events if e.type == "DNS_NAME" and e.data == "asdfl.gashdgkjsadgsdf.github.io" - ] - assert len(unmodified_wildcard_events) == 1 - assert unmodified_wildcard_events[0].tags.issuperset( - { - "a-record", - "target", - "aaaa-wildcard", - "in-scope", - "subdomain", - "aaaa-record", - "wildcard", - "a-wildcard", - } - ) - modified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and e.data == "_wildcard.github.io"] - assert len(modified_wildcard_events) == 1 - assert modified_wildcard_events[0].tags.issuperset( - { - "a-record", - "aaaa-wildcard", - "in-scope", - "subdomain", - 
"aaaa-record", - "wildcard", - "a-wildcard", - } - ) - assert modified_wildcard_events[0].host_original == "lkjg.sdfgsg.jgkhajshdsadf.github.io" - - # test with full scan (wildcard detection disabled for domain) - scan2 = Scanner( - "github.io", - seeds=["asdfl.gashdgkjsadgsdf.github.io"], - config={"dns": {"wildcard_ignore": ["github.io"]}}, - exclude_modules=["cloudcheck"], - ) - await scan2._prep() - other_event = scan2.make_event( - "lkjg.sdfgsg.jgkhajshdsadf.github.io", module=scan2.modules["dnsresolve"], parent=scan2.root_event - ) - await scan2.ingress_module.queue_event(other_event, {}) - events = [e async for e in scan2.async_start()] - assert len(events) == 4 - assert 2 == len([e for e in events if e.type == "SCAN"]) - unmodified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and "_wildcard" not in e.data] - assert len(unmodified_wildcard_events) == 2 - assert 1 == len( - [ - e - for e in unmodified_wildcard_events - if e.data == "asdfl.gashdgkjsadgsdf.github.io" - and e.tags.issuperset( - { - "target", - "a-record", - "in-scope", - "subdomain", - "aaaa-record", - } - ) - ] - ) - assert 1 == len( - [ - e - for e in unmodified_wildcard_events - if e.data == "lkjg.sdfgsg.jgkhajshdsadf.github.io" - and e.tags.issuperset( - { - "a-record", - "in-scope", - "subdomain", - "aaaa-record", - } - ) - ] - ) - modified_wildcard_events = [e for e in events if e.type == "DNS_NAME" and e.data == "_wildcard.github.io"] - assert len(modified_wildcard_events) == 0 - @pytest.mark.asyncio -async def test_wildcard_deduplication(bbot_scanner): - custom_lookup = """ -def custom_lookup(query, rdtype): - if rdtype == "TXT" and query.strip(".").endswith("evilcorp.com"): - return {""} -""" - - mock_data = { - "evilcorp.com": {"A": ["127.0.0.1"]}, - } - +async def test_dns_raw_records(bbot_scanner): + """RAW_DNS_RECORD events carry the decoded text representation (no dnspython-style quoting).""" from bbot.modules.base import BaseModule - class DummyModule(BaseModule): 
- watched_events = ["DNS_NAME"] - per_domain_only = True - - async def handle_event(self, event): - for i in range(30): - await self.emit_event(f"www{i}.evilcorp.com", "DNS_NAME", parent=event) - - # scan without omitted event type - scan = bbot_scanner( - "evilcorp.com", config={"dns": {"minimal": False, "wildcard_ignore": []}, "omit_event_types": []} + expected_txt = ( + "v=spf1 ip4:103.151.192.0/23 ip4:185.12.80.0/22 " + "ip4:188.172.128.0/20 ip4:192.161.144.0/20 ip4:216.198.0.0/18 ~all" ) - await scan._prep() - await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup) - dummy_module = DummyModule(scan) - scan.modules["dummy_module"] = dummy_module - events = [e async for e in scan.async_start()] - dns_name_events = [e for e in events if e.type == "DNS_NAME"] - assert len(dns_name_events) == 2 - assert 1 == len([e for e in dns_name_events if e.data == "_wildcard.evilcorp.com"]) - - -@pytest.mark.asyncio -async def test_dns_raw_records(bbot_scanner): - from bbot.modules.base import BaseModule class DummyModule(BaseModule): watched_events = ["*"] @@ -709,7 +122,7 @@ async def setup(self): async def handle_event(self, event): self.events.append(event) - # scan without omitted event type + # scan without omitted event type -- raw record should both flow through and reach output scan = bbot_scanner("one.one.one.one", "1.1.1.1", config={"dns": {"minimal": False}, "omit_event_types": []}) await scan._prep() await scan.helpers.dns._mock_dns(mock_records) @@ -717,34 +130,16 @@ async def handle_event(self, event): await dummy_module.setup() scan.modules["dummy_module"] = dummy_module events = [e async for e in scan.async_start()] - assert 1 == len([e for e in events if e.type == "RAW_DNS_RECORD"]) - assert 1 == len( - [ - e - for e in events - if e.type == "RAW_DNS_RECORD" - and e.host == "one.one.one.one" - and e.data["host"] == "one.one.one.one" - and e.data["type"] == "TXT" - and e.data["answer"] - == '"v=spf1 ip4:103.151.192.0/23 ip4:185.12.80.0/22 
ip4:188.172.128.0/20 ip4:192.161.144.0/20 ip4:216.198.0.0/18 ~all"' - and e.discovery_context == "TXT lookup on one.one.one.one produced RAW_DNS_RECORD" - ] - ) - assert 1 == len( - [ - e - for e in dummy_module.events - if e.type == "RAW_DNS_RECORD" - and e.host == "one.one.one.one" - and e.data["host"] == "one.one.one.one" - and e.data["type"] == "TXT" - and e.data["answer"] - == '"v=spf1 ip4:103.151.192.0/23 ip4:185.12.80.0/22 ip4:188.172.128.0/20 ip4:192.161.144.0/20 ip4:216.198.0.0/18 ~all"' - and e.discovery_context == "TXT lookup on one.one.one.one produced RAW_DNS_RECORD" - ] - ) - # scan with omitted event type + raw_records = [e for e in events if e.type == "RAW_DNS_RECORD"] + assert len(raw_records) == 1 + rec = raw_records[0] + assert rec.host == "one.one.one.one" + assert rec.data["host"] == "one.one.one.one" + assert rec.data["type"] == "TXT" + assert rec.data["answer"] == expected_txt + assert rec.discovery_context == "TXT lookup on one.one.one.one produced RAW_DNS_RECORD" + + # scan with omitted event type -- no raw records anywhere scan = bbot_scanner("one.one.one.one", config={"dns": {"minimal": False}, "omit_event_types": ["RAW_DNS_RECORD"]}) await scan._prep() await scan.helpers.dns._mock_dns(mock_records) @@ -752,11 +147,10 @@ async def handle_event(self, event): await dummy_module.setup() scan.modules["dummy_module"] = dummy_module events = [e async for e in scan.async_start()] - # no raw records should be emitted assert 0 == len([e for e in events if e.type == "RAW_DNS_RECORD"]) assert 0 == len([e for e in dummy_module.events if e.type == "RAW_DNS_RECORD"]) - # scan with watching module + # scan with watching module -- raw records reach the module but aren't output DummyModule.watched_events = ["RAW_DNS_RECORD"] scan = bbot_scanner("one.one.one.one", config={"dns": {"minimal": False}, "omit_event_types": ["RAW_DNS_RECORD"]}) await scan._prep() @@ -765,22 +159,47 @@ async def handle_event(self, event): await dummy_module.setup() 
scan.modules["dummy_module"] = dummy_module events = [e async for e in scan.async_start()] - # no raw records should be output assert 0 == len([e for e in events if e.type == "RAW_DNS_RECORD"]) - # but they should still make it to the module - assert 1 == len( - [ - e - for e in dummy_module.events - if e.type == "RAW_DNS_RECORD" - and e.host == "one.one.one.one" - and e.data["host"] == "one.one.one.one" - and e.data["type"] == "TXT" - and e.data["answer"] - == '"v=spf1 ip4:103.151.192.0/23 ip4:185.12.80.0/22 ip4:188.172.128.0/20 ip4:192.161.144.0/20 ip4:216.198.0.0/18 ~all"' - and e.discovery_context == "TXT lookup on one.one.one.one produced RAW_DNS_RECORD" - ] - ) + raw_records = [e for e in dummy_module.events if e.type == "RAW_DNS_RECORD"] + assert len(raw_records) == 1 + assert raw_records[0].data["answer"] == expected_txt + + +@pytest.mark.asyncio +async def test_wildcards(bbot_scanner): + """is_wildcard / is_wildcard_domain against a mocked wildcard zone. + + blastdns MockClient supports ``regex:`` prefixed hosts for pattern matching, + which is how we simulate "every random subdomain resolves to the wildcard". + """ + # The test config preloads wildcard_ignore with several common domains (incl. evilcorp.com) + # so override it here to actually exercise wildcard detection. 
+ scan = bbot_scanner("evilcorp.com", config={"dns": {"wildcard_ignore": []}}) + await scan._prep() + + # *.test.evilcorp.com is a wildcard pointing at 127.0.0.99 (A) and dead::beef (AAAA) + mock_data = { + "evilcorp.com": {"A": ["127.0.0.1"]}, + "test.evilcorp.com": {"A": ["127.0.0.99"]}, + r"regex:.*\.test\.evilcorp\.com$": {"A": ["127.0.0.99"], "AAAA": ["dead::beef"]}, + } + await scan.helpers.dns._mock_dns(mock_data) + + # is_wildcard_domain reports the wildcard pool per parent + wildcard_domains = await scan.helpers.dns.is_wildcard_domain("asdf.test.evilcorp.com", ["A", "AAAA"]) + assert "test.evilcorp.com" in wildcard_domains + assert "A" in wildcard_domains["test.evilcorp.com"] + + # is_wildcard tells us whether a specific hostname is a wildcard hit + wildcard_rdtypes = await scan.helpers.dns.is_wildcard("asdf.test.evilcorp.com", ["A", "AAAA"]) + assert wildcard_rdtypes.get("A") == (True, "test.evilcorp.com") + + # a non-wildcard sibling is not flagged + non_wildcard = await scan.helpers.dns.is_wildcard("evilcorp.com", ["A"]) + # is_domain short-circuits to {} for the bare domain + assert non_wildcard == {} + + await scan._cleanup() @pytest.mark.asyncio @@ -789,11 +208,7 @@ async def test_dns_graph_structure(bbot_scanner): await scan._prep() await scan.helpers.dns._mock_dns( { - "evilcorp.com": { - "CNAME": [ - "www.evilcorp.com", - ] - }, + "evilcorp.com": {"CNAME": ["www.evilcorp.com"]}, "www.evilcorp.com": {"CNAME": ["test.evilcorp.com"]}, "test.evilcorp.com": {"A": ["127.0.0.1"]}, } @@ -821,7 +236,9 @@ async def test_hostname_extraction(bbot_scanner): "evilcorp.com": { "A": ["127.0.0.1"], "TXT": [ - "v=spf1 include:spf-a.evilcorp.com include:spf-b.evilcorp.com include:icpbounce.com include:shops.shopify.com include:_spf.qemailserver.com include:spf.mandrillapp.com include:spf.protection.office365.us include:spf-003ea501.gpphosted.com 127.0.0.1 -all" + '"v=spf1 include:spf-a.evilcorp.com include:spf-b.evilcorp.com include:icpbounce.com ' + 
"include:shops.shopify.com include:_spf.qemailserver.com include:spf.mandrillapp.com " + 'include:spf.protection.office365.us include:spf-003ea501.gpphosted.com 127.0.0.1 -all"' ], } } @@ -859,6 +276,9 @@ async def test_dns_helpers(bbot_scanner): hostname = f"{srv_record}.example.com" assert service_record(hostname) is True + # all_rdtypes is the canonical list -- make sure it's not empty and contains the basics + assert "A" in all_rdtypes and "AAAA" in all_rdtypes and "CNAME" in all_rdtypes + # make sure system nameservers are excluded from use by DNS brute force brute_nameservers = tempwordlist(["1.2.3.4", "8.8.4.4", "4.3.2.1", "8.8.8.8"]) scan = bbot_scanner(config={"dns": {"brute_nameservers": brute_nameservers}}) diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index ffb4ae0d46..8bea4e2700 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -76,10 +76,10 @@ def set_expect_requests(self, expect_args={}, respond_args={}): def set_expect_requests_handler(self, expect_args=None, request_handler=None): self.httpserver.expect_request(expect_args).respond_with_handler(request_handler) - async def mock_dns(self, mock_data, custom_lookup_fn=None, scan=None): + async def mock_dns(self, mock_data, scan=None): if scan is None: scan = self.scan - await scan.helpers.dns._mock_dns(mock_data, custom_lookup_fn=custom_lookup_fn) + await scan.helpers.dns._mock_dns(mock_data) def mock_interactsh(self, name): from ...conftest import Interactsh_mock diff --git a/bbot/test/test_step_2/module_tests/test_module_baddns_zone.py b/bbot/test/test_step_2/module_tests/test_module_baddns_zone.py index 688f105ea5..63d28929da 100644 --- a/bbot/test/test_step_2/module_tests/test_module_baddns_zone.py +++ b/bbot/test/test_step_2/module_tests/test_module_baddns_zone.py @@ -47,14 +47,37 @@ class TestBaddns_zone_nsec(BaseTestBaddns_zone): async def setup_after_prep(self, module_test): from 
baddns.lib.whoismanager import WhoisManager + from baddns.lib.dnsmanager import DNSManager + + # NSEC records can't go through MockClient (hickory refuses zone-file NSEC parsing), + # so we pass only non-NSEC data to mock_dns and handle NSEC via DNSManager monkeypatch. + nsec_data = { + "bad.com": ["asdf.bad.com"], + "asdf.bad.com": ["zzzz.bad.com"], + "zzzz.bad.com": ["xyz.bad.com"], + } await module_test.mock_dns( { - "bad.com": {"A": ["127.0.0.5"], "NSEC": ["asdf.bad.com"]}, - "asdf.bad.com": {"NSEC": ["zzzz.bad.com"]}, - "zzzz.bad.com": {"NSEC": ["xyz.bad.com"]}, + "bad.com": {"A": ["127.0.0.5"]}, } ) + + original_do_resolve = DNSManager.do_resolve + original_dispatch = DNSManager.dispatchDNS + + async def patched_do_resolve(self, target, rdatatype): + if rdatatype == "NSEC" and target in nsec_data: + return nsec_data[target] + return await original_do_resolve(self, target, rdatatype) + + async def patched_dispatch(self, omit_types=[]): + await original_dispatch(self, omit_types=omit_types) + if "NSEC" not in omit_types and self.target in nsec_data: + self.answers["NSEC"] = nsec_data[self.target] + + module_test.monkeypatch.setattr(DNSManager, "do_resolve", patched_do_resolve) + module_test.monkeypatch.setattr(DNSManager, "dispatchDNS", patched_dispatch) module_test.monkeypatch.setattr(WhoisManager, "dispatchWHOIS", self.dispatchWHOIS) def check(self, module_test, events): diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsbimi.py b/bbot/test/test_step_2/module_tests/test_module_dnsbimi.py index 5ceb9f44a4..3a74c493a6 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnsbimi.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnsbimi.py @@ -1,9 +1,22 @@ from .base import ModuleTestBase -raw_bimi_txt_default = ( +# Mock data is zone-file format. TXT character-strings must be quoted so that +# whitespace inside the payload is preserved (otherwise the zone-file lexer +# splits each token into its own character-string). 
+mock_bimi_txt_default = ( '"v=BIMI1;l=https://bimi.test.localdomain/logo.svg; a=https://bimi.test.localdomain/certificate.pem"' ) -raw_bimi_txt_nondefault = '"v=BIMI1; l=https://nondefault.thirdparty.tld/brand/logo.svg;a=https://nondefault.thirdparty.tld/brand/certificate.pem;"' +mock_bimi_txt_nondefault = ( + '"v=BIMI1; l=https://nondefault.thirdparty.tld/brand/logo.svg;' + 'a=https://nondefault.thirdparty.tld/brand/certificate.pem;"' +) + +# What the modules emit in RAW_DNS_RECORD events: hickory's Display strips the +# surrounding quotes from TXT, returning the bare character-string content. +raw_bimi_txt_default = ( + "v=BIMI1;l=https://bimi.test.localdomain/logo.svg; a=https://bimi.test.localdomain/certificate.pem" +) +raw_bimi_txt_nondefault = "v=BIMI1; l=https://nondefault.thirdparty.tld/brand/logo.svg;a=https://nondefault.thirdparty.tld/brand/certificate.pem;" class TestDnsbimi(ModuleTestBase): @@ -28,27 +41,27 @@ async def setup_after_prep(self, module_test): }, "default._bimi.test.localdomain": { "A": ["127.0.0.44"], - "TXT": [raw_bimi_txt_default], + "TXT": [mock_bimi_txt_default], }, "nondefault._bimi.test.localdomain": { "A": ["127.0.0.44"], - "TXT": [raw_bimi_txt_nondefault], + "TXT": [mock_bimi_txt_nondefault], }, "_bimi.default._bimi.test.localdomain": { "A": ["127.0.0.44"], - "TXT": [raw_bimi_txt_default], + "TXT": [mock_bimi_txt_default], }, "_bimi.nondefault._bimi.test.localdomain": { "A": ["127.0.0.44"], - "TXT": [raw_bimi_txt_default], + "TXT": [mock_bimi_txt_default], }, "default._bimi.default._bimi.test.localdomain": { "A": ["127.0.0.44"], - "TXT": [raw_bimi_txt_default], + "TXT": [mock_bimi_txt_default], }, "nondefault._bimi.nondefault._bimi.test.localdomain": { "A": ["127.0.0.44"], - "TXT": [raw_bimi_txt_nondefault], + "TXT": [mock_bimi_txt_nondefault], }, } ) diff --git a/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py b/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py index d33ac11904..b561f2ff9d 100644 --- 
a/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnstlsrpt.py @@ -1,6 +1,12 @@ from .base import ModuleTestBase -raw_smtp_tls_txt = '"v=TLSRPTv1; rua=mailto:tlsrpt@sub.blacklanternsecurity.notreal,mailto:test@on.thirdparty.com, https://tlspost.example.com;"' +# Mock data is zone-file format: TXT must be quoted to keep the value as a single +# character-string (otherwise the zone-file lexer splits on whitespace). +mock_smtp_tls_txt = '"v=TLSRPTv1; rua=mailto:tlsrpt@sub.blacklanternsecurity.notreal,mailto:test@on.thirdparty.com, https://tlspost.example.com;"' + +# What the module emits in RAW_DNS_RECORD events (no surrounding quotes from +# hickory's Display impl). +raw_smtp_tls_txt = "v=TLSRPTv1; rua=mailto:tlsrpt@sub.blacklanternsecurity.notreal,mailto:test@on.thirdparty.com, https://tlspost.example.com;" class TestDNSTLSRPT(ModuleTestBase): @@ -19,13 +25,13 @@ async def setup_after_prep(self, module_test): }, "_smtp._tls.blacklanternsecurity.notreal": { "A": ["127.0.0.33"], - "TXT": [raw_smtp_tls_txt], + "TXT": [mock_smtp_tls_txt], }, "_tls._smtp._tls.blacklanternsecurity.notreal": { "A": ["127.0.0.44"], }, "_smtp._tls._smtp._tls.blacklanternsecurity.notreal": { - "TXT": [raw_smtp_tls_txt], + "TXT": [mock_smtp_tls_txt], }, "sub.blacklanternsecurity.notreal": { "A": ["127.0.0.55"], diff --git a/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py b/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py index 4d1fbff4b3..5104291cc3 100644 --- a/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py +++ b/bbot/test/test_step_2/template_tests/test_template_subdomain_enum.py @@ -176,19 +176,32 @@ class TestSubdomainEnumWildcardDefense(TestSubdomainEnumWildcardBaseline): dedup_strategy = "highest_parent" dns_mock_data = { - "walmart.cn": {"A": ["127.0.0.2"], "TXT": ["asdf.walmart.cn"]}, + "walmart.cn": {"A": ["127.0.0.2"], "TXT": ['"asdf.walmart.cn"']}, + # 
wildcard: every *.walmart.cn resolves to an A record + r"regex:.*\.walmart\.cn$": {"A": ["127.0.0.99"]}, } async def setup_after_prep(self, module_test): - # simulate wildcard - custom_lookup = """ -def custom_lookup(query, rdtype): - import random - if rdtype == "A" and query.endswith(".walmart.cn"): - ip = ".".join([str(random.randint(0,256)) for _ in range(4)]) - return {ip} -""" - await module_test.mock_dns(self.dns_mock_data, custom_lookup_fn=custom_lookup) + import random + + await module_test.mock_dns(self.dns_mock_data) + + # Simulate walmart.cn's real-world behavior: random subdomains each + # resolve to a *different* IP, so wildcard detection flags it as + # POSSIBLE rather than TRUE. We monkey-patch _is_wildcard_zone to + # return random IPs in its result set, mimicking the inconsistency. + _original = module_test.scan.helpers.dns._is_wildcard_zone + + async def _random_wildcard_zone(host, rdtype): + results, results_raw = await _original(host, rdtype) + if results and host.endswith("walmart.cn"): + # Replace the consistent mock IPs with random ones so the + # wildcard detector sees them as POSSIBLE, not TRUE. + results = {".".join(str(random.randint(1, 254)) for _ in range(4)) for _ in range(5)} + results_raw = {str(ip) for ip in results} + return results, results_raw + + module_test.scan.helpers.dns._is_wildcard_zone = _random_wildcard_zone def check(self, module_test, events): # no subdomain enum should happen on this domain! 
@@ -224,6 +237,6 @@ def check(self, module_test, events): e for e in events if e.type == "RAW_DNS_RECORD" - and e.data == {"host": "walmart.cn", "type": "TXT", "answer": '"asdf.walmart.cn"'} + and e.data == {"host": "walmart.cn", "type": "TXT", "answer": "asdf.walmart.cn"} ] ) diff --git a/pyproject.toml b/pyproject.toml index e89b936f5d..6726f89c36 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,6 +49,7 @@ dependencies = [ "ansible-core>=2.17,<3", "tldextract>=5.3.0,<6", "cloudcheck>=9.2.0,<10", + "blastdns>=1.9.0,<2", ] [project.urls] @@ -77,8 +78,8 @@ dev = [ "fastapi>=0.115.5,<0.129.0", "pytest-httpx>=0.35", "pytest-benchmark>=4,<6", + "baddns~=2.1.0", "ruff==0.15.10", - "baddns~=2.0.0", ] docs = [ "mkdocs>=1.5.2,<2", diff --git a/uv.lock b/uv.lock index ae7c73d099..0de67585ce 100644 --- a/uv.lock +++ b/uv.lock @@ -166,9 +166,10 @@ wheels = [ [[package]] name = "baddns" -version = "2.0.452" +version = "2.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "blastdns" }, { name = "cloudcheck" }, { name = "colorama" }, { name = "dnspython" }, @@ -178,9 +179,9 @@ dependencies = [ { name = "pyyaml" }, { name = "tldextract" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/77/32/1d82b8781bd9285fabb4a03f342bfc8e27f219665af5dd184fc97bf9694a/baddns-2.0.452.tar.gz", hash = "sha256:193e3ff866986ddb626ec563991c3ea760985ca74f7ed59c0f3770c9f5543d9d", size = 61143, upload-time = "2026-03-20T18:39:14.505Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/6a/d02ca8e8fa19d92a6e616324c7dda942d01c09f2729bb736114c7268720e/baddns-2.1.0.tar.gz", hash = "sha256:30058b22dffcf16f7f195e7b298b099cb817c264324cec06c47a264ac2b29309", size = 60334, upload-time = "2026-04-16T14:16:44.114Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d6/cb/7ed49be7f8802adfcb57710548ed28239d899eaf5db7a49e57df8600385f/baddns-2.0.452-py3-none-any.whl", hash = 
"sha256:54e38e2b661de981f95454abea8b17a6bb3d1d5ff1e046738c319cfd7cfc018d", size = 117120, upload-time = "2026-03-20T18:39:13.376Z" }, + { url = "https://files.pythonhosted.org/packages/26/a7/36449f6d054de75a70db7ece0a66566990af95b74666ebd4121357c6f89f/baddns-2.1.0-py3-none-any.whl", hash = "sha256:1cf722f394e33eb26700c1ef717dcd01a451a241607cac3d62c95d355de5e514", size = 117925, upload-time = "2026-04-16T14:16:43.075Z" }, ] [[package]] @@ -193,6 +194,7 @@ dependencies = [ { name = "ansible-runner" }, { name = "asndb" }, { name = "beautifulsoup4" }, + { name = "blastdns" }, { name = "cachetools" }, { name = "cloudcheck" }, { name = "deepdiff" }, @@ -263,6 +265,7 @@ requires-dist = [ { name = "ansible-runner", specifier = ">=2.3.2,<3" }, { name = "asndb", specifier = ">=1.0.4" }, { name = "beautifulsoup4", specifier = ">=4.12.2,<5" }, + { name = "blastdns", specifier = ">=1.9.0,<2" }, { name = "cachetools", specifier = ">=5.3.2,<7.0.0" }, { name = "cloudcheck", specifier = ">=9.2.0,<10" }, { name = "deepdiff", specifier = ">=8.0.0,<9" }, @@ -297,7 +300,7 @@ requires-dist = [ [package.metadata.requires-dev] dev = [ - { name = "baddns", specifier = "~=2.0.0" }, + { name = "baddns", specifier = "~=2.1.0" }, { name = "fastapi", specifier = ">=0.115.5,<0.129.0" }, { name = "pre-commit", specifier = ">=3.4,<5.0" }, { name = "pytest", specifier = ">=8.3.1,<9" }, @@ -340,6 +343,108 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" }, ] +[[package]] +name = "blastdns" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "orjson" }, + { name = "pydantic" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/a0/63/5ad427bd5e23780961ba4f183a85d8428d961feabcfaf237347801dd00a0/blastdns-1.9.1.tar.gz", hash = "sha256:e0ea09a9f0199a64aad8a6abe54e0f0de523a12e6f96da71e79af7fea9d12f2f", size = 114292, upload-time = "2026-04-15T20:33:18.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/89/c606bd3d9e50771e168ad64745e4b9ef28fa511533d225735178eddab5c7/blastdns-1.9.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:fc9a0c8808b90748dadc224e86a21fed75c1ccfa7df682906401790069262816", size = 2449222, upload-time = "2026-04-15T20:30:51.744Z" }, + { url = "https://files.pythonhosted.org/packages/56/97/8e10f45144e7644c6f9e7a9f96eb7e433ef34bc5e6e84ae3e50581ae24d1/blastdns-1.9.1-cp310-cp310-manylinux_2_28_armv7l.whl", hash = "sha256:077dfd2eb020ea7d120c18263a3f646bcd8dfe1e0d6d1c1f604d5034892d3404", size = 2321373, upload-time = "2026-04-15T20:31:06.388Z" }, + { url = "https://files.pythonhosted.org/packages/47/80/31f3383453114beeade87d99058e1513dd82dafd67002d09ee9337fa886c/blastdns-1.9.1-cp310-cp310-manylinux_2_28_i686.whl", hash = "sha256:eb7833f85bbb8ac246ec55ca5aa9bf622f18d61f6566b4fbc6b3c42e99eae5a7", size = 2546563, upload-time = "2026-04-15T20:31:49.596Z" }, + { url = "https://files.pythonhosted.org/packages/45/76/eefa2fbe0be7781d33baf0b4670bc9d2db6aff0d94aeec1dd8d54c389683/blastdns-1.9.1-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:911516924ff9652cae0905ba218cd58f03d6e4c4d8972d8c00a3540496a15dad", size = 3519094, upload-time = "2026-04-15T20:31:20.464Z" }, + { url = "https://files.pythonhosted.org/packages/62/57/bbef43cfb51ed61977ac2e9d378d5e82ccf541ae3e4f1d318acd25ec6100/blastdns-1.9.1-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:693ba5b6820c6ab8cfd37202babeef354a2fca45ce51afcd8ffa912f2a50e82e", size = 2506642, upload-time = "2026-04-15T20:31:35.818Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/25/811477849e41c66e85047133ee4af7ecbd320c5b251988fee94c97c426f8/blastdns-1.9.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:06ebb94fbcc130acfc73356b36f135f9ca655a5ced9780c3ff9bee6394c91639", size = 2439602, upload-time = "2026-04-15T20:31:59.906Z" }, + { url = "https://files.pythonhosted.org/packages/a5/9f/c592c221970f278822f2f8bcefc2422eebf6cced23268d3fda7daec24e01/blastdns-1.9.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:99966441bfed1f5b45abf6c736be5d9bc141946ffe7582a1fbfd90386b16d4af", size = 2627363, upload-time = "2026-04-15T20:32:23.893Z" }, + { url = "https://files.pythonhosted.org/packages/74/41/9a3b74481259f76ce863a6d883779766970007952ceedd469a2d903421f7/blastdns-1.9.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:dd587b057bebdf2797cafa69923425312a038a56b4f48e740db1ce9584d4169a", size = 2594515, upload-time = "2026-04-15T20:32:37.097Z" }, + { url = "https://files.pythonhosted.org/packages/a7/41/23f2ab8eda0f31bf85b1f1aaa41dc2205c224f7032ba431434d78cfc0625/blastdns-1.9.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f45a4fb8dc5d84bc94b266230329ca4e14ae1e43a65f45e1ba52c1ab8509964b", size = 2671944, upload-time = "2026-04-15T20:32:50.331Z" }, + { url = "https://files.pythonhosted.org/packages/85/2a/4149667e1245c80cd501bad55bed7cdb42190c3d727eb742566e65f61041/blastdns-1.9.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:98b9ea17704491fd6d1e40a183222439403433ab2046f8e323ed10dcac5ab214", size = 2689118, upload-time = "2026-04-15T20:33:04.593Z" }, + { url = "https://files.pythonhosted.org/packages/8d/04/8af93946ac6a53f981356e13ddd2eda5c8b76ab355054731e8704c2f4a59/blastdns-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:8581a729dd9f32cca8edef0fd8c0529be81fe8ca7faef497f1712b5277c0b896", size = 2657117, upload-time = "2026-04-15T20:33:20.027Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/d7/b4a757c662a45f82dc32fe916151caef24763f06ddf1d7ac571ff6f0ac51/blastdns-1.9.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:67f52aa4880bed47da178065c8b2935cc41f526a340e0a1dae90f2eb641f99f8", size = 2655899, upload-time = "2026-04-15T20:32:17.262Z" }, + { url = "https://files.pythonhosted.org/packages/98/16/5fbb3f57e2a2da7b1ea954c4a7576a0216b8b6f45714cedc0da92c933183/blastdns-1.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9dda8352acba5c9b7e498c61f73f28120e312e441a37d26d8912ddb8c8876b11", size = 2522537, upload-time = "2026-04-15T20:32:10.658Z" }, + { url = "https://files.pythonhosted.org/packages/ab/7c/974f4a6f021fcf5a172dc9e6ab508527e5554576eb08f057f4d6345dca79/blastdns-1.9.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b967cba75fd757b0e954cf4fb3d69c9fb39d95871316a9cf0bb0c2dc5e720077", size = 2450776, upload-time = "2026-04-15T20:30:53.403Z" }, + { url = "https://files.pythonhosted.org/packages/ad/a3/dc118d2a4be9efc933b29add465451537e10de53ee59e08bc6bf34de85f9/blastdns-1.9.1-cp311-cp311-manylinux_2_28_armv7l.whl", hash = "sha256:7a51dce160ffee92b548553b8c01d50ffa6d1f4c16d153df6ec36cc04e4ce8cc", size = 2321089, upload-time = "2026-04-15T20:31:07.684Z" }, + { url = "https://files.pythonhosted.org/packages/79/e9/47ae75a10808c166c38cd989fc7b65b5e31755985030e30ae3535a0abaad/blastdns-1.9.1-cp311-cp311-manylinux_2_28_i686.whl", hash = "sha256:f9fb56f9f8a672518968f7a4d882718c75b4b42629baa76f55c8bb4f60c6d8d1", size = 2551494, upload-time = "2026-04-15T20:31:50.955Z" }, + { url = "https://files.pythonhosted.org/packages/6d/2f/f2f8f5b40c83b868a807c1e8dbeafb930a0568c96ba53e95cd5e5b6b345b/blastdns-1.9.1-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:d0d23e22bdbf4fc8f4c32ff073603c56c6efb9f4911a168d9f004de664a5b13f", size = 3520728, upload-time = "2026-04-15T20:31:22.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/81/af8964cd04f861077d2e99698b62d7cf01d5c5c193d8a815f3970dcdc0e6/blastdns-1.9.1-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:0e7603dc7e5fef925d03b5f28d75164e634ef2d6fc5b960671d0cf88380d7b70", size = 2507167, upload-time = "2026-04-15T20:31:37.313Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8b/6326da63ad387377c0dd94f52426b276273f8cdf32eab17e4b3ba0ec4c0f/blastdns-1.9.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:8d2cc4189b98fd6d2ca2b41043f0a501455e8043562f9086fafb25c93ef053ad", size = 2438814, upload-time = "2026-04-15T20:32:01.274Z" }, + { url = "https://files.pythonhosted.org/packages/d5/30/d94d844101d7650982c6a686b41ba65fae84692f6a65147aea9756a4d09c/blastdns-1.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e31c987da6b52c1194ec057c932400b72f729922fa18737b68a868df5911d27", size = 2628570, upload-time = "2026-04-15T20:32:25.287Z" }, + { url = "https://files.pythonhosted.org/packages/63/a0/c2d1d9d6bcf20836b66b06c970e2674e5d531c98f5f8a5d8b80a2c31e413/blastdns-1.9.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:da747e424bea405fc11ad7cb71ea36a082c100e40c0fc18855e209104f4e08d6", size = 2595745, upload-time = "2026-04-15T20:32:38.459Z" }, + { url = "https://files.pythonhosted.org/packages/e9/f6/0c0e8a070cce991ba16227a3a1e8602f76a84db783cd2f3f170b2250a373/blastdns-1.9.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:801224d7006b61789378addae8751aeb7c7877c8ecf19b515f4a856230fc68bc", size = 2671215, upload-time = "2026-04-15T20:32:51.674Z" }, + { url = "https://files.pythonhosted.org/packages/ee/4a/b2cfbb5cbede71ba5c116bea8bd68222e9ab5bef49b4cd1312954978c99f/blastdns-1.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1ce70a659667f3adc7c0eac7d4b2c6f4c9f8429367254ec5308d04f36213f935", size = 2689185, upload-time = "2026-04-15T20:33:06.529Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/34/ca482b1f59520b6c5b3fa50b253bd009e6e636541cf238788e34b81e5352/blastdns-1.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:5ab93ca714cb913423e2780b2c3881a1303fa21948c5602795262c9bd51c702b", size = 2658811, upload-time = "2026-04-15T20:33:21.347Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f7/82d6bc99f2c3c65ee7d1c87627c9451ccb3c6dd1e21d1afb7caed4761c20/blastdns-1.9.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e46c938ce8eec89384c82b68fd62fa2f76935d4d68d1ba51f199008c1a8b056b", size = 2649921, upload-time = "2026-04-15T20:32:18.725Z" }, + { url = "https://files.pythonhosted.org/packages/96/c4/6f1b04ee737feeac6b78459708b6b6b5e4215b8a443945bbb5078a1dba0b/blastdns-1.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b1eead79b192af541f4e89328aaed7e675254a56583d406dcc5cc527b1ed0662", size = 2510639, upload-time = "2026-04-15T20:32:12.531Z" }, + { url = "https://files.pythonhosted.org/packages/b0/b4/80cc72f8e94a46743bc400d2336506388fcbffe0feeaeaec03410d451db6/blastdns-1.9.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d0d6a6d4654032b2fc1a1df1bb145afb83989c23160da5b629c64e797a6f3027", size = 2449415, upload-time = "2026-04-15T20:30:55.661Z" }, + { url = "https://files.pythonhosted.org/packages/f5/d6/459997181a1a0c8bd162c6f3b830732f49f39fd9e62d77dc1fde2b9fb953/blastdns-1.9.1-cp312-cp312-manylinux_2_28_armv7l.whl", hash = "sha256:cb5745b8f4ea3929d090dfa18126b9094986ea2e5808a54cdb58a355e1360a82", size = 2319087, upload-time = "2026-04-15T20:31:09.425Z" }, + { url = "https://files.pythonhosted.org/packages/10/a8/d67e9a0212005a178ab1a5fa3ea7cd280fb00b981a1020b15a7cf87ea244/blastdns-1.9.1-cp312-cp312-manylinux_2_28_i686.whl", hash = "sha256:7bd6bfc82e08624d1b40f1e4822b548f63829351e7f72080d69099b32fdb8361", size = 2549284, upload-time = "2026-04-15T20:31:52.737Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/e9/667b14a3cda6cd1bb31838c2769ec8533ce32aa5a3a905acc5db6e687851/blastdns-1.9.1-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:e23dc048586e8b20523ecbc4c5931d8bc5485e79200207c4c6f91ad4b5ea980f", size = 3516061, upload-time = "2026-04-15T20:31:24.299Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f7/8b756e8a2dd781007763318b11fb2fb8f233dfb86f10409a69abc9b03c29/blastdns-1.9.1-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:6c0472de6616e7ed20b5fb3784c41b52e64a0ff1079106be0140c6f00e19f6c6", size = 2503102, upload-time = "2026-04-15T20:31:38.968Z" }, + { url = "https://files.pythonhosted.org/packages/91/c8/6f689dc1491c13ebe1275943995c3376644e26c130fd0e30dae538477996/blastdns-1.9.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:775dbd6b7d1eccb3375a9f17f43723ddf79a1d12ae1d202d3f587101aff21db1", size = 2436641, upload-time = "2026-04-15T20:32:02.719Z" }, + { url = "https://files.pythonhosted.org/packages/d1/52/5376331c9278f5334b76dd48210e1fb10a83f4920516c7060ccde56ec6da/blastdns-1.9.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:40bff362bd9be708197d5c548ad7f006688d763cada1faee44ae1e7d2acb06ef", size = 2625799, upload-time = "2026-04-15T20:32:26.823Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d1/622c75809c143d51037daa8af9f3477e6e4606a12d58f5189b4da81e6511/blastdns-1.9.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:408c30d61126c1cc0ecacb1011ed6ef2f34840e937e2def22f3ed943ad6ba8f4", size = 2593888, upload-time = "2026-04-15T20:32:39.932Z" }, + { url = "https://files.pythonhosted.org/packages/11/a0/0a8dc041ccb68720dbe44ce36c10404d904b0572c0589987c5618c1c109d/blastdns-1.9.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9af85ef741d4f230ea55f12d4876b1123562f51ce9b9f7d58b6f52d3d84395c1", size = 2670025, upload-time = "2026-04-15T20:32:53.153Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/33/b5d69a9a719b3ec354fd938769f95697e17e4c1011b97aa5f424d2a7be77/blastdns-1.9.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3339b69fc959292a66ea724b0d5e9563c4ca0be7dd48fc78adb8914a3c82ab57", size = 2685562, upload-time = "2026-04-15T20:33:08.351Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d5/36c430234203e7f4c2719b321ee316c4ac82db2541761ecff6035445fbec/blastdns-1.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:074b0ffe33ed57d6bef65d0120fa508d855f49910a1ed51ee6c3920e6ffc73bf", size = 2657744, upload-time = "2026-04-15T20:33:22.771Z" }, + { url = "https://files.pythonhosted.org/packages/09/a2/ed3363e3cbc5d10fc2301fc0651bb316f203a287a331eb65ce0f0c0726d1/blastdns-1.9.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3fe8fecc19fdb3ee6bcfcd4242c9e2433254c8c40d1b57fa40d6190eadd73da7", size = 2649286, upload-time = "2026-04-15T20:32:20.42Z" }, + { url = "https://files.pythonhosted.org/packages/db/15/1e37003ef21f8777b51f1abc85edae818358743a74b0a836a8a409383f91/blastdns-1.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b84013fc47cebeac3093f4feaeed3a4d5f45d82eebdffb73bde610e8e30f3399", size = 2510656, upload-time = "2026-04-15T20:32:14.308Z" }, + { url = "https://files.pythonhosted.org/packages/03/4e/b7f055bbe93b4201738c284482cd94d76b8f148381daa6774ddf509533f5/blastdns-1.9.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:7e2f7b28bff77a5c5627f1f85069309e3c4039d4d277756e84d2d5c229716c16", size = 2447985, upload-time = "2026-04-15T20:30:57.312Z" }, + { url = "https://files.pythonhosted.org/packages/14/d4/05a0eb6ce69da50e1a20a4374e685b54cafcf2550aa544280f77a56e7c88/blastdns-1.9.1-cp313-cp313-manylinux_2_28_armv7l.whl", hash = "sha256:4668ccfde0215c685a81e3e121ef5d0eafccdb940bb7f9d3f7b8ffb004955b51", size = 2319178, upload-time = "2026-04-15T20:31:11.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/a7/d483bf6db4762f9ac2772cac722659334cf704316bf8e1c190af5b2fd430/blastdns-1.9.1-cp313-cp313-manylinux_2_28_i686.whl", hash = "sha256:9f4b05ba48357b70f0ee9ff6dfc581aa3af55040e4b0db4e43c53e4c3d5c8e58", size = 2549530, upload-time = "2026-04-15T20:31:54.377Z" }, + { url = "https://files.pythonhosted.org/packages/77/c1/bc1eb90bb9fda8a354ffda502da723afa4aeaf7e7538ac94dea3e922dc4d/blastdns-1.9.1-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:6da53361836a9b6d05daf391d8ce8f765de881e394827d5314021cb4c013eec7", size = 3515285, upload-time = "2026-04-15T20:31:26.049Z" }, + { url = "https://files.pythonhosted.org/packages/2c/2e/6dd47138c57efa03ef848f40c0ee5686f69fba028b72f8683173af7e44a2/blastdns-1.9.1-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:a10dbdcda0ab8c9cf641163cb568d820d51282d848bdd0c8627a51f3f6038163", size = 2501978, upload-time = "2026-04-15T20:31:40.265Z" }, + { url = "https://files.pythonhosted.org/packages/8c/95/cc06697ddb03d03fb6a171b337c9c649cd5bc3493b4e8c83e185eeef1dad/blastdns-1.9.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1f3400075b16905fd6837902511fed9c4b98b13de3cad8e3a78cf6d0e78f8124", size = 2435831, upload-time = "2026-04-15T20:32:04.657Z" }, + { url = "https://files.pythonhosted.org/packages/9c/26/6a702d559328213e0e3e7bb3a7c8788031c42b0613ae363cfd5ee33d130d/blastdns-1.9.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a32a1e51d34b9e926a9c218c166a254d57c120a2f7b963f04fe81fd3d45f35ed", size = 2625279, upload-time = "2026-04-15T20:32:28.256Z" }, + { url = "https://files.pythonhosted.org/packages/25/e4/fc227358013e8bce5896d4405ecc882520d4480da1c30efb6c0950e526ac/blastdns-1.9.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:fbd5f5cd402b3075254e03b608c48e25825ea806421437e74c5fc833716a86a6", size = 2593895, upload-time = "2026-04-15T20:32:41.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/8d/45e3c76947052ce2c60d58944f40178ded356b4440f83007a159ab095bef/blastdns-1.9.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:3c398d2c3780e50e1a12515e0ada2eb7c510e6d699ad60d6c6382352ba4a5db5", size = 2670367, upload-time = "2026-04-15T20:32:54.542Z" }, + { url = "https://files.pythonhosted.org/packages/77/ac/482fd604a13e05ab3e0efa84336fa39c5c391addf8308b16ee9109f67fd0/blastdns-1.9.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:11057e252c5545f1f89a4f5413a396366686799dd48b11e7c6dcad44d05df47e", size = 2684704, upload-time = "2026-04-15T20:33:09.845Z" }, + { url = "https://files.pythonhosted.org/packages/fc/67/2998dd8a5239a7afc223f4ec270048d47af5e10bb996d045c3e6e5580433/blastdns-1.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:a0dc5dd486fec357fbcde1ff2f4dd3bcc4baa02323d46cc749c9a8f0fa810435", size = 2657425, upload-time = "2026-04-15T20:33:24.357Z" }, + { url = "https://files.pythonhosted.org/packages/47/49/2b51f2ba622a1723b75411fd2384d26a4d55576e0f642019fcba1ea5498a/blastdns-1.9.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:5bf0c8445d9d4cc0ce7668ddb2358e5c90ce711eb2001d5fd742fc30bdf6b9ff", size = 2443445, upload-time = "2026-04-15T20:30:59.07Z" }, + { url = "https://files.pythonhosted.org/packages/f7/60/490cd9f4b40cc9ee0de49252f278798e0d020f002f2c3bd93669096050f5/blastdns-1.9.1-cp313-cp313t-manylinux_2_28_armv7l.whl", hash = "sha256:0d3f1498eb37543ecbf92719aae8eda22c4cabbf14e1a1ba6c6a3e5a9157b2df", size = 2319663, upload-time = "2026-04-15T20:31:12.808Z" }, + { url = "https://files.pythonhosted.org/packages/d1/6c/b801b2706677a62cdac1c3dd46d618f3b1b18bd26b21a2df3531ab559ff4/blastdns-1.9.1-cp313-cp313t-manylinux_2_28_ppc64le.whl", hash = "sha256:1be2eb559002355311fd9f584a3ed4dc4b9b5fa4255864f39779203ad3d71228", size = 3521129, upload-time = "2026-04-15T20:31:27.637Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/83/81f22f8b06fe06e7cc1021d6d84a1f570300c7d3f7f1e8da94617d68aba6/blastdns-1.9.1-cp313-cp313t-manylinux_2_28_s390x.whl", hash = "sha256:8b207ea568b9dfc62e0fc5a848afa15448311ec3f2dbcc519327399f6604f087", size = 2512375, upload-time = "2026-04-15T20:31:41.616Z" }, + { url = "https://files.pythonhosted.org/packages/59/6c/ff24de4e418c1b683db47975af30ea600deb96c6e8ea2163470b57d8ad85/blastdns-1.9.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4f4f11a2fa4b9aa7d735252e4cfa515e51330d019065ed575a71ceb645a7fc13", size = 2621796, upload-time = "2026-04-15T20:32:29.51Z" }, + { url = "https://files.pythonhosted.org/packages/52/53/60620d5ecab72926afcb197476c765f10d690b574786c1be8d3b8f5465a0/blastdns-1.9.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:072b0f20ed5a23ce9a69d93fc0f4f840ca03e92a02290640c32c08e51e85e09a", size = 2591749, upload-time = "2026-04-15T20:32:43.158Z" }, + { url = "https://files.pythonhosted.org/packages/6e/2a/d0fe319e0aa0a79346b0107a339724489feaedd266fd209374f4731f6abe/blastdns-1.9.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8985b6a5d88c57d9d3b566551b37a09349c0a30234167148c3d80423abbe9c8c", size = 2668454, upload-time = "2026-04-15T20:32:55.824Z" }, + { url = "https://files.pythonhosted.org/packages/f7/19/541a831d645ecede4f91fa6441e2eef23fbc3e0c7c51830f2f0a40bee609/blastdns-1.9.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:306e7ba2b9a0a3a9a38086ee8d187a33aae5979b588ad80bfb5ecd01633c298b", size = 2687970, upload-time = "2026-04-15T20:33:11.578Z" }, + { url = "https://files.pythonhosted.org/packages/85/c8/d784d1df9d930183b991822954dc31aeae2b3947147ce9d53d12eb7a3822/blastdns-1.9.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:337771fc54faafbdda2206426c362839961253a4959135767e9014ff4db879ca", size = 2650821, upload-time = "2026-04-15T20:32:21.943Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/98/0b3d1d6b5ff0cf1c404a9b8c7948102d724e767cf22ddedf9a3bd239a1ec/blastdns-1.9.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6015697b4fe9dc4ff2bc397367ae726c6e6cd87bdb86e1d6b5c6856d0914beb7", size = 2517319, upload-time = "2026-04-15T20:32:15.85Z" }, + { url = "https://files.pythonhosted.org/packages/b8/8b/ecc1e80fe8aa9ac5677c62d9c6d7bb6c2c68025d99f2ff81dc6be5eb907f/blastdns-1.9.1-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:01b2f1da7fe1fb44fd1af52c337979bb7284b727f971d83aa761a4887a22adb2", size = 2446094, upload-time = "2026-04-15T20:31:00.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/9e/0ac6101f9b597c61d3997623c0a3e5e7be4071461c8a2986851f3739da32/blastdns-1.9.1-cp314-cp314-manylinux_2_28_armv7l.whl", hash = "sha256:147d815a686771709c96803319233218310efb0247644df4f0d6958cb4f9b65c", size = 2320171, upload-time = "2026-04-15T20:31:14.233Z" }, + { url = "https://files.pythonhosted.org/packages/10/e4/d195149ab6402b157474ee93212b393a644eedea5f005a2aa0e650c3e4bb/blastdns-1.9.1-cp314-cp314-manylinux_2_28_i686.whl", hash = "sha256:ad4d1bb7b9fa57a7aec368a84d6830663b8f16550769edf273cf21e992425824", size = 2551099, upload-time = "2026-04-15T20:31:55.683Z" }, + { url = "https://files.pythonhosted.org/packages/c8/b0/c711045dd0ba9f15938930f75c66e57c6ae4962c821f6b0f43364d3799ff/blastdns-1.9.1-cp314-cp314-manylinux_2_28_ppc64le.whl", hash = "sha256:f2af21539691a44409020ef2260b63ac8f4ae0ee838a8e21ba51d3d480c2015c", size = 3519937, upload-time = "2026-04-15T20:31:29.379Z" }, + { url = "https://files.pythonhosted.org/packages/4c/72/2a0d2f09018e3461b16c5d64a72b7660dad97cc2eb1e0479f11a705f1db9/blastdns-1.9.1-cp314-cp314-manylinux_2_28_s390x.whl", hash = "sha256:c110f4de3839e204dc0ec97da33d01cdc78c9ffde991f583b735bf9df4c90b8d", size = 2508859, upload-time = "2026-04-15T20:31:42.963Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/f6/5c1d954dfc4d6f420dea661dc7eb228f2fd6d31954e3d492888be426f97c/blastdns-1.9.1-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:743b1501d9a625f6d0dd8ed668fd96241646bd6f70399992af43f8fef5fc42d9", size = 2439876, upload-time = "2026-04-15T20:32:06.166Z" }, + { url = "https://files.pythonhosted.org/packages/1f/71/c5a3837136449a1f5f3f14fb251fb318d48d5611755fc972fc2868473fbb/blastdns-1.9.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4c4b0b8ca367405807b14d63f57805ca2af21c651afd07feae5db32fbaaa63f7", size = 2624052, upload-time = "2026-04-15T20:32:30.895Z" }, + { url = "https://files.pythonhosted.org/packages/59/e5/9ee26448806cc548d147a32811b406646c1530bdf66f9ef42bd232b95dfe/blastdns-1.9.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8f4141a04a887e45f77e92fe5ae290c017275985b96455c2aa05084220b12423", size = 2594969, upload-time = "2026-04-15T20:32:44.441Z" }, + { url = "https://files.pythonhosted.org/packages/48/81/e9c759fc353a17b3256994f23b08d5c4f147bd0eab2e8522434548abd50c/blastdns-1.9.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c0247ea74dcb9cb22fc319ab15518a6b2cd8e147b560e6908acaa97f4eb9912b", size = 2671068, upload-time = "2026-04-15T20:32:57.332Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/5dd9a17edbee1bfb996d289e5b1bbd2e1446644c02da8fe71925c4a6c828/blastdns-1.9.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9131f9c496a6e67b46b5184fc49dc691340c1267982808460d3fdc9fbf00fc0d", size = 2689854, upload-time = "2026-04-15T20:33:12.962Z" }, + { url = "https://files.pythonhosted.org/packages/2a/82/62f420b248e485b222a7ad714d94d814e9d8ad90f4112fbbdf4d31d5f557/blastdns-1.9.1-cp314-cp314-win32.whl", hash = "sha256:f13198d57ac8606a542f273ffbeec0cf4d1b97ec5c367d217fdb7b25715888a6", size = 2206019, upload-time = "2026-04-15T20:33:27.428Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/0b/6db2d905fce282e099b43a93f4ed7bdf586d55f9c79a6e8cf446364981d3/blastdns-1.9.1-cp314-cp314-win_amd64.whl", hash = "sha256:b1301428a30b6ad1137f47470f882da604329d6a566708045106e06c3cc5b13b", size = 2656519, upload-time = "2026-04-15T20:33:26.101Z" }, + { url = "https://files.pythonhosted.org/packages/53/ea/b5ac534f37548ae80edfb915c7a1cbd764fb92ab62cc87d360b8f951d11e/blastdns-1.9.1-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:e5d4e64d0cdfef7e31d1e4c9124cc901d801fd1925c0add743dd4b5dac5ab48d", size = 2445855, upload-time = "2026-04-15T20:31:02.055Z" }, + { url = "https://files.pythonhosted.org/packages/53/15/43ce6b8ef5ba5e5d1ffb9ad4dbf9148ea2356e556cb1d3925ffc35fcd4d9/blastdns-1.9.1-cp314-cp314t-manylinux_2_28_armv7l.whl", hash = "sha256:9078d39a13c467185dbe205b3121ef6667b452612e64a9a83670d38a4ff8f280", size = 2320251, upload-time = "2026-04-15T20:31:16.119Z" }, + { url = "https://files.pythonhosted.org/packages/a9/e8/52dc13fd874bc0cc02b850ae87781bd53b1bdb6f6efa2b6b769a4bd16106/blastdns-1.9.1-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:712ff165713d9624a0811d94e2a555af15339f11df7fae9535aebd0de3577730", size = 3523183, upload-time = "2026-04-15T20:31:31.017Z" }, + { url = "https://files.pythonhosted.org/packages/fc/be/f129125b775e40c17624fb97550393528fa69dbb945f56fa01ba4cde797e/blastdns-1.9.1-cp314-cp314t-manylinux_2_28_s390x.whl", hash = "sha256:c31fb423922e491d87435d497a3ad571fdd1200b9d9a51322fbee2aff0275d15", size = 2512967, upload-time = "2026-04-15T20:31:44.564Z" }, + { url = "https://files.pythonhosted.org/packages/5b/fd/38b3d4986f984eb9f61987f312fb851c3c7943678f7685ff1803ac061eb8/blastdns-1.9.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:269e2f84e83c5a4c48e05c8222670289935ef9504184f6742e8049ded205d67f", size = 2622821, upload-time = "2026-04-15T20:32:32.222Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/84/0f0a9d767389b43edeae21f2665c5c745e2e4a056c697515962a7b4fe463/blastdns-1.9.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b2c751284fe68dcf70e3f9de11c6c7cbe228473c152563ad0a781fc4a95ed484", size = 2593955, upload-time = "2026-04-15T20:32:45.822Z" }, + { url = "https://files.pythonhosted.org/packages/8d/53/04ece144df58ae2af74607fa63f38d3345f56ab32e3fce31e99d2b358019/blastdns-1.9.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:c3925dfff6f2fb7dcaa514ee3730a55d0824dce0023038cabf69f9e341d5a632", size = 2670065, upload-time = "2026-04-15T20:32:59.942Z" }, + { url = "https://files.pythonhosted.org/packages/bd/92/6ae1976ec0a8d1b4b7946c3203cda4637bb5a261126eb8f46004d2af0297/blastdns-1.9.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e163cd43d5206e224e8e5ef5b18af634e97d5e90f535f0906af783e13b7fcec5", size = 2688595, upload-time = "2026-04-15T20:33:14.533Z" }, + { url = "https://files.pythonhosted.org/packages/88/f0/c44a8092fae154af9fc74d21ae3639a1544500c0a0516dc31a8a086d3ad4/blastdns-1.9.1-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:03f5dd6a4dfa45d5972a3be01f704264b43bb6d02b86b7eba132070c9f18f286", size = 2452601, upload-time = "2026-04-15T20:31:04.974Z" }, + { url = "https://files.pythonhosted.org/packages/08/3c/d3136a70d9b8dc3349a3a11753b5cd459168fe244ae983d0d5ba81e2d6be/blastdns-1.9.1-pp311-pypy311_pp73-manylinux_2_28_armv7l.whl", hash = "sha256:1e2621a878baa1be84b910fb2b7d492184f484988f5d572746b2e555c89926d7", size = 2322996, upload-time = "2026-04-15T20:31:19.091Z" }, + { url = "https://files.pythonhosted.org/packages/64/a1/3dea21c2a56f85e9ca11b97cef0282b40737c9c1f35769ace9c4501aa790/blastdns-1.9.1-pp311-pypy311_pp73-manylinux_2_28_i686.whl", hash = "sha256:356fde1c078710dd93d627282c18945390d74cca7cad7c334a50624ebb7c9f9d", size = 2560325, upload-time = "2026-04-15T20:31:58.318Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/9f/46ba0541a4f2c9f473b6841983a5760dd18462ddcf702f6569328aa9de01/blastdns-1.9.1-pp311-pypy311_pp73-manylinux_2_28_ppc64le.whl", hash = "sha256:7d189845c2982e354127426923fdda4342cf083ce5b75b4f8276ca7d9bfc196a", size = 3522302, upload-time = "2026-04-15T20:31:34.43Z" }, + { url = "https://files.pythonhosted.org/packages/b9/9d/e2fdd3f5bfddf50b37d243fcfced48a3352d1e573c7f025229c67e3bbf00/blastdns-1.9.1-pp311-pypy311_pp73-manylinux_2_28_s390x.whl", hash = "sha256:1b41c108de569a822697dab53e15e2bc30fb59d3546c172903f9e66c19826282", size = 2510299, upload-time = "2026-04-15T20:31:47.831Z" }, + { url = "https://files.pythonhosted.org/packages/fc/10/a981c619876586b6b4a9763e80eea405ff7f3917ab3b5d35bc9db2565123/blastdns-1.9.1-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fbab1fe79723547ee03c861368769e38605e87cfb8be2858f60ad497aaca2520", size = 2444480, upload-time = "2026-04-15T20:32:09.231Z" }, + { url = "https://files.pythonhosted.org/packages/45/0d/7451f1a7bea17f39b25fe3ddbc74271c31c0b8b81b84cf5fb5b8f01ba1ed/blastdns-1.9.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:378e95b2723eabf23ffb04ed0bd326aa5661d4ae3396a8326ad2ffcc324d4bff", size = 2629253, upload-time = "2026-04-15T20:32:35.696Z" }, + { url = "https://files.pythonhosted.org/packages/b7/a1/ef1c8049ada69185f6276e42c33e50d18cea464037195bbf9260a3da8d64/blastdns-1.9.1-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:e4facab544265314dbf212ee015138ec823529e006b306e030ad5a4c2d4a6ec3", size = 2598351, upload-time = "2026-04-15T20:32:48.896Z" }, + { url = "https://files.pythonhosted.org/packages/d9/b4/dbc258187d079a86487f9b26761430f8f4ff531814d710caab66b388f402/blastdns-1.9.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:62269fee131b31f401dabe16f427dedf4ac6c0c5d7a302a52a7d685456772385", size = 2679763, upload-time = "2026-04-15T20:33:02.936Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/01/480f3c5e54abf85b6accdc25635a10428720f9f2fa6b95c89d59e35f2912/blastdns-1.9.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:64581573a438d95f51e297aa6790a85f49ed00fcb2afdf5a459b911dce970619", size = 2694305, upload-time = "2026-04-15T20:33:17.253Z" }, +] + [[package]] name = "cachetools" version = "6.2.6"