From c92aff1c43f1ce7c6319aede44923c72c66c488d Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Sat, 28 Mar 2026 23:50:41 -0500 Subject: [PATCH 1/9] FIRES Integration And Retractions --- .gitignore | 4 + README.md | 208 +++++++++- etc/sample.fediblockhole.conf.toml | 41 +- src/fediblockhole/__init__.py | 282 +++++++++++++ src/fediblockhole/fires.py | 477 ++++++++++++++++++++++ tests/fixtures/data-fires-changes.json | 41 ++ tests/fixtures/data-fires-snapshot.json | 62 +++ tests/test_fires.py | 501 ++++++++++++++++++++++++ uv.lock | 353 ++++++++--------- 9 files changed, 1790 insertions(+), 179 deletions(-) create mode 100644 .gitignore mode change 100755 => 100644 src/fediblockhole/__init__.py create mode 100644 src/fediblockhole/fires.py create mode 100644 tests/fixtures/data-fires-changes.json create mode 100644 tests/fixtures/data-fires-snapshot.json create mode 100644 tests/test_fires.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..88ba5ea --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +.DS_Store +__pycache__/ +*.pyc + diff --git a/README.md b/README.md index 61f5d8e..be0bc5d 100644 --- a/README.md +++ b/README.md @@ -29,6 +29,7 @@ cost. - Read domain block lists from arbitrary URLs, including local files. - Supports CSV and JSON format blocklists - Supports RapidBlock CSV and JSON format blocklists + - Consume moderation recommendations from [FIRES](https://github.com/fedimod/fires) datasets, with support for retractions, labels, and incremental polling ### Blocklist Export/Push @@ -184,10 +185,11 @@ Or you can use the default location of `/etc/default/fediblockhole.conf.toml`. As the filename suggests, FediBlockHole uses TOML syntax. -There are 4 key sections: +There are 5 key sections: - 1. `blocklist_urls_sources`: A list of URLs to read blocklists from + 1. `blocklist_url_sources`: A list of URLs to read blocklists from 1. `blocklist_instance_sources`: A list of Mastodon instances to read blocklists from via API + 1. 
`blocklist_fires_sources`: A list of FIRES servers/datasets to read moderation data from 1. `blocklist_instance_destinations`: A list of Mastodon instances to write blocklists to via API 1. `allowlist_url_sources`: A list of URLs to read allowlists from @@ -381,6 +383,208 @@ times to allow multiple domains. It is probably wise to include your own instance domain in an allowlist so you don't accidentally defederate from yourself. +## FIRES Integration + +FediBlockHole can consume moderation data from [FIRES](https://github.com/fedimod/fires) +(Fediverse Intelligence Replication Endpoint Server) datasets. FIRES is an open +protocol for sharing moderation recommendations across the Fediverse, providing +structured data with labels, policies, change tracking, and retractions. + +### How it works + +FIRES datasets publish moderation recommendations as structured data. Each +recommendation includes a domain, a policy (`drop`, `reject`, `filter`, or +`accept`), and optional labels describing why the recommendation exists. + +FediBlockHole maps these to Mastodon block semantics: + + - **drop** and **reject** become `suspend` blocks + - **filter** becomes a `silence` block + - **accept** feeds into the allowlist pipeline (see below) + - **Retractions** remove a domain from that source's contribution + +Only recommendations with `entityKind` of `domain` are processed. FIRES also +supports `actor`-level recommendations, but Mastodon's domain block API operates +at the domain level, so actor recommendations are silently skipped. + +FIRES changes come in four types, each handled differently: + + - **Recommendation**: Creates or updates a block. This is the actionable one. + - **Advisory**: Informational only — no block is created. If a domain is + downgraded from Recommendation to Advisory, it effectively falls out of + the blocklist (a soft retraction without fully removing it from the dataset). + - **Retraction**: The source explicitly says "stop blocking this." 
The domain + is removed from the source's contribution and, if `retractions = true`, + can be deleted from the server. + - **Tombstone**: Historical record cleanup. Silently skipped. + +Each FIRES dataset counts as one source for threshold calculations. If you +subscribe to 3 FIRES datasets and 2 CSV blocklists, a domain needs to appear +in `threshold` of those 5 sources to be included in the merged blocklist. + +### Configuration + +Add FIRES sources to your config file using the `blocklist_fires_sources` list. +Three formats are supported: + +```toml +blocklist_fires_sources = [ + # Discover and fetch all datasets from a FIRES server + { server = 'https://fires.example.com' }, + + # Fetch specific datasets by UUID from a server + { server = 'https://fires.example.com', datasets = ['uuid-1', 'uuid-2'] }, + + # Paste a dataset URL directly + { url = 'https://other-fires.example/datasets/019d3565-f022-abbc-c43d649f294b' }, +] +``` + +FIRES datasets are public, so no authentication is required to read them. + +Optional per-source settings: + + - `max_severity`: Cap the maximum severity applied (e.g., `'silence'`). Defaults to `'suspend'`. + - `ignore_accept`: When `true`, silently skip any `accept` policies from this source. Defaults to `false`. + - `retractions`: When `true`, honor retractions from this source by removing blocks from your instance. See the Retractions section below. Defaults to `false`. + +### State tracking and retractions + +FediBlockHole maintains a JSON state file to track its position in each +dataset's changes feed. On the first run, it fetches the full snapshot. On +subsequent runs, it polls only for new changes since the last run. + +When a FIRES dataset publishes a **retraction** (meaning "we no longer recommend +blocking this domain"), FediBlockHole removes that domain from the source's +contribution to the merge. If other sources still recommend blocking it, the +block remains. Retractions only affect the source that issued them. 
+ +The state file defaults to `~/.fediblockhole/fires_state.json`. You can override +this with the `fires_state_file` config option or the `--fires-state-file` +commandline flag. + +### Labels as comments + +FIRES recommendations include labels from the +[IFTAS shared vocabulary](https://about.iftas.org/library/shared-vocabulary-labels/) +(e.g., "Hate Speech", "CSAM", "Spam"). These are mapped to the `public_comment` +field on domain blocks, so instance admins can see why a domain was recommended +for blocking. + +### The `accept` policy + +The FIRES protocol includes an `accept` policy for recommending that a domain +*should* be federated with. FediBlockHole handles `accept` the same way it +handles its existing allowlists: domains with an `accept` recommendation are +removed from the merged blocklist before it is pushed to instances. + +This means an `accept` from a FIRES dataset acts as an override, the same as +adding a domain to a CSV allowlist. It does not call any instance API to +explicitly allow the domain — it simply prevents it from being blocked. + +If you don't want FIRES `accept` policies to influence your blocklist at all, +set `ignore_accept = true` on the source: + +```toml +blocklist_fires_sources = [ + { server = 'https://fires.example.com', ignore_accept = true }, +] +``` + +With `ignore_accept` enabled, `accept` recommendations are silently skipped. +Block recommendations (`drop`, `reject`, `filter`) and retractions still work +normally. + +### Retractions: removing blocks that are no longer recommended + +Historically, FediBlockHole has been additive — it adds and updates blocks but +never removes them. This is safe but means blocks stay on your instance forever, +even if every source stops recommending them. + +FIRES changes this by providing the state that blocklists never had. When a +FIRES dataset retracts a recommendation, FediBlockHole can now act on it. 
+ +There are two retraction mechanisms, and they can be used together: + +#### Source-level retractions (`retractions = true`) + +This is the FIRES-native approach. When a trusted FIRES source explicitly +retracts a domain, the block is removed from your instance — **regardless of +who originally added it** — as long as no other source in your merged list still +recommends blocking it. + +This is dataset-level trust: you're saying "I trust this feed's judgment, +including its judgment that something should come off." + +```toml +blocklist_fires_sources = [ + { server = 'https://fires.trusted.example', retractions = true }, + { url = 'https://other-fires.example/datasets/uuid', retractions = true }, +] +``` + +The safeguard is the merge: if *any* other source (FIRES, CSV, instance) still +recommends blocking that domain, the retraction is countered and the block stays. + +#### General retractions (`apply_retractions = true`) + +This is a broader mechanism that works with any source type, not just FIRES. +When enabled, blocks that exist on your instance but are no longer in *any* +source are removed — but only if they were originally added by FediBlockHole. + +This requires `override_private_comment` to be set, so FediBlockHole can +identify its own blocks by matching the stamp in `private_comment`. Blocks added +manually by the admin (with a different or no private comment) are never touched. + +```toml +override_private_comment = 'Added by FediBlockHole' +apply_retractions = true +``` + +This can also be set per-destination instance: + +```toml +blocklist_instance_destinations = [ + { domain = 'myinstance.social', token = '...', apply_retractions = true }, +] +``` + +#### A note on general retractions and reliability + +The general `apply_retractions` mechanism compares the merged list against what's +on your server. 
If a source goes offline or a URL is temporarily unreachable, +domains from that source will be absent from the merge, and `apply_retractions` +could remove them from your server even though nothing was actually retracted. + +For this reason, it's often best to write the merged blocklist to a file first +(`blocklist_savefile`), review it, and then apply it in a separate run. Reading +from the filesystem is reliable — remote sources are not. + +FIRES source-level retractions (`retractions = true`) don't have this problem. +They only act on domains that a FIRES dataset *explicitly* retracted via a +`Retraction` change entry. A dataset being unreachable doesn't generate +retractions — it just means no new changes are processed that run. + +#### How they differ + +| | Source retractions | General retractions | +|---|---|---| +| Trigger | FIRES dataset explicitly retracts a domain | Domain falls out of all sources | +| Scope | Removes any matching block on the instance | Only removes blocks FediBlockHole added | +| Requires `override_private_comment` | No | Yes | +| Requires `retractions = true` on source | Yes | No (global or per-destination) | +| Works with CSV/instance sources | No (FIRES only) | Yes (any source) | + +Both mechanisms respect the merge: if any source still recommends blocking a +domain, the block stays. Use `--dryrun` to preview what would be removed +without actually deleting anything. + +### Commandline flags + + - `--no-fetch-fires`: Skip fetching from FIRES datasets even if configured. + - `--fires-state-file `: Override the state file location. + - `--apply-retractions`: Enable retraction-based block removal (see above). 
+ ## More advanced configuration For a list of possible configuration options, check the `--help` and read the diff --git a/etc/sample.fediblockhole.conf.toml b/etc/sample.fediblockhole.conf.toml index e18d76b..b773b5b 100644 --- a/etc/sample.fediblockhole.conf.toml +++ b/etc/sample.fediblockhole.conf.toml @@ -24,6 +24,36 @@ blocklist_url_sources = [ ] +# List of FIRES sources to read datasets from +# Three formats are supported: +# 1. Server-wide: fetch all datasets from a FIRES server +# { server = 'https://fires.example.com' } +# 2. Cherry-pick: fetch specific datasets from a server by UUID +# { server = 'https://fires.example.com', datasets = ['uuid-1', 'uuid-2'] } +# 3. Direct URL: paste a dataset URL directly +# { url = 'https://fires.example.com/datasets/uuid-1' } +# +# FIRES policies are mapped to Mastodon severities: +# drop/reject -> suspend, filter -> silence +# Each dataset counts as one source for threshold calculations. +# Optional: max_severity to cap the highest severity applied. 
+blocklist_fires_sources = [ + # { server = 'https://fires.example.com' }, # all datasets on this server + # { server = 'https://fires.example.com', datasets = ['dataset-uuid-1', 'dataset-uuid-2'] }, + # { url = 'https://other-fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' }, + # { url = 'https://other-fires.example/datasets/019d3565-f022-777b-abbc-aabbccddeeff', max_severity = 'silence' }, + # { server = 'https://fires.example.com', ignore_accept = true }, # ignore 'accept' policies + # { server = 'https://fires.example.com', retractions = true }, # honor retractions from this source + # { url = 'https://trusted-fires.example/datasets/uuid', retractions = true }, +] + +## Path to the FIRES state file for tracking change cursors and retractions +## Defaults to ~/.fediblockhole/fires_state.json +# fires_state_file = '/path/to/fires_state.json' + +## Don't fetch from FIRES datasets, even if they're defined above +# no_fetch_fires = false + ## These global allowlists override blocks from blocklists # These are the same format and structure as blocklists, but they take precedence allowlist_url_sources = [ @@ -86,7 +116,16 @@ blocklist_instance_destinations = [ ## set an override private comment to be added when pushing a NEW block to an instance # this does not require importing private comments -# override_private_comment = 'Added by Fediblock Sync' +# When using apply_retractions, this stamp is used to identify which blocks +# FediBlockHole added (so it only removes blocks it created, not manual ones). +# override_private_comment = 'Added by FediBlockHole' + +## When enabled, blocks that were added by FediBlockHole but are no longer +## in any source (due to FIRES retractions or sources dropping a domain) +## will be removed from destination instances. +## Requires override_private_comment to be set so we can identify our blocks. +## Can also be set per-destination instance. 
+# apply_retractions = false ## Set which fields we import ## 'domain' and 'severity' are always imported, these are additional diff --git a/src/fediblockhole/__init__.py b/src/fediblockhole/__init__.py old mode 100755 new mode 100644 index 6ce236a..8f9e635 --- a/src/fediblockhole/__init__.py +++ b/src/fediblockhole/__init__.py @@ -17,6 +17,7 @@ from .blocklists import BlockAuditList, Blocklist, parse_blocklist from .const import BlockAudit, BlockSeverity, DomainBlock +from .fires import FIRESState, fetch_fires_blocklist __version__ = version("fediblockhole") @@ -96,6 +97,21 @@ def sync_blocklists(conf: argparse.Namespace): ) ) + # Fetch blocklists (and allowlists) from FIRES datasets + fires_allowlists = [] + fires_retractions = set() # domains retracted by trusted FIRES sources + if not conf.no_fetch_fires: + fires_blocks, fires_allows, fires_retractions = fetch_from_fires( + conf.blocklist_fires_sources, + conf.fires_state_file, + conf.save_intermediate, + conf.savedir, + export_fields, + conf.dryrun, + ) + blocklists.extend(fires_blocks) + fires_allowlists.extend(fires_allows) + # Merge blocklists into an update dict merged = merge_blocklists( blocklists, @@ -107,6 +123,11 @@ def sync_blocklists(conf: argparse.Namespace): # Remove items listed in allowlists, if any allowlists = fetch_allowlists(conf) + # fetch_allowlists returns a list of Blocklists or an empty Blocklist + if isinstance(allowlists, Blocklist): + allowlists = [allowlists] if len(allowlists) > 0 else [] + # Include any allowlists from FIRES 'accept' policies + allowlists.extend(fires_allowlists) merged = apply_allowlists(merged, conf, allowlists) # Save the final mergelist, if requested @@ -124,6 +145,10 @@ def sync_blocklists(conf: argparse.Namespace): max_followed_severity = BlockSeverity( dest.get("max_followed_severity", "silence") ) + apply_retractions = dest.get( + "apply_retractions", + conf.apply_retractions + ) push_blocklist( token, target, @@ -133,6 +158,8 @@ def sync_blocklists(conf: 
argparse.Namespace): max_followed_severity, scheme, conf.override_private_comment, + apply_retractions, + fires_retractions, ) @@ -238,6 +265,144 @@ def fetch_from_instances( return blocklists +def _parse_dataset_url(url: str) -> tuple: + """Extract server base URL and dataset ID from a full dataset URL. + + e.g. 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' + returns ('https://fires.example', '019d3565-f022-777b-abbc-c43d649f294b') + """ + url = url.rstrip("/") + # Find /datasets/ in the URL and split there + marker = "/datasets/" + idx = url.find(marker) + if idx == -1: + raise ValueError(f"Not a valid FIRES dataset URL (missing /datasets/): {url}") + server_url = url[:idx] + dataset_id = url[idx + len(marker):] + # Strip any trailing path segments (e.g. /snapshot, /changes) + if "/" in dataset_id: + dataset_id = dataset_id.split("/")[0] + return server_url, dataset_id + + +def fetch_from_fires( + fires_sources: list, + state_file: str = None, + save_intermediate: bool = False, + savedir: str = None, + export_fields: list = EXPORT_FIELDS, + dryrun: bool = False, +) -> list: + """Fetch blocklists from FIRES datasets + + Supports three source formats: + - { server = '...' } -- discover and fetch all datasets + - { server = '...', datasets = [...] } -- fetch specific datasets by ID + - { url = '...' 
} -- fetch a single dataset by full URL + + @param fires_sources: List of FIRES source configs from the TOML file + @param state_file: Path to the FIRES state file for cursor tracking + @param save_intermediate: Whether to save intermediate blocklists + @param savedir: Directory to save intermediate blocklists + @param export_fields: Fields to include when saving intermediate lists + @returns: A list of Blocklist objects + """ + log.info("Fetching domain blocks from FIRES datasets...") + blocklists = [] + allowlists = [] + trusted_retractions = set() # domains retracted by sources with retractions=true + + if not fires_sources: + return blocklists, allowlists, trusted_retractions + + from .fires import DEFAULT_STATE_FILE + state = FIRESState(state_file or DEFAULT_STATE_FILE) + + for source in fires_sources: + max_severity = source.get("max_severity", "suspend") + ignore_accept = source.get("ignore_accept", False) + honor_retractions = source.get("retractions", False) + + # Collect (server_url, dataset_id) pairs to fetch + fetch_list = [] + + if "url" in source: + # Direct dataset URL: parse it into server + dataset_id + try: + server_url, dataset_id = _parse_dataset_url(source["url"]) + fetch_list.append((server_url, dataset_id)) + except ValueError as e: + log.error(f"FIRES: {e}") + continue + + elif "server" in source: + server_url = source["server"].rstrip("/") + dataset_ids = source.get("datasets", []) + + if dataset_ids: + # Cherry-pick specific datasets by ID + for did in dataset_ids: + fetch_list.append((server_url, did)) + else: + # Discover all datasets on the server + log.info(f"FIRES: discovering datasets on {server_url}") + from .fires import FIRESClient + client = FIRESClient(server_url) + try: + datasets = client.get_datasets() + for ds in datasets: + ds_id_url = ds.get("id", "") + if ds_id_url: + ds_id = ds_id_url.rstrip("/").split("/")[-1] + fetch_list.append((server_url, ds_id)) + log.info(f"FIRES: found {len(fetch_list)} datasets") + except 
Exception as e: + log.error( + f"FIRES: could not discover datasets on {server_url}: {e}" + ) + continue + else: + log.warning( + "FIRES: source must have either 'server' or 'url'. Skipping." + ) + continue + + # Fetch each dataset + for srv, did in fetch_list: + try: + bl, al = fetch_fires_blocklist( + srv, did, state, + max_severity=max_severity, + ignore_accept=ignore_accept, + ) + blocklists.append(bl) + if len(al) > 0: + allowlists.append(al) + # Collect retractions from trusted sources + if honor_retractions: + dataset_url = f"{srv}/datasets/{did}" + trusted_retractions.update( + state.get_retractions(dataset_url) + ) + if save_intermediate: + save_intermediate_blocklist(bl, savedir, export_fields) + except Exception as e: + log.error( + f"FIRES: error fetching dataset {did} from {srv}: {e}" + ) + continue + + # Persist state after all datasets are processed + # Don't save state during dryrun — we want the next real run + # to see the same changes we just previewed + if not dryrun: + state.save() + else: + log.info("Dry run: not updating FIRES state file.") + + return blocklists, allowlists, trusted_retractions + + def merge_blocklists( blocklists: list[Blocklist], mergeplan: str = "max", @@ -659,15 +824,24 @@ def push_blocklist( max_followed_severity: BlockSeverity = BlockSeverity("silence"), scheme: str = "https", override_private_comment: str = None, + apply_retractions: bool = False, + fires_retractions: set = None, ): """Push a blocklist to a remote instance. Updates existing entries if they exist, creates new blocks if they don't. + If `apply_retractions` is True, blocks that exist on the server but are + no longer in the merged blocklist will be removed — but only if they + were originally added by FediBlockHole (identified by matching the + `override_private_comment` stamp in `private_comment`). + @param token: The Bearer token for OAUTH API authentication @param host: The instance host, FQDN or IP @param blocklist: A list of block definitions. 
They must include the domain. @param import_fields: A list of fields to import to the instances. + @param apply_retractions: If True, remove blocks we previously added that + are no longer in any source. Requires override_private_comment to be set. """ log.info(f"Pushing blocklist to host {host} ...") # Fetch the existing blocklist from the instance @@ -764,6 +938,84 @@ def push_blocklist( else: log.info("Dry run selected. Not adding block.") + # Apply retractions: remove blocks we added that are no longer recommended + if apply_retractions: + if not override_private_comment: + log.warning( + "apply_retractions is enabled but override_private_comment is not set. " + "Cannot safely identify which blocks were added by FediBlockHole. " + "Skipping retraction removal. Set override_private_comment to enable." + ) + else: + log.info(f"Checking for retracted blocks to remove from {host}...") + removed = 0 + for domain, serverblock in serverblocks.items(): + # Only remove blocks that we originally added + if not hasattr(serverblock, 'private_comment'): + continue + if serverblock.private_comment != override_private_comment: + continue + # If this block is still in the merged list, keep it + if domain in blocklist: + continue + + # This block was added by us but is no longer recommended + log.info( + f"Retraction: removing block for {domain} from {host} " + f"(was added by FediBlockHole, no longer in any source)" + ) + if not dryrun: + delete_block(token, host, serverblock.id, scheme) + time.sleep(API_CALL_DELAY) + removed += 1 + else: + log.info(f"Dry run: would remove block for {domain}") + removed += 1 + + if removed: + log.info(f"Removed {removed} retracted blocks from {host}") + else: + log.debug("No retracted blocks to remove.") + + # FIRES-sourced retractions: remove blocks from trusted feeds + # These don't require override_private_comment — if a trusted FIRES + # source says "retract this" and nothing else in the merged list + # counters it, the block gets removed 
regardless of who added it. + if fires_retractions: + log.info( + f"Checking {len(fires_retractions)} FIRES retractions " + f"against {host}..." + ) + removed = 0 + for domain, serverblock in serverblocks.items(): + if domain not in fires_retractions: + continue + # If this domain is still in the merged blocklist (another + # source still recommends it), don't remove it + if domain in blocklist: + log.debug( + f"FIRES retraction for {domain} countered by " + f"another source, keeping block." + ) + continue + + log.info( + f"FIRES retraction: removing block for {domain} from {host} " + f"(retracted by trusted FIRES source, not in any other source)" + ) + if not dryrun: + delete_block(token, host, serverblock.id, scheme) + time.sleep(API_CALL_DELAY) + removed += 1 + else: + log.info(f"Dry run: would remove block for {domain}") + removed += 1 + + if removed: + log.info( + f"Removed {removed} FIRES-retracted blocks from {host}" + ) + def load_config(configfile: str): """Augment commandline arguments with config file parameters @@ -948,6 +1200,17 @@ def augment_args(args, tomldata: str = None): conf.get("blocklist_instance_destinations", []) ) + args.blocklist_fires_sources = conf.get("blocklist_fires_sources", []) + + if not args.fires_state_file: + args.fires_state_file = conf.get("fires_state_file", None) + + if not args.no_fetch_fires: + args.no_fetch_fires = conf.get("no_fetch_fires", False) + + if not args.apply_retractions: + args.apply_retractions = conf.get("apply_retractions", False) + return args @@ -1041,6 +1304,25 @@ def setup_argparse(): action="store_true", help="Don't fetch from instances, even if configured.", ) + ap.add_argument( + "--no-fetch-fires", + dest="no_fetch_fires", + action="store_true", + help="Don't fetch from FIRES datasets, even if configured.", + ) + ap.add_argument( + "--fires-state-file", + dest="fires_state_file", + help="Path to FIRES state file for tracking change cursors.", + ) + ap.add_argument( + "--apply-retractions", + 
"""FIRES protocol support for FediBlockHole

Fetches moderation recommendations from FIRES (Fediverse Intelligence
Replication Endpoint Server) datasets and converts them to DomainBlocks
for merging into the standard FediBlockHole pipeline.

FIRES datasets publish snapshots (current state) and changes feeds
(incremental updates with retractions). This module supports both modes:

- First run: fetch the full snapshot
- Subsequent runs: poll the changes feed from the last-seen cursor
- Retractions: remove domains from the source's contribution

State is persisted in a JSON file so we can do incremental polling.
"""

from __future__ import annotations

import json
import logging
import os
from typing import Optional

import requests

from .blocklists import Blocklist
from .const import DomainBlock

log = logging.getLogger("fediblockhole")

# FIRES policy -> Mastodon severity mapping
# 'accept' is handled separately as an allowlist entry
POLICY_MAP = {
    "drop": "suspend",
    "reject": "suspend",
    "filter": "silence",
}

# Policies that indicate the domain should be allowed, not blocked
ALLOW_POLICIES = {"accept"}

# Default state file location
DEFAULT_STATE_FILE = os.path.expanduser("~/.fediblockhole/fires_state.json")

# Request timeout for FIRES API calls, in seconds
REQUEST_TIMEOUT = 30


class FIRESState:
    """Manages persistent state for FIRES dataset polling.

    Tracks the last-seen change cursor per dataset URL so we can
    do incremental polling on subsequent runs, plus the set of
    domains each dataset has retracted.
    """

    def __init__(self, filepath: str = DEFAULT_STATE_FILE):
        self.filepath = filepath
        self.data = self._load()

    def _load(self) -> dict:
        """Load state from disk, or return empty state."""
        if os.path.exists(self.filepath):
            try:
                with open(self.filepath, "r") as f:
                    return json.load(f)
            except (json.JSONDecodeError, IOError) as e:
                log.warning(f"Could not load FIRES state from {self.filepath}: {e}")
        return {}

    def save(self):
        """Persist state to disk, creating parent directories as needed."""
        # A bare filename has no directory component; os.makedirs("")
        # raises FileNotFoundError, so only create parents when present.
        state_dir = os.path.dirname(self.filepath)
        if state_dir:
            os.makedirs(state_dir, exist_ok=True)
        with open(self.filepath, "w") as f:
            json.dump(self.data, f, indent=2)

    def get_cursor(self, dataset_url: str) -> Optional[str]:
        """Get the last-seen changes cursor URL for a dataset."""
        return self.data.get(dataset_url, {}).get("cursor")

    def set_cursor(self, dataset_url: str, cursor: str):
        """Update the cursor for a dataset."""
        if dataset_url not in self.data:
            self.data[dataset_url] = {}
        self.data[dataset_url]["cursor"] = cursor

    def get_retractions(self, dataset_url: str) -> set:
        """Get the set of retracted domains for a dataset."""
        return set(self.data.get(dataset_url, {}).get("retractions", []))

    def add_retraction(self, dataset_url: str, domain: str):
        """Record a retraction for a domain."""
        if dataset_url not in self.data:
            self.data[dataset_url] = {}
        # Stored as a list (not a set) so the state stays JSON-serializable
        retractions = self.data[dataset_url].get("retractions", [])
        if domain not in retractions:
            retractions.append(domain)
        self.data[dataset_url]["retractions"] = retractions

    def remove_retraction(self, dataset_url: str, domain: str):
        """Remove a retraction (domain was re-recommended)."""
        if dataset_url in self.data:
            retractions = self.data[dataset_url].get("retractions", [])
            if domain in retractions:
                retractions.remove(domain)
                self.data[dataset_url]["retractions"] = retractions


class FIRESClient:
    """HTTP client for consuming public FIRES protocol endpoints.

    FIRES datasets are public, so no authentication is performed.
    """

    def __init__(self, base_url: str):
        self.base_url = base_url.rstrip("/")

    def _headers(self) -> dict:
        """Common request headers for all FIRES calls."""
        return {
            "Accept": "application/ld+json, application/json",
            "User-Agent": "FediBlockHole-FIRES/1.0",
        }

    def _get(self, url: str) -> dict:
        """Fetch a URL and return parsed JSON.

        @raises ValueError: On any non-200 response
        """
        log.debug(f"FIRES fetch: {url}")
        response = requests.get(
            url, headers=self._headers(), timeout=REQUEST_TIMEOUT
        )
        if response.status_code != 200:
            log.error(f"FIRES request failed: {response.status_code} {url}")
            raise ValueError(
                f"FIRES request failed: {response.status_code}: {response.content}"
            )
        return response.json()

    def get_datasets(self) -> list:
        """List all datasets on the server."""
        data = self._get(f"{self.base_url}/datasets")
        # The datasets collection uses 'items', not 'orderedItems'
        return data.get("items", data.get("orderedItems", []))

    def get_snapshot(self, dataset_id: str) -> dict:
        """Fetch the current snapshot for a dataset."""
        return self._get(f"{self.base_url}/datasets/{dataset_id}/snapshot")

    def get_changes(self, dataset_id: str, since: str = None) -> dict:
        """Fetch a page of changes for a dataset, optionally since a cursor."""
        url = f"{self.base_url}/datasets/{dataset_id}/changes"
        if since:
            url += f"?since={since}"
        return self._get(url)

    def get_changes_from_url(self, url: str) -> dict:
        """Fetch changes from a full URL (for pagination)."""
        return self._get(url)

    def get_labels(self) -> dict:
        """Fetch the labels collection."""
        return self._get(f"{self.base_url}/labels")


def fires_policy_to_severity(policy: str) -> str:
    """Map a FIRES recommended policy to a Mastodon severity.

    drop/reject -> suspend
    filter -> silence
    unknown -> suspend (safe default)
    """
    return POLICY_MAP.get(policy, "suspend")


def fires_labels_to_comment(labels: list, label_names: dict) -> str:
    """Convert FIRES label URLs/IDs to a human-readable comment string.

    @param labels: List of label URLs or UUIDs from the FIRES response
    @param label_names: Dict mapping label URL/ID -> human-readable name
    @returns: Comma-separated string of label names
    """
    names = []
    for label_ref in labels:
        if not label_ref:
            # Skip empty refs so we don't emit stray commas in the comment
            continue
        if label_ref in label_names:
            names.append(label_names[label_ref])
        else:
            # Fall back to the last URL path segment as a slug
            names.append(label_ref.rstrip("/").split("/")[-1])
    return ", ".join(names)


def build_label_map(client: FIRESClient) -> dict:
    """Fetch labels from the FIRES server and build an ID -> name map.

    Best-effort: on any error an empty (or partial) map is returned so
    that label resolution degrades to slugs rather than failing the run.
    """
    label_map = {}
    try:
        labels_data = client.get_labels()
        for item in labels_data.get("items", []):
            label_id = item.get("id", "")
            # Prefer nameMap.en over flat name
            name_map = item.get("nameMap")
            if name_map and isinstance(name_map, dict):
                name = name_map.get("en", name_map.get("en-US", ""))
                if not name:
                    # Grab the first available localization
                    name = next(iter(name_map.values()), "")
            else:
                name = item.get("name", "")
            if label_id and name:
                label_map[label_id] = name
    except Exception as e:
        log.warning(f"Could not fetch FIRES labels: {e}")
    return label_map
+ + @param snapshot: The parsed JSON snapshot from the FIRES API + @param origin: Origin string for the blocklist + @param label_map: Dict mapping label IDs to names + @param max_severity: Maximum severity to apply + @param retractions: Set of domains to exclude (previously retracted) + @returns: Tuple of (Blocklist, Blocklist) where the second is the allowlist + """ + if retractions is None: + retractions = set() + + blocklist = Blocklist(origin) + allowlist = Blocklist(origin) + items = snapshot.get("orderedItems", []) + + for item in items: + item_type = item.get("type", "") + entity_kind = item.get("entityKind", "") + domain = item.get("entityKey", "") + + # We only handle domain recommendations + if entity_kind != "domain" or not domain: + continue + + # Only Recommendations are actionable blocks. + # Retractions, Advisories, and Tombstones are skipped: + # - Retraction: latest action was a retraction, not an active block + # - Advisory: informational only, no recommended action + # - Tombstone: historical cleanup + if item_type != "Recommendation": + continue + + # Skip domains that have been retracted via changes feed + if domain in retractions: + continue + + # Map FIRES policy to Mastodon severity + policy = item.get("recommendedPolicy", "drop") + + # Accept policy -> allowlist (unless ignored) + if policy in ALLOW_POLICIES: + if not ignore_accept: + allowlist.blocks[domain] = DomainBlock( + domain=domain, + severity="noop", + public_comment=fires_labels_to_comment( + item.get("labels", []), label_map + ), + ) + continue + + severity = fires_policy_to_severity(policy) + + # Build a comment from labels + labels = item.get("labels", []) + public_comment = fires_labels_to_comment(labels, label_map) + + block = DomainBlock( + domain=domain, + severity=severity, + public_comment=public_comment, + ) + + # Apply max_severity cap + from .const import BlockSeverity + max_sev = BlockSeverity(max_severity) + if block.severity > max_sev: + block.severity = max_sev + + 
blocklist.blocks[domain] = block + + return blocklist, allowlist + + +def apply_changes( + blocklist: Blocklist, + allowlist: Blocklist, + changes: list, + label_map: dict, + state: FIRESState, + dataset_url: str, + max_severity: str = "suspend", + ignore_accept: bool = False, +) -> tuple: + """Apply a list of FIRES change items to existing blocklist and allowlist. + + Recommendations with accept policy go to the allowlist. + Other recommendations add/update blocklist entries. + Retractions remove entries from both lists and record in state. + + @param blocklist: The existing blocklist to modify + @param allowlist: The existing allowlist to modify + @param changes: List of change items from the FIRES changes feed + @param label_map: Dict mapping label IDs to names + @param state: FIRESState for recording retractions + @param dataset_url: The dataset URL key for state tracking + @param max_severity: Maximum severity to apply + @returns: Tuple of (blocklist, allowlist) + """ + from .const import BlockSeverity + + for item in changes: + item_type = item.get("type", "") + entity_kind = item.get("entityKind", "") + domain = item.get("entityKey", "") + + if entity_kind != "domain" or not domain: + continue + + if item_type == "Recommendation": + policy = item.get("recommendedPolicy", "drop") + labels = item.get("labels", []) + public_comment = fires_labels_to_comment(labels, label_map) + + if policy in ALLOW_POLICIES: + if not ignore_accept: + # Accept -> allowlist, remove from blocklist if present + allowlist.blocks[domain] = DomainBlock( + domain=domain, + severity="noop", + public_comment=public_comment, + ) + if domain in blocklist.blocks: + del blocklist.blocks[domain] + else: + # Block recommendation + severity = fires_policy_to_severity(policy) + block = DomainBlock( + domain=domain, + severity=severity, + public_comment=public_comment, + ) + max_sev = BlockSeverity(max_severity) + if block.severity > max_sev: + block.severity = max_sev + blocklist.blocks[domain] 
= block + + # If it was on the allowlist, remove it + if domain in allowlist.blocks: + del allowlist.blocks[domain] + + # If this domain was previously retracted, undo that + state.remove_retraction(dataset_url, domain) + + elif item_type == "Retraction": + # Remove from both lists and record the retraction + if domain in blocklist.blocks: + log.info(f"FIRES retraction: removing {domain} from blocklist") + del blocklist.blocks[domain] + if domain in allowlist.blocks: + log.info(f"FIRES retraction: removing {domain} from allowlist") + del allowlist.blocks[domain] + state.add_retraction(dataset_url, domain) + + return blocklist, allowlist + + +def fetch_fires_blocklist( + server_url: str, + dataset_id: str, + state: FIRESState, + max_severity: str = "suspend", + max_pages: int = 50, + ignore_accept: bool = False, +) -> tuple: + """Fetch a blocklist and allowlist from a FIRES dataset. + + On first run (no cursor in state), fetches the full snapshot. + On subsequent runs, polls the changes feed from the last cursor. + + Recommendations with 'accept' policy go to the allowlist. + All other recommendations go to the blocklist. 
+ + @param server_url: Base URL of the FIRES server + @param dataset_id: UUID of the dataset to fetch + @param state: FIRESState for cursor and retraction tracking + @param max_severity: Maximum severity cap + @param max_pages: Maximum number of changes pages to walk + @returns: Tuple of (Blocklist, Blocklist) where the second is the allowlist + """ + client = FIRESClient(server_url) + dataset_url = f"{server_url}/datasets/{dataset_id}" + + # Build label name lookup + label_map = build_label_map(client) + + # Check for existing cursor + cursor = state.get_cursor(dataset_url) + retractions = state.get_retractions(dataset_url) + + if cursor is None: + # First run: fetch the full snapshot + log.info(f"FIRES: fetching full snapshot for dataset {dataset_id}") + snapshot = client.get_snapshot(dataset_id) + + blocklist, allowlist = snapshot_to_blocklist( + snapshot, dataset_url, label_map, max_severity, retractions, + ignore_accept + ) + + # Save the cursor from the snapshot for next time + changes_url = snapshot.get("changes") + if changes_url: + state.set_cursor(dataset_url, changes_url) + + log.info( + f"FIRES: snapshot loaded {len(blocklist)} blocks, " + f"{len(allowlist)} allows from {dataset_url}" + ) + + else: + # Incremental: start from snapshot, then apply changes + log.info(f"FIRES: incremental update for dataset {dataset_id}") + snapshot = client.get_snapshot(dataset_id) + blocklist, allowlist = snapshot_to_blocklist( + snapshot, dataset_url, label_map, max_severity, retractions, + ignore_accept + ) + + # Walk the changes feed from our cursor + all_changes = [] + page_url = cursor + pages = 0 + + while page_url and pages < max_pages: + log.debug(f"FIRES: fetching changes page {pages + 1}") + page = client.get_changes_from_url(page_url) + items = page.get("orderedItems", []) + + if not items: + break + + all_changes.extend(items) + page_url = page.get("next") + pages += 1 + + if all_changes: + log.info( + f"FIRES: applying {len(all_changes)} changes from {pages} 
pages" + ) + blocklist, allowlist = apply_changes( + blocklist, allowlist, all_changes, label_map, + state, dataset_url, max_severity, ignore_accept + ) + + # Update cursor to the latest position + new_cursor = snapshot.get("changes") + if new_cursor: + state.set_cursor(dataset_url, new_cursor) + + log.info( + f"FIRES: incremental update complete, {len(blocklist)} blocks, " + f"{len(allowlist)} allows from {dataset_url}" + ) + + return blocklist, allowlist diff --git a/tests/fixtures/data-fires-changes.json b/tests/fixtures/data-fires-changes.json new file mode 100644 index 0000000..5906f1d --- /dev/null +++ b/tests/fixtures/data-fires-changes.json @@ -0,0 +1,41 @@ +{ + "@context": "https://fires.fedimod.org/ns", + "id": "http://localhost:4444/datasets/test-dataset-001/changes?since=019d358a-a17e-76ad-abbe-052cd3048c98", + "partOf": "http://localhost:4444/datasets/test-dataset-001/changes", + "type": "OrderedCollectionPage", + "dataset": "http://localhost:4444/datasets/test-dataset-001", + "totalItems": 3, + "orderedItems": [ + { + "id": "http://localhost:4444/datasets/test-dataset-001/changes/019d3565-f022-777b-abbc-000000000010", + "type": "Recommendation", + "published": "2026-03-28T18:00:00Z", + "entityKind": "domain", + "entityKey": "newbad.example", + "labels": ["http://localhost:4444/labels/label-uuid-disinformation"], + "recommendedPolicy": "drop", + "recommendedFilters": [] + }, + { + "id": "http://localhost:4444/datasets/test-dataset-001/changes/019d3565-f022-777b-abbc-000000000011", + "type": "Retraction", + "published": "2026-03-28T18:10:00Z", + "entityKind": "domain", + "entityKey": "spammer.example" + }, + { + "id": "http://localhost:4444/datasets/test-dataset-001/changes/019d3565-f022-777b-abbc-000000000012", + "type": "Recommendation", + "published": "2026-03-28T18:20:00Z", + "entityKind": "domain", + "entityKey": "troll.example", + "labels": [ + "http://localhost:4444/labels/label-uuid-troll", + "http://localhost:4444/labels/label-uuid-harassment", 
+ "http://localhost:4444/labels/label-uuid-hate-speech" + ], + "recommendedPolicy": "drop", + "recommendedFilters": [] + } + ] +} diff --git a/tests/fixtures/data-fires-snapshot.json b/tests/fixtures/data-fires-snapshot.json new file mode 100644 index 0000000..059831e --- /dev/null +++ b/tests/fixtures/data-fires-snapshot.json @@ -0,0 +1,62 @@ +{ + "@context": "https://fires.fedimod.org/ns", + "id": "http://localhost:4444/datasets/test-dataset-001/snapshot", + "type": "Collection", + "summary": "Snapshot for Test Dataset", + "dataset": "http://localhost:4444/datasets/test-dataset-001", + "totalItems": 5, + "updated": "2026-03-28T17:42:51Z", + "changes": "http://localhost:4444/datasets/test-dataset-001/changes?since=019d358a-a17e-76ad-abbe-052cd3048c98", + "orderedItems": [ + { + "id": "http://localhost:4444/datasets/test-dataset-001/changes/019d3565-f022-777b-abbc-000000000001", + "type": "Recommendation", + "published": "2026-03-28T17:00:00Z", + "entityKind": "domain", + "entityKey": "badactor.example", + "labels": ["http://localhost:4444/labels/label-uuid-hate-speech"], + "recommendedPolicy": "drop", + "recommendedFilters": [] + }, + { + "id": "http://localhost:4444/datasets/test-dataset-001/changes/019d3565-f022-777b-abbc-000000000002", + "type": "Recommendation", + "published": "2026-03-28T17:10:00Z", + "entityKind": "domain", + "entityKey": "spammer.example", + "labels": ["http://localhost:4444/labels/label-uuid-spam"], + "recommendedPolicy": "filter", + "recommendedFilters": ["auto-cw", "prevent-trending"] + }, + { + "id": "http://localhost:4444/datasets/test-dataset-001/changes/019d3565-f022-777b-abbc-000000000003", + "type": "Recommendation", + "published": "2026-03-28T17:20:00Z", + "entityKind": "domain", + "entityKey": "csam.example", + "labels": ["http://localhost:4444/labels/label-uuid-csam"], + "recommendedPolicy": "reject", + "recommendedFilters": [] + }, + { + "id": 
"http://localhost:4444/datasets/test-dataset-001/changes/019d3565-f022-777b-abbc-000000000004", + "type": "Recommendation", + "published": "2026-03-28T17:30:00Z", + "entityKind": "domain", + "entityKey": "troll.example", + "labels": [ + "http://localhost:4444/labels/label-uuid-troll", + "http://localhost:4444/labels/label-uuid-harassment" + ], + "recommendedPolicy": "filter", + "recommendedFilters": ["auto-cw", "auto-unlisted"] + }, + { + "id": "http://localhost:4444/datasets/test-dataset-001/changes/019d3565-f022-777b-abbc-000000000005", + "type": "Retraction", + "published": "2026-03-28T17:40:00Z", + "entityKind": "domain", + "entityKey": "redeemed.example" + } + ] +} diff --git a/tests/test_fires.py b/tests/test_fires.py new file mode 100644 index 0000000..aae220e --- /dev/null +++ b/tests/test_fires.py @@ -0,0 +1,501 @@ +"""Tests for FIRES protocol integration +""" +import json +import os +import tempfile + +from fediblockhole.blocklists import Blocklist +from fediblockhole.const import SeverityLevel +from fediblockhole.fires import ( + FIRESState, + apply_changes, + fires_labels_to_comment, + fires_policy_to_severity, + snapshot_to_blocklist, +) + + +# -- Label map used across tests -- + +LABEL_MAP = { + "http://localhost:4444/labels/label-uuid-hate-speech": "Hate Speech", + "http://localhost:4444/labels/label-uuid-spam": "Spam", + "http://localhost:4444/labels/label-uuid-csam": "CSAM", + "http://localhost:4444/labels/label-uuid-troll": "Troll", + "http://localhost:4444/labels/label-uuid-harassment": "Online Harassment", + "http://localhost:4444/labels/label-uuid-disinformation": "Disinformation", +} + + +def load_fixture(name): + path = os.path.join(os.path.dirname(__file__), "fixtures", name) + with open(path) as f: + return json.load(f) + + +# -- Policy mapping tests -- + + +def test_policy_drop_maps_to_suspend(): + assert fires_policy_to_severity("drop") == "suspend" + + +def test_policy_reject_maps_to_suspend(): + assert fires_policy_to_severity("reject") 
== "suspend" + + +def test_policy_filter_maps_to_silence(): + assert fires_policy_to_severity("filter") == "silence" + + +def test_policy_unknown_maps_to_suspend(): + assert fires_policy_to_severity("whatever") == "suspend" + + +# -- Label comment tests -- + + +def test_labels_to_comment(): + labels = [ + "http://localhost:4444/labels/label-uuid-hate-speech", + "http://localhost:4444/labels/label-uuid-troll", + ] + result = fires_labels_to_comment(labels, LABEL_MAP) + assert result == "Hate Speech, Troll" + + +def test_labels_to_comment_unknown_label(): + labels = ["http://localhost:4444/labels/unknown-uuid"] + result = fires_labels_to_comment(labels, LABEL_MAP) + assert result == "unknown-uuid" + + +def test_labels_to_comment_empty(): + result = fires_labels_to_comment([], LABEL_MAP) + assert result == "" + + +# -- Snapshot parsing tests -- + + +def test_snapshot_to_blocklist(): + snapshot = load_fixture("data-fires-snapshot.json") + bl, al = snapshot_to_blocklist(snapshot, "test-fires", LABEL_MAP) + + # Should have 4 domains (the retraction for redeemed.example is skipped) + assert len(bl) == 4 + assert "badactor.example" in bl + assert "spammer.example" in bl + assert "csam.example" in bl + assert "troll.example" in bl + assert "redeemed.example" not in bl + # No accept policies in fixture, so allowlist should be empty + assert len(al) == 0 + + +def test_snapshot_severity_mapping(): + snapshot = load_fixture("data-fires-snapshot.json") + bl, al = snapshot_to_blocklist(snapshot, "test-fires", LABEL_MAP) + + # drop -> suspend + assert bl["badactor.example"].severity.level == SeverityLevel.SUSPEND + # filter -> silence + assert bl["spammer.example"].severity.level == SeverityLevel.SILENCE + # reject -> suspend + assert bl["csam.example"].severity.level == SeverityLevel.SUSPEND + # filter -> silence + assert bl["troll.example"].severity.level == SeverityLevel.SILENCE + + +def test_snapshot_labels_as_comments(): + snapshot = load_fixture("data-fires-snapshot.json") + 
bl, al = snapshot_to_blocklist(snapshot, "test-fires", LABEL_MAP) + + assert bl["badactor.example"].public_comment == "Hate Speech" + assert bl["spammer.example"].public_comment == "Spam" + assert bl["csam.example"].public_comment == "CSAM" + assert bl["troll.example"].public_comment == "Troll, Online Harassment" + + +def test_snapshot_max_severity(): + snapshot = load_fixture("data-fires-snapshot.json") + bl, al = snapshot_to_blocklist(snapshot, "test-fires", LABEL_MAP, max_severity="silence") + + # Everything should be capped at silence + assert bl["badactor.example"].severity.level == SeverityLevel.SILENCE + assert bl["csam.example"].severity.level == SeverityLevel.SILENCE + + +def test_snapshot_respects_retractions(): + snapshot = load_fixture("data-fires-snapshot.json") + retractions = {"spammer.example"} + bl, al = snapshot_to_blocklist(snapshot, "test-fires", LABEL_MAP, retractions=retractions) + + assert "spammer.example" not in bl + assert len(bl) == 3 + + +# -- Changes feed tests -- + + +def test_apply_changes_add(): + changes = load_fixture("data-fires-changes.json") + bl = Blocklist("test-fires") + al = Blocklist("test-fires") + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + + bl, al = apply_changes( + bl, al, changes["orderedItems"], LABEL_MAP, state, "test-dataset" + ) + + # newbad.example added, troll.example added (updated) + assert "newbad.example" in bl + assert "troll.example" in bl + assert bl["newbad.example"].severity.level == SeverityLevel.SUSPEND + assert bl["newbad.example"].public_comment == "Disinformation" + + +def test_apply_changes_retraction(): + changes = load_fixture("data-fires-changes.json") + + # Start with spammer.example in the blocklist + bl = Blocklist("test-fires") + al = Blocklist("test-fires") + from fediblockhole.const import DomainBlock + bl.blocks["spammer.example"] = DomainBlock("spammer.example", "silence") + + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + + bl, al = 
apply_changes( + bl, al, changes["orderedItems"], LABEL_MAP, state, "test-dataset" + ) + + # spammer.example should be retracted + assert "spammer.example" not in bl + + # Should be recorded in state + assert "spammer.example" in state.get_retractions("test-dataset") + + +def test_apply_changes_severity_upgrade(): + """A recommendation can upgrade severity (troll.example: filter -> drop)""" + changes = load_fixture("data-fires-changes.json") + + bl = Blocklist("test-fires") + al = Blocklist("test-fires") + from fediblockhole.const import DomainBlock + bl.blocks["troll.example"] = DomainBlock("troll.example", "silence") + + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + + bl, al = apply_changes( + bl, al, changes["orderedItems"], LABEL_MAP, state, "test-dataset" + ) + + # troll.example should be upgraded to suspend (drop policy) + assert bl["troll.example"].severity.level == SeverityLevel.SUSPEND + # And should now have 3 labels + assert "Hate Speech" in bl["troll.example"].public_comment + + +def test_apply_changes_undo_retraction(): + """Re-recommending a retracted domain clears the retraction.""" + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + state.add_retraction("test-dataset", "newbad.example") + assert "newbad.example" in state.get_retractions("test-dataset") + + changes = load_fixture("data-fires-changes.json") + bl = Blocklist("test-fires") + al = Blocklist("test-fires") + + bl, al = apply_changes( + bl, al, changes["orderedItems"], LABEL_MAP, state, "test-dataset" + ) + + # newbad.example was recommended again, so retraction should be cleared + assert "newbad.example" not in state.get_retractions("test-dataset") + assert "newbad.example" in bl + + +# -- State file tests -- + + +def test_state_persistence(): + tmpdir = tempfile.mkdtemp() + filepath = os.path.join(tmpdir, "state.json") + + state = FIRESState(filepath) + state.set_cursor("http://fires.example/datasets/1", 
"http://fires.example/datasets/1/changes?since=abc") + state.add_retraction("http://fires.example/datasets/1", "bad.example") + state.save() + + # Reload from disk + state2 = FIRESState(filepath) + assert state2.get_cursor("http://fires.example/datasets/1") == "http://fires.example/datasets/1/changes?since=abc" + assert "bad.example" in state2.get_retractions("http://fires.example/datasets/1") + + +def test_state_empty_on_missing_file(): + state = FIRESState("/tmp/nonexistent_fires_state_test.json") + assert state.get_cursor("anything") is None + assert len(state.get_retractions("anything")) == 0 + + +# -- Accept policy / allowlist tests -- + + +def test_snapshot_accept_policy_to_allowlist(): + """Domains with 'accept' policy go to allowlist, not blocklist.""" + snapshot = { + "orderedItems": [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "good.example", + "recommendedPolicy": "accept", + "labels": [], + }, + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "bad.example", + "recommendedPolicy": "drop", + "labels": [], + }, + ] + } + bl, al = snapshot_to_blocklist(snapshot, "test", LABEL_MAP) + + assert "good.example" not in bl + assert "good.example" in al + assert al["good.example"].severity.level == SeverityLevel.NONE + + assert "bad.example" in bl + assert "bad.example" not in al + + +def test_apply_changes_accept_moves_to_allowlist(): + """A domain recommended with accept should move from blocklist to allowlist.""" + bl = Blocklist("test") + al = Blocklist("test") + from fediblockhole.const import DomainBlock + bl.blocks["reformed.example"] = DomainBlock("reformed.example", "suspend") + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "reformed.example", + "recommendedPolicy": "accept", + "labels": [], + } + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, "test") + + assert "reformed.example" 
not in bl + assert "reformed.example" in al + + +def test_apply_changes_block_removes_from_allowlist(): + """A block recommendation should remove domain from allowlist.""" + bl = Blocklist("test") + al = Blocklist("test") + from fediblockhole.const import DomainBlock + al.blocks["fallen.example"] = DomainBlock("fallen.example", "noop") + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "fallen.example", + "recommendedPolicy": "drop", + "labels": [], + } + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, "test") + + assert "fallen.example" in bl + assert "fallen.example" not in al + + +# -- ignore_accept tests -- + + +def test_snapshot_ignore_accept(): + """When ignore_accept=True, accept policies are silently skipped.""" + snapshot = { + "orderedItems": [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "good.example", + "recommendedPolicy": "accept", + "labels": [], + }, + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "bad.example", + "recommendedPolicy": "drop", + "labels": [], + }, + ] + } + bl, al = snapshot_to_blocklist(snapshot, "test", LABEL_MAP, ignore_accept=True) + + assert "good.example" not in bl + assert "good.example" not in al # not in allowlist either + assert "bad.example" in bl + + +def test_apply_changes_ignore_accept(): + """When ignore_accept=True, accept changes don't modify blocklist or allowlist.""" + bl = Blocklist("test") + al = Blocklist("test") + from fediblockhole.const import DomainBlock + bl.blocks["reformed.example"] = DomainBlock("reformed.example", "suspend") + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "reformed.example", + "recommendedPolicy": "accept", + "labels": [], + } + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, "test", + 
ignore_accept=True) + + # Should still be in blocklist, not moved to allowlist + assert "reformed.example" in bl + assert "reformed.example" not in al + + +# -- Advisory handling tests -- + + +def test_snapshot_skips_advisories(): + """Advisories are informational only, not actionable blocks.""" + snapshot = { + "orderedItems": [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "bad.example", + "recommendedPolicy": "drop", + "labels": [], + }, + { + "type": "Advisory", + "entityKind": "domain", + "entityKey": "watch.example", + "labels": ["http://localhost:4444/labels/label-uuid-spam"], + }, + ] + } + bl, al = snapshot_to_blocklist(snapshot, "test", LABEL_MAP) + + assert "bad.example" in bl + assert "watch.example" not in bl + assert "watch.example" not in al + + +def test_apply_changes_advisory_removes_block(): + """Downgrading from Recommendation to Advisory removes the block.""" + bl = Blocklist("test") + al = Blocklist("test") + from fediblockhole.const import DomainBlock + bl.blocks["downgraded.example"] = DomainBlock("downgraded.example", "suspend") + + # Advisory in changes feed — no recommendedPolicy, just labels + changes = [ + { + "type": "Advisory", + "entityKind": "domain", + "entityKey": "downgraded.example", + "labels": ["http://localhost:4444/labels/label-uuid-spam"], + } + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, "test") + + # Advisory doesn't create a block, and the snapshot would no longer + # have a Recommendation for this domain, so it falls out naturally. + # The changes feed advisory itself doesn't remove the block — that + # happens because the snapshot no longer includes a Recommendation. + # apply_changes only acts on Recommendation and Retraction types. 
+ assert "downgraded.example" in bl # still there from the blocklist + + +# -- Actor entity skipping tests -- + + +def test_snapshot_skips_actor_entities(): + """Only domain entities are processed, actors are skipped.""" + snapshot = { + "orderedItems": [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "bad.example", + "recommendedPolicy": "drop", + "labels": [], + }, + { + "type": "Recommendation", + "entityKind": "actor", + "entityKey": "baduser@some.instance", + "recommendedPolicy": "drop", + "labels": [], + }, + ] + } + bl, al = snapshot_to_blocklist(snapshot, "test", LABEL_MAP) + + assert "bad.example" in bl + assert len(bl) == 1 # actor was skipped + + +# -- URL parsing tests -- + + +def test_parse_dataset_url(): + from fediblockhole import _parse_dataset_url + + server, did = _parse_dataset_url( + "https://fires.example.com/datasets/019d3565-f022-777b-abbc-c43d649f294b" + ) + assert server == "https://fires.example.com" + assert did == "019d3565-f022-777b-abbc-c43d649f294b" + + +def test_parse_dataset_url_trailing_slash(): + from fediblockhole import _parse_dataset_url + + server, did = _parse_dataset_url( + "https://fires.example.com/datasets/019d3565-f022-777b-abbc-c43d649f294b/" + ) + assert server == "https://fires.example.com" + assert did == "019d3565-f022-777b-abbc-c43d649f294b" + + +def test_parse_dataset_url_with_snapshot_path(): + from fediblockhole import _parse_dataset_url + + server, did = _parse_dataset_url( + "https://fires.example.com/datasets/019d3565-f022-777b-abbc-c43d649f294b/snapshot" + ) + assert server == "https://fires.example.com" + assert did == "019d3565-f022-777b-abbc-c43d649f294b" + + +def test_parse_dataset_url_invalid(): + from fediblockhole import _parse_dataset_url + import pytest + + with pytest.raises(ValueError, match="missing /datasets/"): + _parse_dataset_url("https://fires.example.com/labels/something") diff --git a/uv.lock b/uv.lock index 374132a..b617497 100644 --- a/uv.lock +++ b/uv.lock @@ 
-1,185 +1,186 @@ version = 1 +revision = 3 requires-python = ">=3.8" [[package]] name = "certifi" version = "2024.8.30" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507, upload-time = "2024-08-30T01:55:04.365Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321, upload-time = "2024-08-30T01:55:02.591Z" }, ] [[package]] name = "charset-normalizer" version = "3.3.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/63/09/c1bc53dab74b1816a00d8d030de5bf98f724c52c1635e07681d312f20be8/charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", size = 104809 } +sdist = { url = "https://files.pythonhosted.org/packages/63/09/c1bc53dab74b1816a00d8d030de5bf98f724c52c1635e07681d312f20be8/charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5", size = 104809, upload-time = "2023-11-01T04:04:59.997Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2b/61/095a0aa1a84d1481998b534177c8566fdc50bb1233ea9a0478cd3cc075bd/charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", size = 194219 }, - { url = "https://files.pythonhosted.org/packages/cc/94/f7cf5e5134175de79ad2059edf2adce18e0685ebdb9227ff0139975d0e93/charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", size = 122521 }, - { url = "https://files.pythonhosted.org/packages/46/6a/d5c26c41c49b546860cc1acabdddf48b0b3fb2685f4f5617ac59261b44ae/charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", size = 120383 }, - { url = "https://files.pythonhosted.org/packages/b8/60/e2f67915a51be59d4539ed189eb0a2b0d292bf79270410746becb32bc2c3/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", size = 138223 }, - { url = "https://files.pythonhosted.org/packages/05/8c/eb854996d5fef5e4f33ad56927ad053d04dc820e4a3d39023f35cad72617/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", size = 148101 }, - { url = "https://files.pythonhosted.org/packages/f6/93/bb6cbeec3bf9da9b2eba458c15966658d1daa8b982c642f81c93ad9b40e1/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", size = 140699 }, - { url = "https://files.pythonhosted.org/packages/da/f1/3702ba2a7470666a62fd81c58a4c40be00670e5006a67f4d626e57f013ae/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", size = 142065 }, - { url = "https://files.pythonhosted.org/packages/3f/ba/3f5e7be00b215fa10e13d64b1f6237eb6ebea66676a41b2bcdd09fe74323/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", size = 144505 }, - { url = "https://files.pythonhosted.org/packages/33/c3/3b96a435c5109dd5b6adc8a59ba1d678b302a97938f032e3770cc84cd354/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", size = 139425 }, - { url = "https://files.pythonhosted.org/packages/43/05/3bf613e719efe68fb3a77f9c536a389f35b95d75424b96b426a47a45ef1d/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", size = 145287 }, - { url = "https://files.pythonhosted.org/packages/58/78/a0bc646900994df12e07b4ae5c713f2b3e5998f58b9d3720cce2aa45652f/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", size = 149929 }, - { url = "https://files.pythonhosted.org/packages/eb/5c/97d97248af4920bc68687d9c3b3c0f47c910e21a8ff80af4565a576bd2f0/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", size = 141605 }, - { url = "https://files.pythonhosted.org/packages/a8/31/47d018ef89f95b8aded95c589a77c072c55e94b50a41aa99c0a2008a45a4/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", size = 142646 }, - { url = "https://files.pythonhosted.org/packages/ae/d5/4fecf1d58bedb1340a50f165ba1c7ddc0400252d6832ff619c4568b36cc0/charset_normalizer-3.3.2-cp310-cp310-win32.whl", 
hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", size = 92846 }, - { url = "https://files.pythonhosted.org/packages/a2/a0/4af29e22cb5942488cf45630cbdd7cefd908768e69bdd90280842e4e8529/charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", size = 100343 }, - { url = "https://files.pythonhosted.org/packages/68/77/02839016f6fbbf808e8b38601df6e0e66c17bbab76dff4613f7511413597/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", size = 191647 }, - { url = "https://files.pythonhosted.org/packages/3e/33/21a875a61057165e92227466e54ee076b73af1e21fe1b31f1e292251aa1e/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", size = 121434 }, - { url = "https://files.pythonhosted.org/packages/dd/51/68b61b90b24ca35495956b718f35a9756ef7d3dd4b3c1508056fa98d1a1b/charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", size = 118979 }, - { url = "https://files.pythonhosted.org/packages/e4/a6/7ee57823d46331ddc37dd00749c95b0edec2c79b15fc0d6e6efb532e89ac/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", size = 136582 }, - { url = "https://files.pythonhosted.org/packages/74/f1/0d9fe69ac441467b737ba7f48c68241487df2f4522dd7246d9426e7c690e/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", size = 146645 }, - { url = 
"https://files.pythonhosted.org/packages/05/31/e1f51c76db7be1d4aef220d29fbfa5dbb4a99165d9833dcbf166753b6dc0/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", size = 139398 }, - { url = "https://files.pythonhosted.org/packages/40/26/f35951c45070edc957ba40a5b1db3cf60a9dbb1b350c2d5bef03e01e61de/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", size = 140273 }, - { url = "https://files.pythonhosted.org/packages/07/07/7e554f2bbce3295e191f7e653ff15d55309a9ca40d0362fcdab36f01063c/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", size = 142577 }, - { url = "https://files.pythonhosted.org/packages/d8/b5/eb705c313100defa57da79277d9207dc8d8e45931035862fa64b625bfead/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", size = 137747 }, - { url = "https://files.pythonhosted.org/packages/19/28/573147271fd041d351b438a5665be8223f1dd92f273713cb882ddafe214c/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", size = 143375 }, - { url = "https://files.pythonhosted.org/packages/cf/7c/f3b682fa053cc21373c9a839e6beba7705857075686a05c72e0f8c4980ca/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", size = 148474 }, - { url = "https://files.pythonhosted.org/packages/1e/49/7ab74d4ac537ece3bc3334ee08645e231f39f7d6df6347b29a74b0537103/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", size = 140232 }, - { url = "https://files.pythonhosted.org/packages/2d/dc/9dacba68c9ac0ae781d40e1a0c0058e26302ea0660e574ddf6797a0347f7/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", size = 140859 }, - { url = "https://files.pythonhosted.org/packages/6c/c2/4a583f800c0708dd22096298e49f887b49d9746d0e78bfc1d7e29816614c/charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", size = 92509 }, - { url = "https://files.pythonhosted.org/packages/57/ec/80c8d48ac8b1741d5b963797b7c0c869335619e13d4744ca2f67fc11c6fc/charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", size = 99870 }, - { url = "https://files.pythonhosted.org/packages/d1/b2/fcedc8255ec42afee97f9e6f0145c734bbe104aac28300214593eb326f1d/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", size = 192892 }, - { url = "https://files.pythonhosted.org/packages/2e/7d/2259318c202f3d17f3fe6438149b3b9e706d1070fe3fcbb28049730bb25c/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", size = 122213 }, - { url = "https://files.pythonhosted.org/packages/3a/52/9f9d17c3b54dc238de384c4cb5a2ef0e27985b42a0e5cc8e8a31d918d48d/charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", size = 119404 }, - { url = "https://files.pythonhosted.org/packages/99/b0/9c365f6d79a9f0f3c379ddb40a256a67aa69c59609608fe7feb6235896e1/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", size = 137275 }, - { url = "https://files.pythonhosted.org/packages/91/33/749df346e93d7a30cdcb90cbfdd41a06026317bfbfb62cd68307c1a3c543/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", size = 147518 }, - { url = "https://files.pythonhosted.org/packages/72/1a/641d5c9f59e6af4c7b53da463d07600a695b9824e20849cb6eea8a627761/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", size = 140182 }, - { url = "https://files.pythonhosted.org/packages/ee/fb/14d30eb4956408ee3ae09ad34299131fb383c47df355ddb428a7331cfa1e/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", size = 141869 }, - { url = "https://files.pythonhosted.org/packages/df/3e/a06b18788ca2eb6695c9b22325b6fde7dde0f1d1838b1792a0076f58fe9d/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", size = 144042 }, - { url = "https://files.pythonhosted.org/packages/45/59/3d27019d3b447a88fe7e7d004a1e04be220227760264cc41b405e863891b/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", size = 138275 }, - { url = "https://files.pythonhosted.org/packages/7b/ef/5eb105530b4da8ae37d506ccfa25057961b7b63d581def6f99165ea89c7e/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", size = 144819 }, - { url = 
"https://files.pythonhosted.org/packages/a2/51/e5023f937d7f307c948ed3e5c29c4b7a3e42ed2ee0b8cdf8f3a706089bf0/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", size = 149415 }, - { url = "https://files.pythonhosted.org/packages/24/9d/2e3ef673dfd5be0154b20363c5cdcc5606f35666544381bee15af3778239/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", size = 141212 }, - { url = "https://files.pythonhosted.org/packages/5b/ae/ce2c12fcac59cb3860b2e2d76dc405253a4475436b1861d95fe75bdea520/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", size = 142167 }, - { url = "https://files.pythonhosted.org/packages/ed/3a/a448bf035dce5da359daf9ae8a16b8a39623cc395a2ffb1620aa1bce62b0/charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", size = 93041 }, - { url = "https://files.pythonhosted.org/packages/b6/7c/8debebb4f90174074b827c63242c23851bdf00a532489fba57fef3416e40/charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", size = 100397 }, - { url = "https://files.pythonhosted.org/packages/ef/d4/a1d72a8f6aa754fdebe91b848912025d30ab7dced61e9ed8aabbf791ed65/charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", size = 191415 }, - { url = "https://files.pythonhosted.org/packages/13/82/83c188028b6f38d39538442dd127dc794c602ae6d45d66c469f4063a4c30/charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", size = 121051 }, - { url = 
"https://files.pythonhosted.org/packages/16/ea/a9e284aa38cccea06b7056d4cbc7adf37670b1f8a668a312864abf1ff7c6/charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", size = 119143 }, - { url = "https://files.pythonhosted.org/packages/34/2a/f392457d45e24a0c9bfc012887ed4f3c54bf5d4d05a5deb970ffec4b7fc0/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", size = 137506 }, - { url = "https://files.pythonhosted.org/packages/be/4d/9e370f8281cec2fcc9452c4d1ac513324c32957c5f70c73dd2fa8442a21a/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", size = 147272 }, - { url = "https://files.pythonhosted.org/packages/33/95/ef68482e4a6adf781fae8d183fb48d6f2be8facb414f49c90ba6a5149cd1/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", size = 139734 }, - { url = "https://files.pythonhosted.org/packages/3d/09/d82fe4a34c5f0585f9ea1df090e2a71eb9bb1e469723053e1ee9f57c16f3/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", size = 141094 }, - { url = "https://files.pythonhosted.org/packages/81/b2/160893421adfa3c45554fb418e321ed342bb10c0a4549e855b2b2a3699cb/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", size = 144113 }, - { url = "https://files.pythonhosted.org/packages/9e/ef/cd47a63d3200b232792e361cd67530173a09eb011813478b1c0fb8aa7226/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", size = 138555 }, - { url = "https://files.pythonhosted.org/packages/a8/6f/4ff299b97da2ed6358154b6eb3a2db67da2ae204e53d205aacb18a7e4f34/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", size = 144944 }, - { url = "https://files.pythonhosted.org/packages/d1/2f/0d1efd07c74c52b6886c32a3b906fb8afd2fecf448650e73ecb90a5a27f1/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", size = 148925 }, - { url = "https://files.pythonhosted.org/packages/bd/28/7ea29e73eea52c7e15b4b9108d0743fc9e4cc2cdb00d275af1df3d46d360/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", size = 140732 }, - { url = "https://files.pythonhosted.org/packages/b3/c1/ebca8e87c714a6a561cfee063f0655f742e54b8ae6e78151f60ba8708b3a/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", size = 141288 }, - { url = "https://files.pythonhosted.org/packages/74/20/8923a06f15eb3d7f6a306729360bd58f9ead1dc39bc7ea8831f4b407e4ae/charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", size = 92373 }, - { url = "https://files.pythonhosted.org/packages/db/fb/d29e343e7c57bbf1231275939f6e75eb740cd47a9d7cb2c52ffeb62ef869/charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", size = 99577 }, - { url = "https://files.pythonhosted.org/packages/f7/9d/bcf4a449a438ed6f19790eee543a86a740c77508fbc5ddab210ab3ba3a9a/charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", 
size = 194198 }, - { url = "https://files.pythonhosted.org/packages/66/fe/c7d3da40a66a6bf2920cce0f436fa1f62ee28aaf92f412f0bf3b84c8ad6c/charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", size = 122494 }, - { url = "https://files.pythonhosted.org/packages/2a/9d/a6d15bd1e3e2914af5955c8eb15f4071997e7078419328fee93dfd497eb7/charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", size = 120393 }, - { url = "https://files.pythonhosted.org/packages/3d/85/5b7416b349609d20611a64718bed383b9251b5a601044550f0c8983b8900/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", size = 138331 }, - { url = "https://files.pythonhosted.org/packages/79/66/8946baa705c588521afe10b2d7967300e49380ded089a62d38537264aece/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", size = 148097 }, - { url = "https://files.pythonhosted.org/packages/44/80/b339237b4ce635b4af1c73742459eee5f97201bd92b2371c53e11958392e/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", size = 140711 }, - { url = "https://files.pythonhosted.org/packages/98/69/5d8751b4b670d623aa7a47bef061d69c279e9f922f6705147983aa76c3ce/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", size = 142251 }, - { url = 
"https://files.pythonhosted.org/packages/1f/8d/33c860a7032da5b93382cbe2873261f81467e7b37f4ed91e25fed62fd49b/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", size = 144636 }, - { url = "https://files.pythonhosted.org/packages/c2/65/52aaf47b3dd616c11a19b1052ce7fa6321250a7a0b975f48d8c366733b9f/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", size = 139514 }, - { url = "https://files.pythonhosted.org/packages/51/fd/0ee5b1c2860bb3c60236d05b6e4ac240cf702b67471138571dad91bcfed8/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", size = 145528 }, - { url = "https://files.pythonhosted.org/packages/e1/9c/60729bf15dc82e3aaf5f71e81686e42e50715a1399770bcde1a9e43d09db/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", size = 149804 }, - { url = "https://files.pythonhosted.org/packages/53/cd/aa4b8a4d82eeceb872f83237b2d27e43e637cac9ffaef19a1321c3bafb67/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", size = 141708 }, - { url = "https://files.pythonhosted.org/packages/54/7f/cad0b328759630814fcf9d804bfabaf47776816ad4ef2e9938b7e1123d04/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561", size = 142708 }, - { url = "https://files.pythonhosted.org/packages/c1/9d/254a2f1bcb0ce9acad838e94ed05ba71a7cb1e27affaa4d9e1ca3958cdb6/charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", size = 92830 }, - { url = 
"https://files.pythonhosted.org/packages/2f/0e/d7303ccae9735ff8ff01e36705ad6233ad2002962e8668a970fc000c5e1b/charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", size = 100376 }, - { url = "https://files.pythonhosted.org/packages/28/76/e6222113b83e3622caa4bb41032d0b1bf785250607392e1b778aca0b8a7d/charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", size = 48543 }, + { url = "https://files.pythonhosted.org/packages/2b/61/095a0aa1a84d1481998b534177c8566fdc50bb1233ea9a0478cd3cc075bd/charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3", size = 194219, upload-time = "2023-11-01T04:02:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/cc/94/f7cf5e5134175de79ad2059edf2adce18e0685ebdb9227ff0139975d0e93/charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027", size = 122521, upload-time = "2023-11-01T04:02:32.452Z" }, + { url = "https://files.pythonhosted.org/packages/46/6a/d5c26c41c49b546860cc1acabdddf48b0b3fb2685f4f5617ac59261b44ae/charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03", size = 120383, upload-time = "2023-11-01T04:02:34.11Z" }, + { url = "https://files.pythonhosted.org/packages/b8/60/e2f67915a51be59d4539ed189eb0a2b0d292bf79270410746becb32bc2c3/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d", size = 138223, upload-time = "2023-11-01T04:02:36.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/8c/eb854996d5fef5e4f33ad56927ad053d04dc820e4a3d39023f35cad72617/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e", size = 148101, upload-time = "2023-11-01T04:02:38.067Z" }, + { url = "https://files.pythonhosted.org/packages/f6/93/bb6cbeec3bf9da9b2eba458c15966658d1daa8b982c642f81c93ad9b40e1/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6", size = 140699, upload-time = "2023-11-01T04:02:39.436Z" }, + { url = "https://files.pythonhosted.org/packages/da/f1/3702ba2a7470666a62fd81c58a4c40be00670e5006a67f4d626e57f013ae/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5", size = 142065, upload-time = "2023-11-01T04:02:41.357Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ba/3f5e7be00b215fa10e13d64b1f6237eb6ebea66676a41b2bcdd09fe74323/charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537", size = 144505, upload-time = "2023-11-01T04:02:43.108Z" }, + { url = "https://files.pythonhosted.org/packages/33/c3/3b96a435c5109dd5b6adc8a59ba1d678b302a97938f032e3770cc84cd354/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c", size = 139425, upload-time = "2023-11-01T04:02:45.427Z" }, + { url = "https://files.pythonhosted.org/packages/43/05/3bf613e719efe68fb3a77f9c536a389f35b95d75424b96b426a47a45ef1d/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12", size = 
145287, upload-time = "2023-11-01T04:02:46.705Z" }, + { url = "https://files.pythonhosted.org/packages/58/78/a0bc646900994df12e07b4ae5c713f2b3e5998f58b9d3720cce2aa45652f/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f", size = 149929, upload-time = "2023-11-01T04:02:48.098Z" }, + { url = "https://files.pythonhosted.org/packages/eb/5c/97d97248af4920bc68687d9c3b3c0f47c910e21a8ff80af4565a576bd2f0/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269", size = 141605, upload-time = "2023-11-01T04:02:49.605Z" }, + { url = "https://files.pythonhosted.org/packages/a8/31/47d018ef89f95b8aded95c589a77c072c55e94b50a41aa99c0a2008a45a4/charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519", size = 142646, upload-time = "2023-11-01T04:02:51.35Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d5/4fecf1d58bedb1340a50f165ba1c7ddc0400252d6832ff619c4568b36cc0/charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73", size = 92846, upload-time = "2023-11-01T04:02:52.679Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a0/4af29e22cb5942488cf45630cbdd7cefd908768e69bdd90280842e4e8529/charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09", size = 100343, upload-time = "2023-11-01T04:02:53.915Z" }, + { url = "https://files.pythonhosted.org/packages/68/77/02839016f6fbbf808e8b38601df6e0e66c17bbab76dff4613f7511413597/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db", size = 191647, upload-time = "2023-11-01T04:02:55.329Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/33/21a875a61057165e92227466e54ee076b73af1e21fe1b31f1e292251aa1e/charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96", size = 121434, upload-time = "2023-11-01T04:02:57.173Z" }, + { url = "https://files.pythonhosted.org/packages/dd/51/68b61b90b24ca35495956b718f35a9756ef7d3dd4b3c1508056fa98d1a1b/charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e", size = 118979, upload-time = "2023-11-01T04:02:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/e4/a6/7ee57823d46331ddc37dd00749c95b0edec2c79b15fc0d6e6efb532e89ac/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f", size = 136582, upload-time = "2023-11-01T04:02:59.776Z" }, + { url = "https://files.pythonhosted.org/packages/74/f1/0d9fe69ac441467b737ba7f48c68241487df2f4522dd7246d9426e7c690e/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574", size = 146645, upload-time = "2023-11-01T04:03:02.186Z" }, + { url = "https://files.pythonhosted.org/packages/05/31/e1f51c76db7be1d4aef220d29fbfa5dbb4a99165d9833dcbf166753b6dc0/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4", size = 139398, upload-time = "2023-11-01T04:03:04.255Z" }, + { url = "https://files.pythonhosted.org/packages/40/26/f35951c45070edc957ba40a5b1db3cf60a9dbb1b350c2d5bef03e01e61de/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8", size = 140273, upload-time = 
"2023-11-01T04:03:05.983Z" }, + { url = "https://files.pythonhosted.org/packages/07/07/7e554f2bbce3295e191f7e653ff15d55309a9ca40d0362fcdab36f01063c/charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc", size = 142577, upload-time = "2023-11-01T04:03:07.567Z" }, + { url = "https://files.pythonhosted.org/packages/d8/b5/eb705c313100defa57da79277d9207dc8d8e45931035862fa64b625bfead/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae", size = 137747, upload-time = "2023-11-01T04:03:08.886Z" }, + { url = "https://files.pythonhosted.org/packages/19/28/573147271fd041d351b438a5665be8223f1dd92f273713cb882ddafe214c/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887", size = 143375, upload-time = "2023-11-01T04:03:10.613Z" }, + { url = "https://files.pythonhosted.org/packages/cf/7c/f3b682fa053cc21373c9a839e6beba7705857075686a05c72e0f8c4980ca/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae", size = 148474, upload-time = "2023-11-01T04:03:11.973Z" }, + { url = "https://files.pythonhosted.org/packages/1e/49/7ab74d4ac537ece3bc3334ee08645e231f39f7d6df6347b29a74b0537103/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce", size = 140232, upload-time = "2023-11-01T04:03:13.505Z" }, + { url = "https://files.pythonhosted.org/packages/2d/dc/9dacba68c9ac0ae781d40e1a0c0058e26302ea0660e574ddf6797a0347f7/charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f", size = 140859, upload-time = 
"2023-11-01T04:03:17.362Z" }, + { url = "https://files.pythonhosted.org/packages/6c/c2/4a583f800c0708dd22096298e49f887b49d9746d0e78bfc1d7e29816614c/charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab", size = 92509, upload-time = "2023-11-01T04:03:21.453Z" }, + { url = "https://files.pythonhosted.org/packages/57/ec/80c8d48ac8b1741d5b963797b7c0c869335619e13d4744ca2f67fc11c6fc/charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77", size = 99870, upload-time = "2023-11-01T04:03:22.723Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b2/fcedc8255ec42afee97f9e6f0145c734bbe104aac28300214593eb326f1d/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8", size = 192892, upload-time = "2023-11-01T04:03:24.135Z" }, + { url = "https://files.pythonhosted.org/packages/2e/7d/2259318c202f3d17f3fe6438149b3b9e706d1070fe3fcbb28049730bb25c/charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b", size = 122213, upload-time = "2023-11-01T04:03:25.66Z" }, + { url = "https://files.pythonhosted.org/packages/3a/52/9f9d17c3b54dc238de384c4cb5a2ef0e27985b42a0e5cc8e8a31d918d48d/charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6", size = 119404, upload-time = "2023-11-01T04:03:27.04Z" }, + { url = "https://files.pythonhosted.org/packages/99/b0/9c365f6d79a9f0f3c379ddb40a256a67aa69c59609608fe7feb6235896e1/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a", size = 137275, upload-time = "2023-11-01T04:03:28.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/33/749df346e93d7a30cdcb90cbfdd41a06026317bfbfb62cd68307c1a3c543/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389", size = 147518, upload-time = "2023-11-01T04:03:29.82Z" }, + { url = "https://files.pythonhosted.org/packages/72/1a/641d5c9f59e6af4c7b53da463d07600a695b9824e20849cb6eea8a627761/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa", size = 140182, upload-time = "2023-11-01T04:03:31.511Z" }, + { url = "https://files.pythonhosted.org/packages/ee/fb/14d30eb4956408ee3ae09ad34299131fb383c47df355ddb428a7331cfa1e/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b", size = 141869, upload-time = "2023-11-01T04:03:32.887Z" }, + { url = "https://files.pythonhosted.org/packages/df/3e/a06b18788ca2eb6695c9b22325b6fde7dde0f1d1838b1792a0076f58fe9d/charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed", size = 144042, upload-time = "2023-11-01T04:03:34.412Z" }, + { url = "https://files.pythonhosted.org/packages/45/59/3d27019d3b447a88fe7e7d004a1e04be220227760264cc41b405e863891b/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26", size = 138275, upload-time = "2023-11-01T04:03:35.759Z" }, + { url = "https://files.pythonhosted.org/packages/7b/ef/5eb105530b4da8ae37d506ccfa25057961b7b63d581def6f99165ea89c7e/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d", size = 
144819, upload-time = "2023-11-01T04:03:37.216Z" }, + { url = "https://files.pythonhosted.org/packages/a2/51/e5023f937d7f307c948ed3e5c29c4b7a3e42ed2ee0b8cdf8f3a706089bf0/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068", size = 149415, upload-time = "2023-11-01T04:03:38.694Z" }, + { url = "https://files.pythonhosted.org/packages/24/9d/2e3ef673dfd5be0154b20363c5cdcc5606f35666544381bee15af3778239/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143", size = 141212, upload-time = "2023-11-01T04:03:40.07Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ae/ce2c12fcac59cb3860b2e2d76dc405253a4475436b1861d95fe75bdea520/charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4", size = 142167, upload-time = "2023-11-01T04:03:41.491Z" }, + { url = "https://files.pythonhosted.org/packages/ed/3a/a448bf035dce5da359daf9ae8a16b8a39623cc395a2ffb1620aa1bce62b0/charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7", size = 93041, upload-time = "2023-11-01T04:03:42.836Z" }, + { url = "https://files.pythonhosted.org/packages/b6/7c/8debebb4f90174074b827c63242c23851bdf00a532489fba57fef3416e40/charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001", size = 100397, upload-time = "2023-11-01T04:03:44.467Z" }, + { url = "https://files.pythonhosted.org/packages/ef/d4/a1d72a8f6aa754fdebe91b848912025d30ab7dced61e9ed8aabbf791ed65/charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a", size = 191415, upload-time = "2023-11-01T04:04:07.592Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/82/83c188028b6f38d39538442dd127dc794c602ae6d45d66c469f4063a4c30/charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac", size = 121051, upload-time = "2023-11-01T04:04:09.231Z" }, + { url = "https://files.pythonhosted.org/packages/16/ea/a9e284aa38cccea06b7056d4cbc7adf37670b1f8a668a312864abf1ff7c6/charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a", size = 119143, upload-time = "2023-11-01T04:04:10.897Z" }, + { url = "https://files.pythonhosted.org/packages/34/2a/f392457d45e24a0c9bfc012887ed4f3c54bf5d4d05a5deb970ffec4b7fc0/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33", size = 137506, upload-time = "2023-11-01T04:04:12.612Z" }, + { url = "https://files.pythonhosted.org/packages/be/4d/9e370f8281cec2fcc9452c4d1ac513324c32957c5f70c73dd2fa8442a21a/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238", size = 147272, upload-time = "2023-11-01T04:04:13.978Z" }, + { url = "https://files.pythonhosted.org/packages/33/95/ef68482e4a6adf781fae8d183fb48d6f2be8facb414f49c90ba6a5149cd1/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a", size = 139734, upload-time = "2023-11-01T04:04:15.626Z" }, + { url = "https://files.pythonhosted.org/packages/3d/09/d82fe4a34c5f0585f9ea1df090e2a71eb9bb1e469723053e1ee9f57c16f3/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2", size = 141094, upload-time = "2023-11-01T04:04:17.286Z" 
}, + { url = "https://files.pythonhosted.org/packages/81/b2/160893421adfa3c45554fb418e321ed342bb10c0a4549e855b2b2a3699cb/charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8", size = 144113, upload-time = "2023-11-01T04:04:18.739Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ef/cd47a63d3200b232792e361cd67530173a09eb011813478b1c0fb8aa7226/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898", size = 138555, upload-time = "2023-11-01T04:04:20.482Z" }, + { url = "https://files.pythonhosted.org/packages/a8/6f/4ff299b97da2ed6358154b6eb3a2db67da2ae204e53d205aacb18a7e4f34/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99", size = 144944, upload-time = "2023-11-01T04:04:21.973Z" }, + { url = "https://files.pythonhosted.org/packages/d1/2f/0d1efd07c74c52b6886c32a3b906fb8afd2fecf448650e73ecb90a5a27f1/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d", size = 148925, upload-time = "2023-11-01T04:04:23.472Z" }, + { url = "https://files.pythonhosted.org/packages/bd/28/7ea29e73eea52c7e15b4b9108d0743fc9e4cc2cdb00d275af1df3d46d360/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04", size = 140732, upload-time = "2023-11-01T04:04:25.156Z" }, + { url = "https://files.pythonhosted.org/packages/b3/c1/ebca8e87c714a6a561cfee063f0655f742e54b8ae6e78151f60ba8708b3a/charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087", size = 141288, upload-time = "2023-11-01T04:04:26.567Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/20/8923a06f15eb3d7f6a306729360bd58f9ead1dc39bc7ea8831f4b407e4ae/charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25", size = 92373, upload-time = "2023-11-01T04:04:29.116Z" }, + { url = "https://files.pythonhosted.org/packages/db/fb/d29e343e7c57bbf1231275939f6e75eb740cd47a9d7cb2c52ffeb62ef869/charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b", size = 99577, upload-time = "2023-11-01T04:04:30.833Z" }, + { url = "https://files.pythonhosted.org/packages/f7/9d/bcf4a449a438ed6f19790eee543a86a740c77508fbc5ddab210ab3ba3a9a/charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4", size = 194198, upload-time = "2023-11-01T04:04:32.328Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/c7d3da40a66a6bf2920cce0f436fa1f62ee28aaf92f412f0bf3b84c8ad6c/charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d", size = 122494, upload-time = "2023-11-01T04:04:33.993Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9d/a6d15bd1e3e2914af5955c8eb15f4071997e7078419328fee93dfd497eb7/charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0", size = 120393, upload-time = "2023-11-01T04:04:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/3d/85/5b7416b349609d20611a64718bed383b9251b5a601044550f0c8983b8900/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269", size = 138331, upload-time = "2023-11-01T04:04:37.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/66/8946baa705c588521afe10b2d7967300e49380ded089a62d38537264aece/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c", size = 148097, upload-time = "2023-11-01T04:04:39.217Z" }, + { url = "https://files.pythonhosted.org/packages/44/80/b339237b4ce635b4af1c73742459eee5f97201bd92b2371c53e11958392e/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519", size = 140711, upload-time = "2023-11-01T04:04:40.964Z" }, + { url = "https://files.pythonhosted.org/packages/98/69/5d8751b4b670d623aa7a47bef061d69c279e9f922f6705147983aa76c3ce/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796", size = 142251, upload-time = "2023-11-01T04:04:42.893Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8d/33c860a7032da5b93382cbe2873261f81467e7b37f4ed91e25fed62fd49b/charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185", size = 144636, upload-time = "2023-11-01T04:04:44.693Z" }, + { url = "https://files.pythonhosted.org/packages/c2/65/52aaf47b3dd616c11a19b1052ce7fa6321250a7a0b975f48d8c366733b9f/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c", size = 139514, upload-time = "2023-11-01T04:04:46.388Z" }, + { url = "https://files.pythonhosted.org/packages/51/fd/0ee5b1c2860bb3c60236d05b6e4ac240cf702b67471138571dad91bcfed8/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458", size = 145528, 
upload-time = "2023-11-01T04:04:47.893Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9c/60729bf15dc82e3aaf5f71e81686e42e50715a1399770bcde1a9e43d09db/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2", size = 149804, upload-time = "2023-11-01T04:04:49.979Z" }, + { url = "https://files.pythonhosted.org/packages/53/cd/aa4b8a4d82eeceb872f83237b2d27e43e637cac9ffaef19a1321c3bafb67/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8", size = 141708, upload-time = "2023-11-01T04:04:51.846Z" }, + { url = "https://files.pythonhosted.org/packages/54/7f/cad0b328759630814fcf9d804bfabaf47776816ad4ef2e9938b7e1123d04/charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561", size = 142708, upload-time = "2023-11-01T04:04:53.252Z" }, + { url = "https://files.pythonhosted.org/packages/c1/9d/254a2f1bcb0ce9acad838e94ed05ba71a7cb1e27affaa4d9e1ca3958cdb6/charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f", size = 92830, upload-time = "2023-11-01T04:04:54.827Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0e/d7303ccae9735ff8ff01e36705ad6233ad2002962e8668a970fc000c5e1b/charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d", size = 100376, upload-time = "2023-11-01T04:04:56.588Z" }, + { url = "https://files.pythonhosted.org/packages/28/76/e6222113b83e3622caa4bb41032d0b1bf785250607392e1b778aca0b8a7d/charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc", size = 48543, upload-time = "2023-11-01T04:04:58.622Z" }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] [[package]] name = "coverage" version = "7.6.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791 } +sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791, upload-time = "2024-08-04T19:45:30.9Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/61/eb7ce5ed62bacf21beca4937a90fe32545c91a3c8a42a30c6616d48fc70d/coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", size = 206690 }, - { url = "https://files.pythonhosted.org/packages/7d/73/041928e434442bd3afde5584bdc3f932fb4562b1597629f537387cec6f3d/coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", size = 207127 }, - { url = "https://files.pythonhosted.org/packages/c7/c8/6ca52b5147828e45ad0242388477fdb90df2c6cbb9a441701a12b3c71bc8/coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", size = 235654 }, - { url = "https://files.pythonhosted.org/packages/d5/da/9ac2b62557f4340270942011d6efeab9833648380109e897d48ab7c1035d/coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc", size = 233598 }, - { url = "https://files.pythonhosted.org/packages/53/23/9e2c114d0178abc42b6d8d5281f651a8e6519abfa0ef460a00a91f80879d/coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", size = 234732 }, - { url = "https://files.pythonhosted.org/packages/0f/7e/a0230756fb133343a52716e8b855045f13342b70e48e8ad41d8a0d60ab98/coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", size = 233816 }, - { url = "https://files.pythonhosted.org/packages/28/7c/3753c8b40d232b1e5eeaed798c875537cf3cb183fb5041017c1fdb7ec14e/coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", size = 232325 }, - { url = 
"https://files.pythonhosted.org/packages/57/e3/818a2b2af5b7573b4b82cf3e9f137ab158c90ea750a8f053716a32f20f06/coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", size = 233418 }, - { url = "https://files.pythonhosted.org/packages/c8/fb/4532b0b0cefb3f06d201648715e03b0feb822907edab3935112b61b885e2/coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", size = 209343 }, - { url = "https://files.pythonhosted.org/packages/5a/25/af337cc7421eca1c187cc9c315f0a755d48e755d2853715bfe8c418a45fa/coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", size = 210136 }, - { url = "https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796 }, - { url = "https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244 }, - { url = "https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279 }, - { url = "https://files.pythonhosted.org/packages/70/6c/a9ccd6fe50ddaf13442a1e2dd519ca805cbe0f1fcd377fba6d8339b98ccb/coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", size = 236859 }, - { url = 
"https://files.pythonhosted.org/packages/14/6f/8351b465febb4dbc1ca9929505202db909c5a635c6fdf33e089bbc3d7d85/coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", size = 238549 }, - { url = "https://files.pythonhosted.org/packages/68/3c/289b81fa18ad72138e6d78c4c11a82b5378a312c0e467e2f6b495c260907/coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", size = 237477 }, - { url = "https://files.pythonhosted.org/packages/ed/1c/aa1efa6459d822bd72c4abc0b9418cf268de3f60eeccd65dc4988553bd8d/coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", size = 236134 }, - { url = "https://files.pythonhosted.org/packages/fb/c8/521c698f2d2796565fe9c789c2ee1ccdae610b3aa20b9b2ef980cc253640/coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", size = 236910 }, - { url = "https://files.pythonhosted.org/packages/7d/30/033e663399ff17dca90d793ee8a2ea2890e7fdf085da58d82468b4220bf7/coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", size = 209348 }, - { url = "https://files.pythonhosted.org/packages/20/05/0d1ccbb52727ccdadaa3ff37e4d2dc1cd4d47f0c3df9eb58d9ec8508ca88/coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", size = 210230 }, - { url = "https://files.pythonhosted.org/packages/7e/d4/300fc921dff243cd518c7db3a4c614b7e4b2431b0d1145c1e274fd99bd70/coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", size = 206983 }, - { url = 
"https://files.pythonhosted.org/packages/e1/ab/6bf00de5327ecb8db205f9ae596885417a31535eeda6e7b99463108782e1/coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", size = 207221 }, - { url = "https://files.pythonhosted.org/packages/92/8f/2ead05e735022d1a7f3a0a683ac7f737de14850395a826192f0288703472/coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", size = 240342 }, - { url = "https://files.pythonhosted.org/packages/0f/ef/94043e478201ffa85b8ae2d2c79b4081e5a1b73438aafafccf3e9bafb6b5/coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", size = 237371 }, - { url = "https://files.pythonhosted.org/packages/1f/0f/c890339dd605f3ebc269543247bdd43b703cce6825b5ed42ff5f2d6122c7/coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", size = 239455 }, - { url = "https://files.pythonhosted.org/packages/d1/04/7fd7b39ec7372a04efb0f70c70e35857a99b6a9188b5205efb4c77d6a57a/coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", size = 238924 }, - { url = "https://files.pythonhosted.org/packages/ed/bf/73ce346a9d32a09cf369f14d2a06651329c984e106f5992c89579d25b27e/coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", size = 237252 }, - { url = "https://files.pythonhosted.org/packages/86/74/1dc7a20969725e917b1e07fe71a955eb34bc606b938316bcc799f228374b/coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", size = 238897 }, - { url = "https://files.pythonhosted.org/packages/b6/e9/d9cc3deceb361c491b81005c668578b0dfa51eed02cd081620e9a62f24ec/coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", size = 209606 }, - { url = "https://files.pythonhosted.org/packages/47/c8/5a2e41922ea6740f77d555c4d47544acd7dc3f251fe14199c09c0f5958d3/coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", size = 210373 }, - { url = "https://files.pythonhosted.org/packages/8c/f9/9aa4dfb751cb01c949c990d136a0f92027fbcc5781c6e921df1cb1563f20/coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", size = 207007 }, - { url = "https://files.pythonhosted.org/packages/b9/67/e1413d5a8591622a46dd04ff80873b04c849268831ed5c304c16433e7e30/coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", size = 207269 }, - { url = "https://files.pythonhosted.org/packages/14/5b/9dec847b305e44a5634d0fb8498d135ab1d88330482b74065fcec0622224/coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", size = 239886 }, - { url = "https://files.pythonhosted.org/packages/7b/b7/35760a67c168e29f454928f51f970342d23cf75a2bb0323e0f07334c85f3/coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", size = 237037 }, - { url = "https://files.pythonhosted.org/packages/f7/95/d2fd31f1d638df806cae59d7daea5abf2b15b5234016a5ebb502c2f3f7ee/coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", size = 239038 }, - { url = "https://files.pythonhosted.org/packages/6e/bd/110689ff5752b67924efd5e2aedf5190cbbe245fc81b8dec1abaffba619d/coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", size = 238690 }, - { url = "https://files.pythonhosted.org/packages/d3/a8/08d7b38e6ff8df52331c83130d0ab92d9c9a8b5462f9e99c9f051a4ae206/coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", size = 236765 }, - { url = "https://files.pythonhosted.org/packages/d6/6a/9cf96839d3147d55ae713eb2d877f4d777e7dc5ba2bce227167d0118dfe8/coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", size = 238611 }, - { url = "https://files.pythonhosted.org/packages/74/e4/7ff20d6a0b59eeaab40b3140a71e38cf52547ba21dbcf1d79c5a32bba61b/coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", size = 209671 }, - { url = "https://files.pythonhosted.org/packages/35/59/1812f08a85b57c9fdb6d0b383d779e47b6f643bc278ed682859512517e83/coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", size = 210368 }, - { url = "https://files.pythonhosted.org/packages/9c/15/08913be1c59d7562a3e39fce20661a98c0a3f59d5754312899acc6cb8a2d/coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", size = 207758 }, - { url = "https://files.pythonhosted.org/packages/c4/ae/b5d58dff26cade02ada6ca612a76447acd69dccdbb3a478e9e088eb3d4b9/coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", size = 208035 }, - { url = 
"https://files.pythonhosted.org/packages/b8/d7/62095e355ec0613b08dfb19206ce3033a0eedb6f4a67af5ed267a8800642/coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", size = 250839 }, - { url = "https://files.pythonhosted.org/packages/7c/1e/c2967cb7991b112ba3766df0d9c21de46b476d103e32bb401b1b2adf3380/coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", size = 246569 }, - { url = "https://files.pythonhosted.org/packages/8b/61/a7a6a55dd266007ed3b1df7a3386a0d760d014542d72f7c2c6938483b7bd/coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", size = 248927 }, - { url = "https://files.pythonhosted.org/packages/c8/fa/13a6f56d72b429f56ef612eb3bc5ce1b75b7ee12864b3bd12526ab794847/coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", size = 248401 }, - { url = "https://files.pythonhosted.org/packages/75/06/0429c652aa0fb761fc60e8c6b291338c9173c6aa0f4e40e1902345b42830/coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", size = 246301 }, - { url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598 }, - { url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", 
size = 210307 }, - { url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453 }, - { url = "https://files.pythonhosted.org/packages/81/d0/d9e3d554e38beea5a2e22178ddb16587dbcbe9a1ef3211f55733924bf7fa/coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0", size = 206674 }, - { url = "https://files.pythonhosted.org/packages/38/ea/cab2dc248d9f45b2b7f9f1f596a4d75a435cb364437c61b51d2eb33ceb0e/coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a", size = 207101 }, - { url = "https://files.pythonhosted.org/packages/ca/6f/f82f9a500c7c5722368978a5390c418d2a4d083ef955309a8748ecaa8920/coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b", size = 236554 }, - { url = "https://files.pythonhosted.org/packages/a6/94/d3055aa33d4e7e733d8fa309d9adf147b4b06a82c1346366fc15a2b1d5fa/coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3", size = 234440 }, - { url = "https://files.pythonhosted.org/packages/e4/6e/885bcd787d9dd674de4a7d8ec83faf729534c63d05d51d45d4fa168f7102/coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de", size = 235889 }, - { url = "https://files.pythonhosted.org/packages/f4/63/df50120a7744492710854860783d6819ff23e482dee15462c9a833cc428a/coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6", size = 235142 }, - { url = "https://files.pythonhosted.org/packages/3a/5d/9d0acfcded2b3e9ce1c7923ca52ccc00c78a74e112fc2aee661125b7843b/coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569", size = 233805 }, - { url = "https://files.pythonhosted.org/packages/c4/56/50abf070cb3cd9b1dd32f2c88f083aab561ecbffbcd783275cb51c17f11d/coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989", size = 234655 }, - { url = "https://files.pythonhosted.org/packages/25/ee/b4c246048b8485f85a2426ef4abab88e48c6e80c74e964bea5cd4cd4b115/coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7", size = 209296 }, - { url = "https://files.pythonhosted.org/packages/5c/1c/96cf86b70b69ea2b12924cdf7cabb8ad10e6130eab8d767a1099fbd2a44f/coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8", size = 210137 }, - { url = "https://files.pythonhosted.org/packages/19/d3/d54c5aa83268779d54c86deb39c1c4566e5d45c155369ca152765f8db413/coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255", size = 206688 }, - { url = "https://files.pythonhosted.org/packages/a5/fe/137d5dca72e4a258b1bc17bb04f2e0196898fe495843402ce826a7419fe3/coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8", size = 207120 }, - { url = "https://files.pythonhosted.org/packages/78/5b/a0a796983f3201ff5485323b225d7c8b74ce30c11f456017e23d8e8d1945/coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2", size = 235249 }, - { url = 
"https://files.pythonhosted.org/packages/4e/e1/76089d6a5ef9d68f018f65411fcdaaeb0141b504587b901d74e8587606ad/coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a", size = 233237 }, - { url = "https://files.pythonhosted.org/packages/9a/6f/eef79b779a540326fee9520e5542a8b428cc3bfa8b7c8f1022c1ee4fc66c/coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc", size = 234311 }, - { url = "https://files.pythonhosted.org/packages/75/e1/656d65fb126c29a494ef964005702b012f3498db1a30dd562958e85a4049/coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004", size = 233453 }, - { url = "https://files.pythonhosted.org/packages/68/6a/45f108f137941a4a1238c85f28fd9d048cc46b5466d6b8dda3aba1bb9d4f/coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb", size = 231958 }, - { url = "https://files.pythonhosted.org/packages/9b/e7/47b809099168b8b8c72ae311efc3e88c8d8a1162b3ba4b8da3cfcdb85743/coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36", size = 232938 }, - { url = "https://files.pythonhosted.org/packages/52/80/052222ba7058071f905435bad0ba392cc12006380731c37afaf3fe749b88/coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c", size = 209352 }, - { url = "https://files.pythonhosted.org/packages/b8/d8/1b92e0b3adcf384e98770a00ca095da1b5f7b483e6563ae4eb5e935d24a1/coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca", size = 210153 }, - { url = 
"https://files.pythonhosted.org/packages/a5/2b/0354ed096bca64dc8e32a7cbcae28b34cb5ad0b1fe2125d6d99583313ac0/coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", size = 198926 }, + { url = "https://files.pythonhosted.org/packages/7e/61/eb7ce5ed62bacf21beca4937a90fe32545c91a3c8a42a30c6616d48fc70d/coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", size = 206690, upload-time = "2024-08-04T19:43:07.695Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/041928e434442bd3afde5584bdc3f932fb4562b1597629f537387cec6f3d/coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", size = 207127, upload-time = "2024-08-04T19:43:10.15Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c8/6ca52b5147828e45ad0242388477fdb90df2c6cbb9a441701a12b3c71bc8/coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", size = 235654, upload-time = "2024-08-04T19:43:12.405Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/9ac2b62557f4340270942011d6efeab9833648380109e897d48ab7c1035d/coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc", size = 233598, upload-time = "2024-08-04T19:43:14.078Z" }, + { url = "https://files.pythonhosted.org/packages/53/23/9e2c114d0178abc42b6d8d5281f651a8e6519abfa0ef460a00a91f80879d/coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", size = 234732, upload-time = "2024-08-04T19:43:16.632Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/7e/a0230756fb133343a52716e8b855045f13342b70e48e8ad41d8a0d60ab98/coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", size = 233816, upload-time = "2024-08-04T19:43:19.049Z" }, + { url = "https://files.pythonhosted.org/packages/28/7c/3753c8b40d232b1e5eeaed798c875537cf3cb183fb5041017c1fdb7ec14e/coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", size = 232325, upload-time = "2024-08-04T19:43:21.246Z" }, + { url = "https://files.pythonhosted.org/packages/57/e3/818a2b2af5b7573b4b82cf3e9f137ab158c90ea750a8f053716a32f20f06/coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", size = 233418, upload-time = "2024-08-04T19:43:22.945Z" }, + { url = "https://files.pythonhosted.org/packages/c8/fb/4532b0b0cefb3f06d201648715e03b0feb822907edab3935112b61b885e2/coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", size = 209343, upload-time = "2024-08-04T19:43:25.121Z" }, + { url = "https://files.pythonhosted.org/packages/5a/25/af337cc7421eca1c187cc9c315f0a755d48e755d2853715bfe8c418a45fa/coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", size = 210136, upload-time = "2024-08-04T19:43:26.851Z" }, + { url = "https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796, upload-time = "2024-08-04T19:43:29.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244, upload-time = "2024-08-04T19:43:31.285Z" }, + { url = "https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279, upload-time = "2024-08-04T19:43:33.581Z" }, + { url = "https://files.pythonhosted.org/packages/70/6c/a9ccd6fe50ddaf13442a1e2dd519ca805cbe0f1fcd377fba6d8339b98ccb/coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", size = 236859, upload-time = "2024-08-04T19:43:35.301Z" }, + { url = "https://files.pythonhosted.org/packages/14/6f/8351b465febb4dbc1ca9929505202db909c5a635c6fdf33e089bbc3d7d85/coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", size = 238549, upload-time = "2024-08-04T19:43:37.578Z" }, + { url = "https://files.pythonhosted.org/packages/68/3c/289b81fa18ad72138e6d78c4c11a82b5378a312c0e467e2f6b495c260907/coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", size = 237477, upload-time = "2024-08-04T19:43:39.92Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1c/aa1efa6459d822bd72c4abc0b9418cf268de3f60eeccd65dc4988553bd8d/coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", size = 236134, upload-time = "2024-08-04T19:43:41.453Z" 
}, + { url = "https://files.pythonhosted.org/packages/fb/c8/521c698f2d2796565fe9c789c2ee1ccdae610b3aa20b9b2ef980cc253640/coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", size = 236910, upload-time = "2024-08-04T19:43:43.037Z" }, + { url = "https://files.pythonhosted.org/packages/7d/30/033e663399ff17dca90d793ee8a2ea2890e7fdf085da58d82468b4220bf7/coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", size = 209348, upload-time = "2024-08-04T19:43:44.787Z" }, + { url = "https://files.pythonhosted.org/packages/20/05/0d1ccbb52727ccdadaa3ff37e4d2dc1cd4d47f0c3df9eb58d9ec8508ca88/coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", size = 210230, upload-time = "2024-08-04T19:43:46.707Z" }, + { url = "https://files.pythonhosted.org/packages/7e/d4/300fc921dff243cd518c7db3a4c614b7e4b2431b0d1145c1e274fd99bd70/coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", size = 206983, upload-time = "2024-08-04T19:43:49.082Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ab/6bf00de5327ecb8db205f9ae596885417a31535eeda6e7b99463108782e1/coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", size = 207221, upload-time = "2024-08-04T19:43:52.15Z" }, + { url = "https://files.pythonhosted.org/packages/92/8f/2ead05e735022d1a7f3a0a683ac7f737de14850395a826192f0288703472/coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", size = 240342, upload-time = "2024-08-04T19:43:53.746Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/ef/94043e478201ffa85b8ae2d2c79b4081e5a1b73438aafafccf3e9bafb6b5/coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", size = 237371, upload-time = "2024-08-04T19:43:55.993Z" }, + { url = "https://files.pythonhosted.org/packages/1f/0f/c890339dd605f3ebc269543247bdd43b703cce6825b5ed42ff5f2d6122c7/coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", size = 239455, upload-time = "2024-08-04T19:43:57.618Z" }, + { url = "https://files.pythonhosted.org/packages/d1/04/7fd7b39ec7372a04efb0f70c70e35857a99b6a9188b5205efb4c77d6a57a/coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", size = 238924, upload-time = "2024-08-04T19:44:00.012Z" }, + { url = "https://files.pythonhosted.org/packages/ed/bf/73ce346a9d32a09cf369f14d2a06651329c984e106f5992c89579d25b27e/coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", size = 237252, upload-time = "2024-08-04T19:44:01.713Z" }, + { url = "https://files.pythonhosted.org/packages/86/74/1dc7a20969725e917b1e07fe71a955eb34bc606b938316bcc799f228374b/coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", size = 238897, upload-time = "2024-08-04T19:44:03.898Z" }, + { url = "https://files.pythonhosted.org/packages/b6/e9/d9cc3deceb361c491b81005c668578b0dfa51eed02cd081620e9a62f24ec/coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", size = 209606, upload-time = "2024-08-04T19:44:05.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/c8/5a2e41922ea6740f77d555c4d47544acd7dc3f251fe14199c09c0f5958d3/coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", size = 210373, upload-time = "2024-08-04T19:44:07.079Z" }, + { url = "https://files.pythonhosted.org/packages/8c/f9/9aa4dfb751cb01c949c990d136a0f92027fbcc5781c6e921df1cb1563f20/coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", size = 207007, upload-time = "2024-08-04T19:44:09.453Z" }, + { url = "https://files.pythonhosted.org/packages/b9/67/e1413d5a8591622a46dd04ff80873b04c849268831ed5c304c16433e7e30/coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", size = 207269, upload-time = "2024-08-04T19:44:11.045Z" }, + { url = "https://files.pythonhosted.org/packages/14/5b/9dec847b305e44a5634d0fb8498d135ab1d88330482b74065fcec0622224/coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", size = 239886, upload-time = "2024-08-04T19:44:12.83Z" }, + { url = "https://files.pythonhosted.org/packages/7b/b7/35760a67c168e29f454928f51f970342d23cf75a2bb0323e0f07334c85f3/coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", size = 237037, upload-time = "2024-08-04T19:44:15.393Z" }, + { url = "https://files.pythonhosted.org/packages/f7/95/d2fd31f1d638df806cae59d7daea5abf2b15b5234016a5ebb502c2f3f7ee/coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", size = 239038, upload-time = "2024-08-04T19:44:17.466Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/bd/110689ff5752b67924efd5e2aedf5190cbbe245fc81b8dec1abaffba619d/coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", size = 238690, upload-time = "2024-08-04T19:44:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/d3/a8/08d7b38e6ff8df52331c83130d0ab92d9c9a8b5462f9e99c9f051a4ae206/coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", size = 236765, upload-time = "2024-08-04T19:44:20.994Z" }, + { url = "https://files.pythonhosted.org/packages/d6/6a/9cf96839d3147d55ae713eb2d877f4d777e7dc5ba2bce227167d0118dfe8/coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", size = 238611, upload-time = "2024-08-04T19:44:22.616Z" }, + { url = "https://files.pythonhosted.org/packages/74/e4/7ff20d6a0b59eeaab40b3140a71e38cf52547ba21dbcf1d79c5a32bba61b/coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", size = 209671, upload-time = "2024-08-04T19:44:24.418Z" }, + { url = "https://files.pythonhosted.org/packages/35/59/1812f08a85b57c9fdb6d0b383d779e47b6f643bc278ed682859512517e83/coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", size = 210368, upload-time = "2024-08-04T19:44:26.276Z" }, + { url = "https://files.pythonhosted.org/packages/9c/15/08913be1c59d7562a3e39fce20661a98c0a3f59d5754312899acc6cb8a2d/coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", size = 207758, upload-time = "2024-08-04T19:44:29.028Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/ae/b5d58dff26cade02ada6ca612a76447acd69dccdbb3a478e9e088eb3d4b9/coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", size = 208035, upload-time = "2024-08-04T19:44:30.673Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d7/62095e355ec0613b08dfb19206ce3033a0eedb6f4a67af5ed267a8800642/coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", size = 250839, upload-time = "2024-08-04T19:44:32.412Z" }, + { url = "https://files.pythonhosted.org/packages/7c/1e/c2967cb7991b112ba3766df0d9c21de46b476d103e32bb401b1b2adf3380/coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", size = 246569, upload-time = "2024-08-04T19:44:34.547Z" }, + { url = "https://files.pythonhosted.org/packages/8b/61/a7a6a55dd266007ed3b1df7a3386a0d760d014542d72f7c2c6938483b7bd/coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", size = 248927, upload-time = "2024-08-04T19:44:36.313Z" }, + { url = "https://files.pythonhosted.org/packages/c8/fa/13a6f56d72b429f56ef612eb3bc5ce1b75b7ee12864b3bd12526ab794847/coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", size = 248401, upload-time = "2024-08-04T19:44:38.155Z" }, + { url = "https://files.pythonhosted.org/packages/75/06/0429c652aa0fb761fc60e8c6b291338c9173c6aa0f4e40e1902345b42830/coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", size = 246301, upload-time = 
"2024-08-04T19:44:39.883Z" }, + { url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598, upload-time = "2024-08-04T19:44:41.59Z" }, + { url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", size = 210307, upload-time = "2024-08-04T19:44:43.301Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453, upload-time = "2024-08-04T19:44:45.677Z" }, + { url = "https://files.pythonhosted.org/packages/81/d0/d9e3d554e38beea5a2e22178ddb16587dbcbe9a1ef3211f55733924bf7fa/coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0", size = 206674, upload-time = "2024-08-04T19:44:47.694Z" }, + { url = "https://files.pythonhosted.org/packages/38/ea/cab2dc248d9f45b2b7f9f1f596a4d75a435cb364437c61b51d2eb33ceb0e/coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a", size = 207101, upload-time = "2024-08-04T19:44:49.32Z" }, + { url = "https://files.pythonhosted.org/packages/ca/6f/f82f9a500c7c5722368978a5390c418d2a4d083ef955309a8748ecaa8920/coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b", size = 236554, upload-time = "2024-08-04T19:44:51.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/a6/94/d3055aa33d4e7e733d8fa309d9adf147b4b06a82c1346366fc15a2b1d5fa/coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3", size = 234440, upload-time = "2024-08-04T19:44:53.464Z" }, + { url = "https://files.pythonhosted.org/packages/e4/6e/885bcd787d9dd674de4a7d8ec83faf729534c63d05d51d45d4fa168f7102/coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de", size = 235889, upload-time = "2024-08-04T19:44:55.165Z" }, + { url = "https://files.pythonhosted.org/packages/f4/63/df50120a7744492710854860783d6819ff23e482dee15462c9a833cc428a/coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6", size = 235142, upload-time = "2024-08-04T19:44:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5d/9d0acfcded2b3e9ce1c7923ca52ccc00c78a74e112fc2aee661125b7843b/coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569", size = 233805, upload-time = "2024-08-04T19:44:59.033Z" }, + { url = "https://files.pythonhosted.org/packages/c4/56/50abf070cb3cd9b1dd32f2c88f083aab561ecbffbcd783275cb51c17f11d/coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989", size = 234655, upload-time = "2024-08-04T19:45:01.398Z" }, + { url = "https://files.pythonhosted.org/packages/25/ee/b4c246048b8485f85a2426ef4abab88e48c6e80c74e964bea5cd4cd4b115/coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7", size = 209296, upload-time = "2024-08-04T19:45:03.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/1c/96cf86b70b69ea2b12924cdf7cabb8ad10e6130eab8d767a1099fbd2a44f/coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8", size = 210137, upload-time = "2024-08-04T19:45:06.25Z" }, + { url = "https://files.pythonhosted.org/packages/19/d3/d54c5aa83268779d54c86deb39c1c4566e5d45c155369ca152765f8db413/coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255", size = 206688, upload-time = "2024-08-04T19:45:08.358Z" }, + { url = "https://files.pythonhosted.org/packages/a5/fe/137d5dca72e4a258b1bc17bb04f2e0196898fe495843402ce826a7419fe3/coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8", size = 207120, upload-time = "2024-08-04T19:45:11.526Z" }, + { url = "https://files.pythonhosted.org/packages/78/5b/a0a796983f3201ff5485323b225d7c8b74ce30c11f456017e23d8e8d1945/coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2", size = 235249, upload-time = "2024-08-04T19:45:13.202Z" }, + { url = "https://files.pythonhosted.org/packages/4e/e1/76089d6a5ef9d68f018f65411fcdaaeb0141b504587b901d74e8587606ad/coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a", size = 233237, upload-time = "2024-08-04T19:45:14.961Z" }, + { url = "https://files.pythonhosted.org/packages/9a/6f/eef79b779a540326fee9520e5542a8b428cc3bfa8b7c8f1022c1ee4fc66c/coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc", size = 234311, upload-time = "2024-08-04T19:45:16.924Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/e1/656d65fb126c29a494ef964005702b012f3498db1a30dd562958e85a4049/coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004", size = 233453, upload-time = "2024-08-04T19:45:18.672Z" }, + { url = "https://files.pythonhosted.org/packages/68/6a/45f108f137941a4a1238c85f28fd9d048cc46b5466d6b8dda3aba1bb9d4f/coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb", size = 231958, upload-time = "2024-08-04T19:45:20.63Z" }, + { url = "https://files.pythonhosted.org/packages/9b/e7/47b809099168b8b8c72ae311efc3e88c8d8a1162b3ba4b8da3cfcdb85743/coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36", size = 232938, upload-time = "2024-08-04T19:45:23.062Z" }, + { url = "https://files.pythonhosted.org/packages/52/80/052222ba7058071f905435bad0ba392cc12006380731c37afaf3fe749b88/coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c", size = 209352, upload-time = "2024-08-04T19:45:25.042Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d8/1b92e0b3adcf384e98770a00ca095da1b5f7b483e6563ae4eb5e935d24a1/coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca", size = 210153, upload-time = "2024-08-04T19:45:27.079Z" }, + { url = "https://files.pythonhosted.org/packages/a5/2b/0354ed096bca64dc8e32a7cbcae28b34cb5ad0b1fe2125d6d99583313ac0/coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", size = 198926, upload-time = "2024-08-04T19:45:28.875Z" }, ] [package.optional-dependencies] @@ -191,21 +192,21 @@ toml = [ name = "exceptiongroup" version = "1.2.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883, upload-time = "2024-07-12T22:26:00.161Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453, upload-time = "2024-07-12T22:25:58.476Z" }, ] [[package]] name = "fediblockhole" -version = "0.4.5" +version = "0.4.6" source = { editable = "." 
} dependencies = [ { name = "requests" }, { name = "toml" }, ] -[package.dependency-groups] +[package.dev-dependencies] dev = [ { name = "coverage", extra = ["toml"] }, { name = "pytest" }, @@ -217,7 +218,7 @@ requires-dist = [ { name = "toml" }, ] -[package.metadata.dependency-groups] +[package.metadata.requires-dev] dev = [ { name = "coverage", extras = ["toml"], specifier = ">=7.6.1" }, { name = "pytest", specifier = "~=8.3" }, @@ -227,36 +228,36 @@ dev = [ name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] [[package]] name = "iniconfig" version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +sdist = { url = 
"https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" }, ] [[package]] name = "packaging" version = "24.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/b5/b43a27ac7472e1818c4bafd44430e69605baefe1f34440593e0332ec8b4d/packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9", size = 147882 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/b5/b43a27ac7472e1818c4bafd44430e69605baefe1f34440593e0332ec8b4d/packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9", size = 147882, upload-time = "2024-03-10T09:39:28.33Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", size = 53488 }, + { url = "https://files.pythonhosted.org/packages/49/df/1fceb2f8900f8639e278b056416d49134fb8d84c5942ffaa01ad34782422/packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5", size = 53488, upload-time = 
"2024-03-10T09:39:25.947Z" }, ] [[package]] name = "pluggy" version = "1.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955, upload-time = "2024-04-20T21:34:42.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" }, ] [[package]] @@ -271,9 +272,9 @@ dependencies = [ { name = "pluggy" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487 } +sdist = { url = "https://files.pythonhosted.org/packages/8b/6c/62bbd536103af674e227c41a8f3dcd022d591f6eed5facb5a0f31ee33bbc/pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", size = 1442487, upload-time = "2024-09-10T10:52:15.003Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341 }, + { url = "https://files.pythonhosted.org/packages/6b/77/7440a06a8ead44c7757a64362dd22df5760f9b12dc5f11b6188cd2fc27a0/pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2", size = 342341, upload-time = "2024-09-10T10:52:12.54Z" }, ] [[package]] @@ -286,34 +287,34 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9d/be/10918a2eac4ae9f02f6cfe6414b7a155ccd8f7f9d4380d62fd5b955065c3/requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1", size = 110794 } +sdist = { url = "https://files.pythonhosted.org/packages/9d/be/10918a2eac4ae9f02f6cfe6414b7a155ccd8f7f9d4380d62fd5b955065c3/requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1", size = 110794, upload-time = "2023-05-22T15:12:44.175Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/8e/0e2d847013cb52cd35b38c009bb167a1a26b2ce6cd6965bf26b47bc0bf44/requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", size = 62574 }, + { url = "https://files.pythonhosted.org/packages/70/8e/0e2d847013cb52cd35b38c009bb167a1a26b2ce6cd6965bf26b47bc0bf44/requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", size = 62574, upload-time = "2023-05-22T15:12:42.313Z" }, ] [[package]] name = "toml" version = "0.10.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = 
"sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, ] [[package]] name = "tomli" version = "2.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f", size = 15164 } +sdist = { url = "https://files.pythonhosted.org/packages/c0/3f/d7af728f075fb08564c5949a9c95e44352e23dee646869fa104a3b2060a3/tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f", size = 15164, upload-time = "2022-02-08T10:54:04.006Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757 }, + { url = "https://files.pythonhosted.org/packages/97/75/10a9ebee3fd790d20926a90a2547f0bf78f371b2f13aa822c759680ca7b9/tomli-2.0.1-py3-none-any.whl", hash = 
"sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", size = 12757, upload-time = "2022-02-08T10:54:02.017Z" }, ] [[package]] name = "urllib3" version = "2.0.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/47/b215df9f71b4fdba1025fc05a77db2ad243fa0926755a52c5e71659f4e3c/urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84", size = 282546 } +sdist = { url = "https://files.pythonhosted.org/packages/af/47/b215df9f71b4fdba1025fc05a77db2ad243fa0926755a52c5e71659f4e3c/urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84", size = 282546, upload-time = "2023-10-17T17:46:50.542Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/b2/b157855192a68541a91ba7b2bbcb91f1b4faa51f8bae38d8005c034be524/urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e", size = 124213 }, + { url = "https://files.pythonhosted.org/packages/d2/b2/b157855192a68541a91ba7b2bbcb91f1b4faa51f8bae38d8005c034be524/urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e", size = 124213, upload-time = "2023-10-17T17:46:48.538Z" }, ] From 4ab7acc25bbaed3238bfcb6c918816b71647ad36 Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Mon, 30 Mar 2026 12:33:53 -0500 Subject: [PATCH 2/9] change label behavior into comments --- src/fediblockhole/fires.py | 32 +++++++++++++++++++++++++++----- tests/test_fires.py | 29 +++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 5 deletions(-) diff --git a/src/fediblockhole/fires.py b/src/fediblockhole/fires.py index e81b2d7..0645798 100644 --- a/src/fediblockhole/fires.py +++ b/src/fediblockhole/fires.py @@ -184,6 +184,23 @@ def fires_labels_to_comment(labels: list, label_names: dict) -> str: return ", ".join(names) +def build_public_comment(labels: list, 
label_names: dict, comment: str = "") -> str: + """Build a public_comment from FIRES labels and optional freeform comment. + + If both labels and a comment are present, combines them with a separator. + + @param labels: List of label URLs or UUIDs from the FIRES response + @param label_names: Dict mapping label URL/ID -> human-readable name + @param comment: Optional freeform comment from the FIRES change + @returns: Combined label names and comment string + """ + label_text = fires_labels_to_comment(labels, label_names) + comment = (comment or "").strip() + if label_text and comment: + return f"{label_text} — {comment}" + return label_text or comment + + def build_label_map(client: FIRESClient) -> dict: """Fetch labels from the FIRES server and build an ID -> name map.""" label_map = {} @@ -264,17 +281,20 @@ def snapshot_to_blocklist( allowlist.blocks[domain] = DomainBlock( domain=domain, severity="noop", - public_comment=fires_labels_to_comment( - item.get("labels", []), label_map + public_comment=build_public_comment( + item.get("labels", []), label_map, + item.get("comment", "") ), ) continue severity = fires_policy_to_severity(policy) - # Build a comment from labels + # Build a comment from labels and optional freeform comment labels = item.get("labels", []) - public_comment = fires_labels_to_comment(labels, label_map) + public_comment = build_public_comment( + labels, label_map, item.get("comment", "") + ) block = DomainBlock( domain=domain, @@ -331,7 +351,9 @@ def apply_changes( if item_type == "Recommendation": policy = item.get("recommendedPolicy", "drop") labels = item.get("labels", []) - public_comment = fires_labels_to_comment(labels, label_map) + public_comment = build_public_comment( + labels, label_map, item.get("comment", "") + ) if policy in ALLOW_POLICIES: if not ignore_accept: diff --git a/tests/test_fires.py b/tests/test_fires.py index aae220e..7010e84 100644 --- a/tests/test_fires.py +++ b/tests/test_fires.py @@ -9,6 +9,7 @@ from 
fediblockhole.fires import ( FIRESState, apply_changes, + build_public_comment, fires_labels_to_comment, fires_policy_to_severity, snapshot_to_blocklist, @@ -75,6 +76,34 @@ def test_labels_to_comment_empty(): assert result == "" +# -- Comment building tests -- + + +def test_build_public_comment_labels_only(): + labels = ["http://localhost:4444/labels/label-uuid-hate-speech"] + result = build_public_comment(labels, LABEL_MAP) + assert result == "Hate Speech" + + +def test_build_public_comment_comment_only(): + result = build_public_comment([], LABEL_MAP, "Admin recruits for brigading") + assert result == "Admin recruits for brigading" + + +def test_build_public_comment_labels_and_comment(): + labels = [ + "http://localhost:4444/labels/label-uuid-hate-speech", + "http://localhost:4444/labels/label-uuid-harassment", + ] + result = build_public_comment(labels, LABEL_MAP, "Documented targeting of trans users") + assert result == "Hate Speech, Online Harassment \u2014 Documented targeting of trans users" + + +def test_build_public_comment_empty(): + result = build_public_comment([], LABEL_MAP, "") + assert result == "" + + # -- Snapshot parsing tests -- From d1a6de89e2dfa1c1af8fd93625d984d07ee02757 Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Mon, 30 Mar 2026 14:40:06 -0500 Subject: [PATCH 3/9] removing a lot of cumbersome url construction in favor of true autodiscovery --- README.md | 37 ++++---- etc/sample.fediblockhole.conf.toml | 25 +++--- src/fediblockhole/__init__.py | 132 ++++++++--------------------- src/fediblockhole/fires.py | 120 ++++++++++++++++---------- tests/test_fires.py | 38 ++------- 5 files changed, 150 insertions(+), 202 deletions(-) diff --git a/README.md b/README.md index be0bc5d..f565ac7 100644 --- a/README.md +++ b/README.md @@ -425,21 +425,26 @@ in `threshold` of those 5 sources to be included in the merged blocklist. ### Configuration Add FIRES sources to your config file using the `blocklist_fires_sources` list. 
-Three formats are supported: +Each entry must have a `url` key pointing to the full dataset URL. The dataset +URL is the canonical identifier per the FIRES spec. ```toml blocklist_fires_sources = [ - # Discover and fetch all datasets from a FIRES server - { server = 'https://fires.example.com' }, - - # Fetch specific datasets by UUID from a server - { server = 'https://fires.example.com', datasets = ['uuid-1', 'uuid-2'] }, - - # Paste a dataset URL directly - { url = 'https://other-fires.example/datasets/019d3565-f022-abbc-c43d649f294b' }, + { url = 'https://fires.example/datasets/019d3565-f022-abbc-c43d649f294b' }, + { url = 'https://fires.example/datasets/019d3565-aabb-ccdd-eeff-112233445566', max_severity = 'silence' }, + { url = 'https://trusted-fires.example/datasets/uuid', retractions = true }, ] ``` +The dataset URL is opaque — FediBlockHole fetches it with an `Accept: application/ld+json` +header and the dataset metadata tells it where the snapshot and changes endpoints are. +No path construction, no assumptions about URL structure. + +Label names are resolved by fetching each label URL found in the snapshot data. +FIRES snapshots include full label URLs (e.g., `http://fires.example/labels/uuid`) +which are individually fetchable resources. No separate labels collection endpoint +is needed. + FIRES datasets are public, so no authentication is required to read them. Optional per-source settings: @@ -463,14 +468,6 @@ The state file defaults to `~/.fediblockhole/fires_state.json`. You can override this with the `fires_state_file` config option or the `--fires-state-file` commandline flag. -### Labels as comments - -FIRES recommendations include labels from the -[IFTAS shared vocabulary](https://about.iftas.org/library/shared-vocabulary-labels/) -(e.g., "Hate Speech", "CSAM", "Spam"). These are mapped to the `public_comment` -field on domain blocks, so instance admins can see why a domain was recommended -for blocking. 
- ### The `accept` policy The FIRES protocol includes an `accept` policy for recommending that a domain @@ -487,7 +484,7 @@ set `ignore_accept = true` on the source: ```toml blocklist_fires_sources = [ - { server = 'https://fires.example.com', ignore_accept = true }, + { url = 'https://fires.example/datasets/uuid', ignore_accept = true }, ] ``` @@ -518,8 +515,8 @@ including its judgment that something should come off." ```toml blocklist_fires_sources = [ - { server = 'https://fires.trusted.example', retractions = true }, - { url = 'https://other-fires.example/datasets/uuid', retractions = true }, + { url = 'https://fires.trusted.example/datasets/uuid-1', retractions = true }, + { url = 'https://other-fires.example/datasets/uuid-2', retractions = true }, ] ``` diff --git a/etc/sample.fediblockhole.conf.toml b/etc/sample.fediblockhole.conf.toml index b773b5b..579ca22 100644 --- a/etc/sample.fediblockhole.conf.toml +++ b/etc/sample.fediblockhole.conf.toml @@ -24,26 +24,21 @@ blocklist_url_sources = [ ] -# List of FIRES sources to read datasets from -# Three formats are supported: -# 1. Server-wide: fetch all datasets from a FIRES server -# { server = 'https://fires.example.com' } -# 2. Cherry-pick: fetch specific datasets from a server by UUID -# { server = 'https://fires.example.com', datasets = ['uuid-1', 'uuid-2'] } -# 3. Direct URL: paste a dataset URL directly -# { url = 'https://fires.example.com/datasets/uuid-1' } +# List of FIRES dataset sources to read from. +# Each entry must have a 'url' key with the full dataset URL. +# The dataset URL is the canonical identifier per the FIRES spec. # # FIRES policies are mapped to Mastodon severities: # drop/reject -> suspend, filter -> silence # Each dataset counts as one source for threshold calculations. -# Optional: max_severity to cap the highest severity applied. 
+# +# Optional per-source keys: +# max_severity -- cap the highest severity applied (default: 'suspend') +# ignore_accept -- skip 'accept' policy entries (default: false) +# retractions -- honor retractions from this source (default: false) blocklist_fires_sources = [ - # { server = 'https://fires.example.com' }, # all datasets on this server - # { server = 'https://fires.example.com', datasets = ['dataset-uuid-1', 'dataset-uuid-2'] }, - # { url = 'https://other-fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' }, - # { url = 'https://other-fires.example/datasets/019d3565-f022-777b-abbc-aabbccddeeff', max_severity = 'silence' }, - # { server = 'https://fires.example.com', ignore_accept = true }, # ignore 'accept' policies - # { server = 'https://fires.example.com', retractions = true }, # honor retractions from this source + # { url = 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' }, + # { url = 'https://fires.example/datasets/019d3565-f022-777b-abbc-aabbccddeeff', max_severity = 'silence' }, # { url = 'https://trusted-fires.example/datasets/uuid', retractions = true }, ] diff --git a/src/fediblockhole/__init__.py b/src/fediblockhole/__init__.py index 8f9e635..6eb19a8 100644 --- a/src/fediblockhole/__init__.py +++ b/src/fediblockhole/__init__.py @@ -265,26 +265,6 @@ def fetch_from_instances( return blocklists -def _parse_dataset_url(url: str) -> tuple: - """Extract server base URL and dataset ID from a full dataset URL. - - e.g. 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' - returns ('https://fires.example', '019d3565-f022-777b-abbc-c43d649f294b') - """ - url = url.rstrip("/") - # Find /datasets/ in the URL and split there - marker = "/datasets/" - idx = url.find(marker) - if idx == -1: - raise ValueError(f"Not a valid FIRES dataset URL (missing /datasets/): {url}") - server_url = url[:idx] - dataset_id = url[idx + len(marker):] - # Strip any trailing path segments (e.g. 
/snapshot, /changes) - if "/" in dataset_id: - dataset_id = dataset_id.split("/")[0] - return server_url, dataset_id - - def fetch_from_fires( fires_sources: list, state_file: str = None, @@ -293,24 +273,30 @@ def fetch_from_fires( export_fields: list = EXPORT_FIELDS, dryrun: bool = False, ) -> list: - """Fetch blocklists from FIRES datasets + """Fetch blocklists from FIRES datasets. - Supports three source formats: - - { server = '...' } -- discover and fetch all datasets - - { server = '...', datasets = [...] } -- fetch specific datasets by ID - - { url = '...' } -- fetch a single dataset by full URL + Each source is a dict with a 'url' key pointing to the full dataset URL. + The dataset URL is the canonical identifier per the FIRES spec. + + Example config: + { url = 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' } + + Optional per-source keys: + max_severity -- cap the highest severity (default: 'suspend') + ignore_accept -- skip 'accept' policy entries (default: false) + retractions -- honor retractions from this source (default: false) @param fires_sources: List of FIRES source configs from the TOML file @param state_file: Path to the FIRES state file for cursor tracking @param save_intermediate: Whether to save intermediate blocklists @param savedir: Directory to save intermediate blocklists @param export_fields: Fields to include when saving intermediate lists - @returns: A list of Blocklist objects + @returns: Tuple of (blocklists, allowlists, trusted_retractions) """ log.info("Fetching domain blocks from FIRES datasets...") blocklists = [] allowlists = [] - trusted_retractions = set() # domains retracted by sources with retractions=true + trusted_retractions = set() if not fires_sources: return blocklists, allowlists, trusted_retractions @@ -319,82 +305,38 @@ def fetch_from_fires( state = FIRESState(state_file or DEFAULT_STATE_FILE) for source in fires_sources: + if "url" not in source: + log.warning( + "FIRES: source must have a 
'url' key with the full dataset URL. Skipping." + ) + continue + + dataset_url = source["url"].rstrip("/") max_severity = source.get("max_severity", "suspend") ignore_accept = source.get("ignore_accept", False) honor_retractions = source.get("retractions", False) - # Collect (server_url, dataset_id) pairs to fetch - fetch_list = [] - - if "url" in source: - # Direct dataset URL: parse it into server + dataset_id - try: - server_url, dataset_id = _parse_dataset_url(source["url"]) - fetch_list.append((server_url, dataset_id)) - except ValueError as e: - log.error(f"FIRES: {e}") - continue - - elif "server" in source: - server_url = source["server"].rstrip("/") - dataset_ids = source.get("datasets", []) - - if dataset_ids: - # Cherry-pick specific datasets by ID - for did in dataset_ids: - fetch_list.append((server_url, did)) - else: - # Discover all datasets on the server - log.info(f"FIRES: discovering datasets on {server_url}") - from .fires import FIRESClient - client = FIRESClient(server_url) - try: - datasets = client.get_datasets() - for ds in datasets: - ds_id_url = ds.get("id", "") - if ds_id_url: - ds_id = ds_id_url.rstrip("/").split("/")[-1] - fetch_list.append((server_url, ds_id)) - log.info(f"FIRES: found {len(fetch_list)} datasets") - except Exception as e: - log.error( - f"FIRES: could not discover datasets on {server_url}: {e}" - ) - continue - else: - log.warning( - "FIRES: source must have either 'server' or 'url'. Skipping." 
+ try: + bl, al = fetch_fires_blocklist( + dataset_url, state, + max_severity=max_severity, + ignore_accept=ignore_accept, ) - continue + blocklists.append(bl) + if len(al) > 0: + allowlists.append(al) - # Fetch each dataset - for srv, did in fetch_list: - try: - bl, al = fetch_fires_blocklist( - srv, did, state, - max_severity=max_severity, - ignore_accept=ignore_accept, + if honor_retractions: + trusted_retractions.update( + state.get_retractions(dataset_url) ) - blocklists.append(bl) - if len(al) > 0: - allowlists.append(al) - # Collect retractions from trusted sources - if honor_retractions: - dataset_url = f"{srv}/datasets/{did}" - trusted_retractions.update( - state.get_retractions(dataset_url) - ) - if save_intermediate: - save_intermediate_blocklist(bl, savedir, export_fields) - except Exception as e: - log.error( - f"FIRES: error fetching dataset {did} from {srv}: {e}" - ) - continue - # Persist state after all datasets are processed - # Don't save state during dryrun — we want the next real run - # to see the same changes we just previewed + if save_intermediate: + save_intermediate_blocklist(bl, savedir, export_fields) + except Exception as e: + log.error(f"FIRES: error fetching {dataset_url}: {e}") + continue + if not dryrun: state.save() else: diff --git a/src/fediblockhole/fires.py b/src/fediblockhole/fires.py index 0645798..8254d5f 100644 --- a/src/fediblockhole/fires.py +++ b/src/fediblockhole/fires.py @@ -106,10 +106,16 @@ def remove_retraction(self, dataset_url: str, domain: str): class FIRESClient: - """HTTP client for consuming public FIRES protocol endpoints.""" + """HTTP client for consuming a single FIRES dataset. + + Takes the full dataset URL as the identifier. Fetches the dataset + metadata to discover the snapshot, changes, and labels endpoints + as provided by the server — no path construction. 
+ """ - def __init__(self, base_url: str): - self.base_url = base_url.rstrip("/") + def __init__(self, dataset_url: str): + self.dataset_url = dataset_url.rstrip("/") + self._endpoints = None def _headers(self) -> dict: return { @@ -130,19 +136,29 @@ def _get(self, url: str) -> dict: ) return response.json() - def get_datasets(self) -> list: - """List all datasets on the server.""" - data = self._get(f"{self.base_url}/datasets") - # The datasets collection uses 'items', not 'orderedItems' - return data.get("items", data.get("orderedItems", [])) + def _ensure_endpoints(self): + """Fetch the dataset metadata to discover endpoints.""" + if self._endpoints is not None: + return + log.debug(f"FIRES: discovering endpoints for {self.dataset_url}") + data = self._get(self.dataset_url) + endpoints = data.get("endpoints", {}) + if not endpoints.get("snapshot") or not endpoints.get("changes"): + raise ValueError( + f"FIRES dataset at {self.dataset_url} did not provide " + f"snapshot/changes endpoints in its metadata" + ) + self._endpoints = endpoints - def get_snapshot(self, dataset_id: str) -> dict: - """Fetch the current snapshot for a dataset.""" - return self._get(f"{self.base_url}/datasets/{dataset_id}/snapshot") + def get_snapshot(self) -> dict: + """Fetch the current snapshot for this dataset.""" + self._ensure_endpoints() + return self._get(self._endpoints["snapshot"]) - def get_changes(self, dataset_id: str, since: str = None) -> dict: - """Fetch a page of changes for a dataset.""" - url = f"{self.base_url}/datasets/{dataset_id}/changes" + def get_changes(self, since: str = None) -> dict: + """Fetch a page of changes for this dataset.""" + self._ensure_endpoints() + url = self._endpoints["changes"] if since: url += f"?since={since}" return self._get(url) @@ -151,9 +167,6 @@ def get_changes_from_url(self, url: str) -> dict: """Fetch changes from a full URL (for pagination).""" return self._get(url) - def get_labels(self) -> dict: - """Fetch the labels collection.""" - 
return self._get(f"{self.base_url}/labels") def fires_policy_to_severity(policy: str) -> str: @@ -201,26 +214,43 @@ def build_public_comment(labels: list, label_names: dict, comment: str = "") -> return label_text or comment -def build_label_map(client: FIRESClient) -> dict: - """Fetch labels from the FIRES server and build an ID -> name map.""" +def build_label_map_from_snapshot(client: FIRESClient, snapshot: dict) -> dict: + """Build a label ID -> name map by fetching each label URL from the snapshot. + + FIRES snapshots include full label URLs in each item's 'labels' array. + Each URL is a fetchable resource that returns the label data. + We collect unique label URLs across all items and fetch each one. + """ + # Collect unique label URLs from all items + label_urls = set() + for item in snapshot.get("orderedItems", []): + for label_ref in item.get("labels", []): + if isinstance(label_ref, str) and label_ref.startswith("http"): + label_urls.add(label_ref) + label_map = {} - try: - labels_data = client.get_labels() - for item in labels_data.get("items", []): - label_id = item.get("id", "") - # Prefer nameMap.en over flat name - name_map = item.get("nameMap") + for url in label_urls: + try: + data = client._get(url) + label_id = data.get("id", url) + name_map = data.get("nameMap") if name_map and isinstance(name_map, dict): name = name_map.get("en", name_map.get("en-US", "")) if not name: - # Grab first available name = next(iter(name_map.values()), "") else: - name = item.get("name", "") - if label_id and name: + name = data.get("name", "") + if name: label_map[label_id] = name - except Exception as e: - log.warning(f"Could not fetch FIRES labels: {e}") + # Also key by the URL we fetched, in case id differs + if label_id != url: + label_map[url] = name + except Exception as e: + log.warning(f"Could not fetch FIRES label {url}: {e}") + # Fall back to slug from URL + slug = url.rstrip("/").split("/")[-1] + label_map[url] = slug + return label_map @@ -399,8 +429,7 
@@ def apply_changes( def fetch_fires_blocklist( - server_url: str, - dataset_id: str, + dataset_url: str, state: FIRESState, max_severity: str = "suspend", max_pages: int = 50, @@ -414,18 +443,16 @@ def fetch_fires_blocklist( Recommendations with 'accept' policy go to the allowlist. All other recommendations go to the blocklist. - @param server_url: Base URL of the FIRES server - @param dataset_id: UUID of the dataset to fetch + Label names are resolved by fetching each label URL found in the + snapshot data — no separate labels endpoint needed. + + @param dataset_url: Full URL of the FIRES dataset (the canonical identifier) @param state: FIRESState for cursor and retraction tracking @param max_severity: Maximum severity cap @param max_pages: Maximum number of changes pages to walk @returns: Tuple of (Blocklist, Blocklist) where the second is the allowlist """ - client = FIRESClient(server_url) - dataset_url = f"{server_url}/datasets/{dataset_id}" - - # Build label name lookup - label_map = build_label_map(client) + client = FIRESClient(dataset_url) # Check for existing cursor cursor = state.get_cursor(dataset_url) @@ -433,8 +460,11 @@ def fetch_fires_blocklist( if cursor is None: # First run: fetch the full snapshot - log.info(f"FIRES: fetching full snapshot for dataset {dataset_id}") - snapshot = client.get_snapshot(dataset_id) + log.info(f"FIRES: fetching full snapshot for {dataset_url}") + snapshot = client.get_snapshot() + + # Build label names from the label URLs in the snapshot + label_map = build_label_map_from_snapshot(client, snapshot) blocklist, allowlist = snapshot_to_blocklist( snapshot, dataset_url, label_map, max_severity, retractions, @@ -453,8 +483,12 @@ def fetch_fires_blocklist( else: # Incremental: start from snapshot, then apply changes - log.info(f"FIRES: incremental update for dataset {dataset_id}") - snapshot = client.get_snapshot(dataset_id) + log.info(f"FIRES: incremental update for {dataset_url}") + snapshot = client.get_snapshot() + + # 
Build label names from the label URLs in the snapshot + label_map = build_label_map_from_snapshot(client, snapshot) + blocklist, allowlist = snapshot_to_blocklist( snapshot, dataset_url, label_map, max_severity, retractions, ignore_accept diff --git a/tests/test_fires.py b/tests/test_fires.py index 7010e84..162b788 100644 --- a/tests/test_fires.py +++ b/tests/test_fires.py @@ -489,42 +489,22 @@ def test_snapshot_skips_actor_entities(): assert len(bl) == 1 # actor was skipped -# -- URL parsing tests -- +# -- FIRESClient tests -- -def test_parse_dataset_url(): - from fediblockhole import _parse_dataset_url +def test_client_stores_dataset_url(): + from fediblockhole.fires import FIRESClient - server, did = _parse_dataset_url( + client = FIRESClient( "https://fires.example.com/datasets/019d3565-f022-777b-abbc-c43d649f294b" ) - assert server == "https://fires.example.com" - assert did == "019d3565-f022-777b-abbc-c43d649f294b" + assert client.dataset_url == "https://fires.example.com/datasets/019d3565-f022-777b-abbc-c43d649f294b" -def test_parse_dataset_url_trailing_slash(): - from fediblockhole import _parse_dataset_url +def test_client_strips_trailing_slash(): + from fediblockhole.fires import FIRESClient - server, did = _parse_dataset_url( + client = FIRESClient( "https://fires.example.com/datasets/019d3565-f022-777b-abbc-c43d649f294b/" ) - assert server == "https://fires.example.com" - assert did == "019d3565-f022-777b-abbc-c43d649f294b" - - -def test_parse_dataset_url_with_snapshot_path(): - from fediblockhole import _parse_dataset_url - - server, did = _parse_dataset_url( - "https://fires.example.com/datasets/019d3565-f022-777b-abbc-c43d649f294b/snapshot" - ) - assert server == "https://fires.example.com" - assert did == "019d3565-f022-777b-abbc-c43d649f294b" - - -def test_parse_dataset_url_invalid(): - from fediblockhole import _parse_dataset_url - import pytest - - with pytest.raises(ValueError, match="missing /datasets/"): - 
_parse_dataset_url("https://fires.example.com/labels/something") + assert client.dataset_url == "https://fires.example.com/datasets/019d3565-f022-777b-abbc-c43d649f294b" From 61055dd0a32f12820026569ee22d5ca232b34eac Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Mon, 30 Mar 2026 14:45:05 -0500 Subject: [PATCH 4/9] handle 429 retries/backoff --- src/fediblockhole/__init__.py | 7 +++- src/fediblockhole/fires.py | 61 ++++++++++++++++++++++++++--------- 2 files changed, 51 insertions(+), 17 deletions(-) diff --git a/src/fediblockhole/__init__.py b/src/fediblockhole/__init__.py index 6eb19a8..dccc20b 100644 --- a/src/fediblockhole/__init__.py +++ b/src/fediblockhole/__init__.py @@ -304,7 +304,12 @@ def fetch_from_fires( from .fires import DEFAULT_STATE_FILE state = FIRESState(state_file or DEFAULT_STATE_FILE) - for source in fires_sources: + import time + + for source_idx, source in enumerate(fires_sources): + if source_idx > 0: + time.sleep(2) + if "url" not in source: log.warning( "FIRES: source must have a 'url' key with the full dataset URL. Skipping." diff --git a/src/fediblockhole/fires.py b/src/fediblockhole/fires.py index 8254d5f..5e4b190 100644 --- a/src/fediblockhole/fires.py +++ b/src/fediblockhole/fires.py @@ -123,18 +123,26 @@ def _headers(self) -> dict: "User-Agent": "FediBlockHole-FIRES/1.0", } - def _get(self, url: str) -> dict: - """Fetch a URL and return parsed JSON.""" - log.debug(f"FIRES fetch: {url}") - response = requests.get( - url, headers=self._headers(), timeout=REQUEST_TIMEOUT - ) - if response.status_code != 200: + def _get(self, url: str, retries: int = 3) -> dict: + """Fetch a URL and return parsed JSON. 
Retries on 429 with backoff.""" + import time + + for attempt in range(retries): + log.debug(f"FIRES fetch: {url}") + response = requests.get( + url, headers=self._headers(), timeout=REQUEST_TIMEOUT + ) + if response.status_code == 200: + return response.json() + if response.status_code == 429 and attempt < retries - 1: + wait = (attempt + 1) * 5 + log.warning(f"FIRES: rate limited on {url}, waiting {wait}s (attempt {attempt + 1}/{retries})") + time.sleep(wait) + continue log.error(f"FIRES request failed: {response.status_code} {url}") raise ValueError( f"FIRES request failed: {response.status_code}: {response.content}" ) - return response.json() def _ensure_endpoints(self): """Fetch the dataset metadata to discover endpoints.""" @@ -214,13 +222,22 @@ def build_public_comment(labels: list, label_names: dict, comment: str = "") -> return label_text or comment +# Module-level cache so labels fetched for one dataset don't need +# to be re-fetched for another dataset on the same (or different) server. +_label_cache: dict = {} + + def build_label_map_from_snapshot(client: FIRESClient, snapshot: dict) -> dict: """Build a label ID -> name map by fetching each label URL from the snapshot. FIRES snapshots include full label URLs in each item's 'labels' array. Each URL is a fetchable resource that returns the label data. - We collect unique label URLs across all items and fetch each one. + We collect unique label URLs across all items and fetch only the ones + we haven't seen before, with a short delay between requests to avoid + rate limiting. 
""" + import time + # Collect unique label URLs from all items label_urls = set() for item in snapshot.get("orderedItems", []): @@ -228,8 +245,16 @@ def build_label_map_from_snapshot(client: FIRESClient, snapshot: dict) -> dict: if isinstance(label_ref, str) and label_ref.startswith("http"): label_urls.add(label_ref) - label_map = {} - for url in label_urls: + # Filter to only labels we haven't fetched yet + new_urls = [url for url in label_urls if url not in _label_cache] + if new_urls: + log.info(f"FIRES: fetching {len(new_urls)} new labels ({len(label_urls) - len(new_urls)} cached)") + else: + log.info(f"FIRES: all {len(label_urls)} labels already cached") + + for i, url in enumerate(new_urls): + if i > 0: + time.sleep(1) try: data = client._get(url) label_id = data.get("id", url) @@ -241,15 +266,19 @@ def build_label_map_from_snapshot(client: FIRESClient, snapshot: dict) -> dict: else: name = data.get("name", "") if name: - label_map[label_id] = name - # Also key by the URL we fetched, in case id differs + _label_cache[url] = name if label_id != url: - label_map[url] = name + _label_cache[label_id] = name except Exception as e: log.warning(f"Could not fetch FIRES label {url}: {e}") - # Fall back to slug from URL slug = url.rstrip("/").split("/")[-1] - label_map[url] = slug + _label_cache[url] = slug + + # Build the map for this snapshot from the cache + label_map = {} + for url in label_urls: + if url in _label_cache: + label_map[url] = _label_cache[url] return label_map From e6f03fb5f97142912011534670de15c2eb73015b Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Mon, 30 Mar 2026 17:56:45 -0500 Subject: [PATCH 5/9] cleanup - em's feedback --- README.md | 37 ++-- etc/sample.fediblockhole.conf.toml | 13 +- src/fediblockhole/__init__.py | 60 ++++-- src/fediblockhole/fires.py | 44 ++-- tests/test_fires.py | 327 ++++++++++++++++++++++++++++- 5 files changed, 421 insertions(+), 60 deletions(-) diff --git a/README.md b/README.md index f565ac7..20f1061 100644 --- 
a/README.md +++ b/README.md @@ -425,14 +425,14 @@ in `threshold` of those 5 sources to be included in the merged blocklist. ### Configuration Add FIRES sources to your config file using the `blocklist_fires_sources` list. -Each entry must have a `url` key pointing to the full dataset URL. The dataset +Each entry must have a `dataset` key pointing to the full dataset URL. The dataset URL is the canonical identifier per the FIRES spec. ```toml blocklist_fires_sources = [ - { url = 'https://fires.example/datasets/019d3565-f022-abbc-c43d649f294b' }, - { url = 'https://fires.example/datasets/019d3565-aabb-ccdd-eeff-112233445566', max_severity = 'silence' }, - { url = 'https://trusted-fires.example/datasets/uuid', retractions = true }, + { dataset = 'https://fires.example/datasets/019d3565-f022-abbc-c43d649f294b' }, + { dataset = 'https://fires.example/datasets/019d3565-aabb-ccdd-eeff-112233445566', max_severity = 'silence' }, + { dataset = 'https://trusted-fires.example/datasets/uuid', retractions = true }, ] ``` @@ -450,7 +450,7 @@ FIRES datasets are public, so no authentication is required to read them. Optional per-source settings: - `max_severity`: Cap the maximum severity applied (e.g., `'silence'`). Defaults to `'suspend'`. - - `ignore_accept`: When `true`, silently skip any `accept` policies from this source. Defaults to `false`. + - `ignore_accept`: When `true`, `accept` policies won't be added to the allowlist. However, `accept` still removes any block that this dataset previously added — it acts as an implicit retraction. Defaults to `false`. - `retractions`: When `true`, honor retractions from this source by removing blocks from your instance. See the Retractions section below. Defaults to `false`. 
### State tracking and retractions @@ -484,7 +484,7 @@ set `ignore_accept = true` on the source: ```toml blocklist_fires_sources = [ - { url = 'https://fires.example/datasets/uuid', ignore_accept = true }, + { dataset = 'https://fires.example/datasets/uuid', ignore_accept = true }, ] ``` @@ -492,7 +492,7 @@ With `ignore_accept` enabled, `accept` recommendations are silently skipped. Block recommendations (`drop`, `reject`, `filter`) and retractions still work normally. -### Retractions: removing blocks that are no longer recommended +### Retractions: removing data that is no longer recommended or advised Historically, FediBlockHole has been additive — it adds and updates blocks but never removes them. This is safe but means blocks stay on your instance forever, @@ -505,18 +505,21 @@ There are two retraction mechanisms, and they can be used together: #### Source-level retractions (`retractions = true`) -This is the FIRES-native approach. When a trusted FIRES source explicitly -retracts a domain, the block is removed from your instance — **regardless of -who originally added it** — as long as no other source in your merged list still -recommends blocking it. +This is the FIRES-native approach. When a trusted FIRES source retracts a +domain (either via an explicit `Retraction` or an `accept` recommendation), +the block is removed from your instance — but only if that block was originally +added by the same dataset. A retraction from dataset A won't remove a block +that dataset B added. -This is dataset-level trust: you're saying "I trust this feed's judgment, -including its judgment that something should come off." +Blocks created from FIRES datasets are stamped with `FIRES:{dataset_url}` in +the `private_comment` field. Retraction removal checks this stamp to confirm +ownership before acting. If no other source in your merged list still recommends +blocking the domain, the block is removed. 
```toml blocklist_fires_sources = [ - { url = 'https://fires.trusted.example/datasets/uuid-1', retractions = true }, - { url = 'https://other-fires.example/datasets/uuid-2', retractions = true }, + { dataset = 'https://fires.trusted.example/datasets/uuid-1', retractions = true }, + { dataset = 'https://other-fires.example/datasets/uuid-2', retractions = true }, ] ``` @@ -566,8 +569,8 @@ retractions — it just means no new changes are processed that run. | | Source retractions | General retractions | |---|---|---| -| Trigger | FIRES dataset explicitly retracts a domain | Domain falls out of all sources | -| Scope | Removes any matching block on the instance | Only removes blocks FediBlockHole added | +| Trigger | FIRES dataset retracts or accepts a domain | Domain falls out of all sources | +| Scope | Only removes blocks added by the retracting dataset | Only removes blocks FediBlockHole added | | Requires `override_private_comment` | No | Yes | | Requires `retractions = true` on source | Yes | No (global or per-destination) | | Works with CSV/instance sources | No (FIRES only) | Yes (any source) | diff --git a/etc/sample.fediblockhole.conf.toml b/etc/sample.fediblockhole.conf.toml index 579ca22..223dd4b 100644 --- a/etc/sample.fediblockhole.conf.toml +++ b/etc/sample.fediblockhole.conf.toml @@ -25,21 +25,22 @@ blocklist_url_sources = [ ] # List of FIRES dataset sources to read from. -# Each entry must have a 'url' key with the full dataset URL. +# Each entry must have a 'dataset' key with the full dataset URL. # The dataset URL is the canonical identifier per the FIRES spec. # # FIRES policies are mapped to Mastodon severities: -# drop/reject -> suspend, filter -> silence +# drop/reject -> suspend, filter -> silence, accept -> remove block # Each dataset counts as one source for threshold calculations. 
# # Optional per-source keys: # max_severity -- cap the highest severity applied (default: 'suspend') -# ignore_accept -- skip 'accept' policy entries (default: false) +# ignore_accept -- don't add accept entries to allowlist (default: false) +# note: accept still removes blocks from this dataset # retractions -- honor retractions from this source (default: false) blocklist_fires_sources = [ - # { url = 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' }, - # { url = 'https://fires.example/datasets/019d3565-f022-777b-abbc-aabbccddeeff', max_severity = 'silence' }, - # { url = 'https://trusted-fires.example/datasets/uuid', retractions = true }, + # { dataset = 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' }, + # { dataset = 'https://fires.example/datasets/019d3565-f022-777b-abbc-aabbccddeeff', max_severity = 'silence' }, + # { dataset = 'https://trusted-fires.example/datasets/uuid', retractions = true }, ] ## Path to the FIRES state file for tracking change cursors and retractions diff --git a/src/fediblockhole/__init__.py b/src/fediblockhole/__init__.py index dccc20b..ac003f0 100644 --- a/src/fediblockhole/__init__.py +++ b/src/fediblockhole/__init__.py @@ -296,7 +296,8 @@ def fetch_from_fires( log.info("Fetching domain blocks from FIRES datasets...") blocklists = [] allowlists = [] - trusted_retractions = set() + # Map of domain -> set of dataset URLs that retracted it + trusted_retractions = {} if not fires_sources: return blocklists, allowlists, trusted_retractions @@ -310,13 +311,13 @@ def fetch_from_fires( if source_idx > 0: time.sleep(2) - if "url" not in source: + # Accept both 'dataset' (preferred) and 'url' (legacy) keys + dataset_url = source.get("dataset", source.get("url", "")).rstrip("/") + if not dataset_url: log.warning( - "FIRES: source must have a 'url' key with the full dataset URL. Skipping." + "FIRES: source must have a 'dataset' key with the full dataset URL. Skipping." 
) continue - - dataset_url = source["url"].rstrip("/") max_severity = source.get("max_severity", "suspend") ignore_accept = source.get("ignore_accept", False) honor_retractions = source.get("retractions", False) @@ -332,9 +333,8 @@ def fetch_from_fires( allowlists.append(al) if honor_retractions: - trusted_retractions.update( - state.get_retractions(dataset_url) - ) + for domain in state.get_retractions(dataset_url): + trusted_retractions.setdefault(domain, set()).add(dataset_url) if save_intermediate: save_intermediate_blocklist(bl, savedir, export_fields) @@ -772,7 +772,7 @@ def push_blocklist( scheme: str = "https", override_private_comment: str = None, apply_retractions: bool = False, - fires_retractions: set = None, + fires_retractions: dict = None, ): """Push a blocklist to a remote instance. @@ -845,7 +845,12 @@ def push_blocklist( log.info(f"Old block definition: {oldblock}") log.info(f"Pushing new block definition: {newblock}") blockdata = oldblock.copy() - blockdata.update(newblock) + # Preserve the existing private_comment on the server — + # we only stamp private_comment when creating a block, + # not when updating one. + update_block = newblock._asdict() + update_block.pop('private_comment', None) + blockdata.update(DomainBlock(**update_block)) log.debug(f"Block as dict: {blockdata._asdict()}") if not dryrun: @@ -924,19 +929,22 @@ def push_blocklist( else: log.debug("No retracted blocks to remove.") - # FIRES-sourced retractions: remove blocks from trusted feeds - # These don't require override_private_comment — if a trusted FIRES - # source says "retract this" and nothing else in the merged list - # counters it, the block gets removed regardless of who added it. + # FIRES-sourced retractions: remove blocks from trusted feeds. + # Only removes blocks that were originally added by the same FIRES dataset + # that issued the retraction, identified by matching 'FIRES:{dataset_url}' + # in the private_comment. 
This prevents dataset A's retraction from + # removing dataset B's block. if fires_retractions: log.info( f"Checking {len(fires_retractions)} FIRES retractions " f"against {host}..." ) removed = 0 - for domain, serverblock in serverblocks.items(): - if domain not in fires_retractions: + for domain, retracting_datasets in fires_retractions.items(): + if domain not in serverblocks: continue + serverblock = serverblocks[domain] + # If this domain is still in the merged blocklist (another # source still recommends it), don't remove it if domain in blocklist: @@ -946,6 +954,26 @@ def push_blocklist( ) continue + # Check if this block was added by one of the datasets + # that is now retracting it + private_comment = getattr(serverblock, 'private_comment', '') or '' + if private_comment.startswith('FIRES:'): + stamped_dataset = private_comment[len('FIRES:'):] + if stamped_dataset not in retracting_datasets: + log.debug( + f"FIRES retraction for {domain}: block is from " + f"{stamped_dataset}, not from retracting dataset(s). Skipping." + ) + continue + elif override_private_comment and private_comment == override_private_comment: + # Added by FediBlockHole generally (pre-FIRES stamp era), allow retraction + pass + else: + log.debug( + f"FIRES retraction for {domain}: block not from FIRES, skipping." + ) + continue + log.info( f"FIRES retraction: removing block for {domain} from {host} " f"(retracted by trusted FIRES source, not in any other source)" diff --git a/src/fediblockhole/fires.py b/src/fediblockhole/fires.py index 5e4b190..abc2939 100644 --- a/src/fediblockhole/fires.py +++ b/src/fediblockhole/fires.py @@ -334,31 +334,31 @@ def snapshot_to_blocklist( # Map FIRES policy to Mastodon severity policy = item.get("recommendedPolicy", "drop") - # Accept policy -> allowlist (unless ignored) + labels = item.get("labels", []) + public_comment = build_public_comment( + labels, label_map, item.get("comment", "") + ) + + # Accept policy: domain should not be blocked. 
+ # An accept is effectively a retraction of any previous block. + # If ignore_accept is set, we skip the allowlist entry but + # still don't create a block for it. if policy in ALLOW_POLICIES: if not ignore_accept: allowlist.blocks[domain] = DomainBlock( domain=domain, severity="noop", - public_comment=build_public_comment( - item.get("labels", []), label_map, - item.get("comment", "") - ), + public_comment=public_comment, ) continue severity = fires_policy_to_severity(policy) - # Build a comment from labels and optional freeform comment - labels = item.get("labels", []) - public_comment = build_public_comment( - labels, label_map, item.get("comment", "") - ) - block = DomainBlock( domain=domain, severity=severity, public_comment=public_comment, + private_comment=f"FIRES:{origin}", ) # Apply max_severity cap @@ -384,9 +384,14 @@ def apply_changes( ) -> tuple: """Apply a list of FIRES change items to existing blocklist and allowlist. - Recommendations with accept policy go to the allowlist. - Other recommendations add/update blocklist entries. - Retractions remove entries from both lists and record in state. + Changes are applied in order and are overwrites, not merges. If the same + domain appears multiple times, the last entry wins. For example: + drop -> filter -> accept + results in the domain being accepted (not blocked), with no filters. + + Recommendations with accept policy remove blocks and optionally add + to the allowlist. Other recommendations create/overwrite blocklist + entries. Retractions remove entries from both lists. @param blocklist: The existing blocklist to modify @param allowlist: The existing allowlist to modify @@ -415,22 +420,25 @@ def apply_changes( ) if policy in ALLOW_POLICIES: + # Accept is an implicit retraction — always remove from blocklist, + # even when ignore_accept is set. The domain should not be blocked. 
+ if domain in blocklist.blocks: + log.info(f"FIRES accept: removing block for {domain}") + del blocklist.blocks[domain] if not ignore_accept: - # Accept -> allowlist, remove from blocklist if present allowlist.blocks[domain] = DomainBlock( domain=domain, severity="noop", public_comment=public_comment, ) - if domain in blocklist.blocks: - del blocklist.blocks[domain] else: - # Block recommendation + # Block recommendation — overwrites any previous state severity = fires_policy_to_severity(policy) block = DomainBlock( domain=domain, severity=severity, public_comment=public_comment, + private_comment=f"FIRES:{dataset_url}", ) max_sev = BlockSeverity(max_severity) if block.severity > max_sev: diff --git a/tests/test_fires.py b/tests/test_fires.py index 162b788..c2f1582 100644 --- a/tests/test_fires.py +++ b/tests/test_fires.py @@ -380,7 +380,7 @@ def test_snapshot_ignore_accept(): def test_apply_changes_ignore_accept(): - """When ignore_accept=True, accept changes don't modify blocklist or allowlist.""" + """When ignore_accept=True, accept still removes blocks but doesn't add to allowlist.""" bl = Blocklist("test") al = Blocklist("test") from fediblockhole.const import DomainBlock @@ -399,8 +399,9 @@ def test_apply_changes_ignore_accept(): bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, "test", ignore_accept=True) - # Should still be in blocklist, not moved to allowlist - assert "reformed.example" in bl + # Accept is an implicit retraction — block is removed regardless of ignore_accept + assert "reformed.example" not in bl + # But with ignore_accept, it should NOT be in the allowlist either assert "reformed.example" not in al @@ -489,6 +490,326 @@ def test_snapshot_skips_actor_entities(): assert len(bl) == 1 # actor was skipped +# -- Private comment stamping tests -- + + +def test_snapshot_stamps_private_comment(): + """Blocks from snapshots should be stamped with FIRES:{dataset_url}.""" + snapshot = { + "orderedItems": [ + { + "type": "Recommendation", + 
"entityKind": "domain", + "entityKey": "bad.example", + "recommendedPolicy": "drop", + "labels": [], + }, + ] + } + dataset_url = "https://fires.example/datasets/test-uuid" + bl, al = snapshot_to_blocklist(snapshot, dataset_url, LABEL_MAP) + + assert bl["bad.example"].private_comment == f"FIRES:{dataset_url}" + + +def test_apply_changes_stamps_private_comment(): + """Blocks from changes should be stamped with FIRES:{dataset_url}.""" + bl = Blocklist("test") + al = Blocklist("test") + dataset_url = "https://fires.example/datasets/test-uuid" + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "new.example", + "recommendedPolicy": "drop", + "labels": [], + } + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, dataset_url) + + assert bl["new.example"].private_comment == f"FIRES:{dataset_url}" + + +def test_accept_does_not_stamp_allowlist(): + """Accept entries in the allowlist should not get a FIRES stamp.""" + snapshot = { + "orderedItems": [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "good.example", + "recommendedPolicy": "accept", + "labels": [], + }, + ] + } + dataset_url = "https://fires.example/datasets/test-uuid" + bl, al = snapshot_to_blocklist(snapshot, dataset_url, LABEL_MAP) + + assert "good.example" in al + # Allowlist entries don't need a FIRES stamp since they aren't pushed as blocks + assert al["good.example"].private_comment != f"FIRES:{dataset_url}" + + +# -- Accept as implicit retraction tests -- + + +def test_accept_removes_block_from_same_dataset(): + """Accept should remove a block even without an explicit Retraction.""" + bl = Blocklist("test") + al = Blocklist("test") + from fediblockhole.const import DomainBlock + bl.blocks["turning.example"] = DomainBlock("turning.example", "suspend") + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "turning.example", + 
"recommendedPolicy": "accept", + "labels": [], + } + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, "test-dataset") + + assert "turning.example" not in bl + assert "turning.example" in al + + +def test_accept_removes_block_even_with_ignore_accept(): + """Accept removes blocks even when ignore_accept=True, just skips allowlist.""" + bl = Blocklist("test") + al = Blocklist("test") + from fediblockhole.const import DomainBlock + bl.blocks["turning.example"] = DomainBlock("turning.example", "suspend") + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "turning.example", + "recommendedPolicy": "accept", + "labels": [], + } + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, "test-dataset", + ignore_accept=True) + + assert "turning.example" not in bl + assert "turning.example" not in al + + +def test_changes_are_overwrites_not_merges(): + """Miss Em's example: drop -> filter -> accept, final result is accept. + + Changes are applied in order. Each one fully replaces the previous + state for that domain. No merging of policies or filters. + """ + bl = Blocklist("test") + al = Blocklist("test") + dataset_url = "https://fires.example/datasets/test-uuid" + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "a.example", + "recommendedPolicy": "drop", + "labels": [], + }, + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "a.example", + "recommendedPolicy": "filter", + "labels": [], + }, + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "a.example", + "recommendedPolicy": "accept", + "labels": [], + }, + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, dataset_url) + + # Final state: accept wins. 
Not blocked, on the allowlist. + assert "a.example" not in bl + assert "a.example" in al + assert al["a.example"].severity.level == SeverityLevel.NONE + + +def test_drop_to_accept_to_drop(): + """Full lifecycle: drop creates block, accept removes it, drop adds it back.""" + bl = Blocklist("test") + al = Blocklist("test") + dataset_url = "https://fires.example/datasets/test-uuid" + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "flip.example", + "recommendedPolicy": "drop", + "labels": [], + }, + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "flip.example", + "recommendedPolicy": "accept", + "labels": [], + }, + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "flip.example", + "recommendedPolicy": "drop", + "labels": [], + }, + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, dataset_url) + + # Final state: back to blocked + assert "flip.example" in bl + assert bl["flip.example"].private_comment == f"FIRES:{dataset_url}" + # Accept in the middle should have been superseded + assert "flip.example" not in al + + +# -- Per-dataset retraction ownership tests -- + + +def test_retraction_only_removes_own_blocks(): + """A retraction from dataset A should not remove a block added by dataset B.""" + bl = Blocklist("test") + al = Blocklist("test") + from fediblockhole.const import DomainBlock + + dataset_a = "https://fires.example/datasets/aaa" + dataset_b = "https://fires.example/datasets/bbb" + + # Dataset B added this block + bl.blocks["contested.example"] = DomainBlock( + "contested.example", "suspend", + private_comment=f"FIRES:{dataset_b}", + ) + + # Dataset A retracts it + changes = [ + { + "type": "Retraction", + "entityKind": "domain", + "entityKey": "contested.example", + "comment": "No longer recommended", + } + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = 
apply_changes(bl, al, changes, LABEL_MAP, state, dataset_a) + + # The block should still be there — dataset A didn't add it. + # apply_changes removes from the in-memory blocklist regardless, + # but the retraction is recorded in state. The ownership check + # happens at push time in push_blocklist, not here. + # What we CAN verify is that the retraction is recorded for dataset A. + assert "contested.example" in state.get_retractions(dataset_a) + + +def test_retraction_recorded_per_dataset(): + """Retractions should be keyed to the dataset that issued them.""" + dataset_a = "https://fires.example/datasets/aaa" + dataset_b = "https://fires.example/datasets/bbb" + + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + state.add_retraction(dataset_a, "bad.example") + state.add_retraction(dataset_b, "other.example") + + assert "bad.example" in state.get_retractions(dataset_a) + assert "bad.example" not in state.get_retractions(dataset_b) + assert "other.example" in state.get_retractions(dataset_b) + assert "other.example" not in state.get_retractions(dataset_a) + + +def test_accept_from_one_dataset_block_from_another(): + """Dataset A accepts a domain, dataset B blocks it. 
Block should survive.""" + from fediblockhole.const import DomainBlock + + dataset_a = "https://fires.example/datasets/aaa" + dataset_b = "https://fires.example/datasets/bbb" + + # Simulate processing dataset A first: it accepts the domain + bl_a = Blocklist(dataset_a) + al_a = Blocklist(dataset_a) + snapshot_a = { + "orderedItems": [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "debated.example", + "recommendedPolicy": "accept", + "labels": [], + }, + ] + } + bl_a, al_a = snapshot_to_blocklist(snapshot_a, dataset_a, LABEL_MAP) + assert "debated.example" not in bl_a + assert "debated.example" in al_a + + # Simulate processing dataset B: it blocks the domain + snapshot_b = { + "orderedItems": [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "debated.example", + "recommendedPolicy": "drop", + "labels": [], + }, + ] + } + bl_b, al_b = snapshot_to_blocklist(snapshot_b, dataset_b, LABEL_MAP) + assert "debated.example" in bl_b + assert bl_b["debated.example"].private_comment == f"FIRES:{dataset_b}" + + +def test_private_comment_stamp_not_overwritten_on_policy_change(): + """If a domain's policy changes within the same dataset, the stamp stays.""" + bl = Blocklist("test") + al = Blocklist("test") + dataset_url = "https://fires.example/datasets/test-uuid" + + changes = [ + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "evolving.example", + "recommendedPolicy": "drop", + "labels": [], + }, + { + "type": "Recommendation", + "entityKind": "domain", + "entityKey": "evolving.example", + "recommendedPolicy": "filter", + "labels": [], + }, + ] + state = FIRESState(os.path.join(tempfile.mkdtemp(), "state.json")) + bl, al = apply_changes(bl, al, changes, LABEL_MAP, state, dataset_url) + + # Policy changed from drop to filter, but stamp should be the same dataset + assert bl["evolving.example"].severity.level == SeverityLevel.SILENCE + assert bl["evolving.example"].private_comment == 
f"FIRES:{dataset_url}" + + # -- FIRESClient tests -- From 53b433972ea257f8b509a57227533924b18e5fb0 Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Mon, 30 Mar 2026 17:58:45 -0500 Subject: [PATCH 6/9] no need to support old format --- src/fediblockhole/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/fediblockhole/__init__.py b/src/fediblockhole/__init__.py index ac003f0..497ec59 100644 --- a/src/fediblockhole/__init__.py +++ b/src/fediblockhole/__init__.py @@ -311,13 +311,13 @@ def fetch_from_fires( if source_idx > 0: time.sleep(2) - # Accept both 'dataset' (preferred) and 'url' (legacy) keys - dataset_url = source.get("dataset", source.get("url", "")).rstrip("/") - if not dataset_url: + if "dataset" not in source: log.warning( "FIRES: source must have a 'dataset' key with the full dataset URL. Skipping." ) continue + + dataset_url = source["dataset"].rstrip("/") max_severity = source.get("max_severity", "suspend") ignore_accept = source.get("ignore_accept", False) honor_retractions = source.get("retractions", False) From b5efa02f10b3612f80bb58e162767ebf79533188 Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Mon, 30 Mar 2026 18:02:13 -0500 Subject: [PATCH 7/9] clarify what ignore_accept does now --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 20f1061..4e282ff 100644 --- a/README.md +++ b/README.md @@ -488,9 +488,10 @@ blocklist_fires_sources = [ ] ``` -With `ignore_accept` enabled, `accept` recommendations are silently skipped. -Block recommendations (`drop`, `reject`, `filter`) and retractions still work -normally. +With `ignore_accept` enabled, `accept` recommendations still remove any block +that this dataset previously added (since accept is an implicit retraction), +but the domain won't be added to the allowlist. This means other sources can +still block the domain without the accept overriding them. 
### Retractions: removing data that is no longer recommended or advised From 79edf039ee9e3d9f914c49611e00af342d6de615 Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Mon, 30 Mar 2026 18:11:04 -0500 Subject: [PATCH 8/9] fixed example config in docstring --- src/fediblockhole/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/fediblockhole/__init__.py b/src/fediblockhole/__init__.py index 497ec59..661ba32 100644 --- a/src/fediblockhole/__init__.py +++ b/src/fediblockhole/__init__.py @@ -275,11 +275,11 @@ def fetch_from_fires( ) -> list: """Fetch blocklists from FIRES datasets. - Each source is a dict with a 'url' key pointing to the full dataset URL. + Each source is a dict with a 'dataset' key pointing to the full dataset URL. The dataset URL is the canonical identifier per the FIRES spec. Example config: - { url = 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' } + { dataset = 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' } Optional per-source keys: max_severity -- cap the highest severity (default: 'suspend') From a78a491684310813fcd55aa5abbf2de8efce4077 Mon Sep 17 00:00:00 2001 From: Shawn Grigson Date: Tue, 31 Mar 2026 08:59:44 -0500 Subject: [PATCH 9/9] language handling --- README.md | 1 + etc/sample.fediblockhole.conf.toml | 1 + src/fediblockhole/__init__.py | 2 ++ src/fediblockhole/fires.py | 56 +++++++++++++++++++++++------- 4 files changed, 47 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 4e282ff..6ed1e96 100644 --- a/README.md +++ b/README.md @@ -452,6 +452,7 @@ Optional per-source settings: - `max_severity`: Cap the maximum severity applied (e.g., `'silence'`). Defaults to `'suspend'`. - `ignore_accept`: When `true`, `accept` policies won't be added to the allowlist. However, `accept` still removes any block that this dataset previously added — it acts as an implicit retraction. Defaults to `false`. 
- `retractions`: When `true`, honor retractions from this source by removing blocks from your instance. See the Retractions section below. Defaults to `false`. + - `language`: Preferred language for label names from `nameMap`. FIRES labels support multilingual names; this selects which translation to use in block comments. Falls back to English, then any available language. Defaults to `'en'`. ### State tracking and retractions diff --git a/etc/sample.fediblockhole.conf.toml b/etc/sample.fediblockhole.conf.toml index 223dd4b..f4d9e77 100644 --- a/etc/sample.fediblockhole.conf.toml +++ b/etc/sample.fediblockhole.conf.toml @@ -37,6 +37,7 @@ blocklist_url_sources = [ # ignore_accept -- don't add accept entries to allowlist (default: false) # note: accept still removes blocks from this dataset # retractions -- honor retractions from this source (default: false) +# language -- preferred language for label names (default: 'en') blocklist_fires_sources = [ # { dataset = 'https://fires.example/datasets/019d3565-f022-777b-abbc-c43d649f294b' }, # { dataset = 'https://fires.example/datasets/019d3565-f022-777b-abbc-aabbccddeeff', max_severity = 'silence' }, diff --git a/src/fediblockhole/__init__.py b/src/fediblockhole/__init__.py index 661ba32..f2b1e39 100644 --- a/src/fediblockhole/__init__.py +++ b/src/fediblockhole/__init__.py @@ -321,12 +321,14 @@ def fetch_from_fires( max_severity = source.get("max_severity", "suspend") ignore_accept = source.get("ignore_accept", False) honor_retractions = source.get("retractions", False) + language = source.get("language", "en") try: bl, al = fetch_fires_blocklist( dataset_url, state, max_severity=max_severity, ignore_accept=ignore_accept, + language=language, ) blocklists.append(bl) if len(al) > 0: diff --git a/src/fediblockhole/fires.py b/src/fediblockhole/fires.py index abc2939..d5ab2fd 100644 --- a/src/fediblockhole/fires.py +++ b/src/fediblockhole/fires.py @@ -227,7 +227,7 @@ def build_public_comment(labels: list, label_names: 
dict, comment: str = "") -> _label_cache: dict = {} -def build_label_map_from_snapshot(client: FIRESClient, snapshot: dict) -> dict: +def build_label_map_from_snapshot(client: FIRESClient, snapshot: dict, language: str = "en") -> dict: """Build a label ID -> name map by fetching each label URL from the snapshot. FIRES snapshots include full label URLs in each item's 'labels' array. @@ -245,8 +245,8 @@ def build_label_map_from_snapshot(client: FIRESClient, snapshot: dict) -> dict: if isinstance(label_ref, str) and label_ref.startswith("http"): label_urls.add(label_ref) - # Filter to only labels we haven't fetched yet - new_urls = [url for url in label_urls if url not in _label_cache] + # Filter to only labels we haven't fetched yet for this language + new_urls = [url for url in label_urls if f"{language}:{url}" not in _label_cache] if new_urls: log.info(f"FIRES: fetching {len(new_urls)} new labels ({len(label_urls) - len(new_urls)} cached)") else: @@ -260,25 +260,29 @@ def build_label_map_from_snapshot(client: FIRESClient, snapshot: dict) -> dict: label_id = data.get("id", url) name_map = data.get("nameMap") if name_map and isinstance(name_map, dict): - name = name_map.get("en", name_map.get("en-US", "")) + name = name_map.get(language, name_map.get(f"{language}-US", "")) + if not name: + # Fall back to English, then any available language + name = name_map.get("en", name_map.get("en-US", "")) if not name: name = next(iter(name_map.values()), "") else: name = data.get("name", "") if name: - _label_cache[url] = name + _label_cache[f"{language}:{url}"] = name if label_id != url: - _label_cache[label_id] = name + _label_cache[f"{language}:{label_id}"] = name except Exception as e: log.warning(f"Could not fetch FIRES label {url}: {e}") slug = url.rstrip("/").split("/")[-1] - _label_cache[url] = slug + _label_cache[f"{language}:{url}"] = slug # Build the map for this snapshot from the cache label_map = {} for url in label_urls: - if url in _label_cache: - label_map[url] 
= _label_cache[url] + cache_key = f"{language}:{url}" + if cache_key in _label_cache: + label_map[url] = _label_cache[cache_key] return label_map @@ -306,6 +310,21 @@ def snapshot_to_blocklist( if retractions is None: retractions = set() + # Validate snapshot type per JSON-LD / ActivityStreams spec + snapshot_type = snapshot.get("type", "") + if snapshot_type not in ("OrderedCollection", "OrderedCollectionPage", "Collection"): + log.warning( + f"FIRES: unexpected snapshot type '{snapshot_type}', " + f"expected OrderedCollection or OrderedCollectionPage" + ) + + # orderedItems is the correct key per AS2; 'items' would be wrong + if "items" in snapshot and "orderedItems" not in snapshot: + raise ValueError( + "FIRES snapshot uses 'items' instead of 'orderedItems'. " + "This is not valid for an OrderedCollection." + ) + blocklist = Blocklist(origin) allowlist = Blocklist(origin) items = snapshot.get("orderedItems", []) @@ -332,7 +351,12 @@ def snapshot_to_blocklist( continue # Map FIRES policy to Mastodon severity - policy = item.get("recommendedPolicy", "drop") + policy = item.get("recommendedPolicy", "") + + # No policy means the entry is informational only (e.g., Advisory) + # and the dataset producer hasn't determined an action yet. + if not policy: + continue labels = item.get("labels", []) public_comment = build_public_comment( @@ -413,7 +437,12 @@ def apply_changes( continue if item_type == "Recommendation": - policy = item.get("recommendedPolicy", "drop") + policy = item.get("recommendedPolicy", "") + + # No policy means informational only — skip it + if not policy: + continue + labels = item.get("labels", []) public_comment = build_public_comment( labels, label_map, item.get("comment", "") @@ -471,6 +500,7 @@ def fetch_fires_blocklist( max_severity: str = "suspend", max_pages: int = 50, ignore_accept: bool = False, + language: str = "en", ) -> tuple: """Fetch a blocklist and allowlist from a FIRES dataset. 
@@ -501,7 +531,7 @@ def fetch_fires_blocklist( snapshot = client.get_snapshot() # Build label names from the label URLs in the snapshot - label_map = build_label_map_from_snapshot(client, snapshot) + label_map = build_label_map_from_snapshot(client, snapshot, language) blocklist, allowlist = snapshot_to_blocklist( snapshot, dataset_url, label_map, max_severity, retractions, @@ -524,7 +554,7 @@ def fetch_fires_blocklist( snapshot = client.get_snapshot() # Build label names from the label URLs in the snapshot - label_map = build_label_map_from_snapshot(client, snapshot) + label_map = build_label_map_from_snapshot(client, snapshot, language) blocklist, allowlist = snapshot_to_blocklist( snapshot, dataset_url, label_map, max_severity, retractions,