From b83832b2fbdbd8657c51be4dacf63b19dd40c622 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Wed, 1 Oct 2025 15:06:54 +0300 Subject: [PATCH 01/39] Black formatting --- src/redis_release/github_client.py | 76 +++++++++++------ src/redis_release/models.py | 10 +-- src/redis_release/orchestrator.py | 21 ++--- src/redis_release/state_manager.py | 110 +++++++++++++++++++++++-- src/redis_release/workflow_executor.py | 55 ++++++++++--- 5 files changed, 209 insertions(+), 63 deletions(-) diff --git a/src/redis_release/github_client.py b/src/redis_release/github_client.py index be673cb..23d4a81 100644 --- a/src/redis_release/github_client.py +++ b/src/redis_release/github_client.py @@ -82,7 +82,7 @@ def trigger_workflow( console.print(f"[green]Workflow triggered successfully[/green]") - workflow_run = self._identify_workflow(repo, workflow_file, workflow_uuid) + workflow_run = self.identify_workflow(repo, workflow_file, workflow_uuid) console.print(f"[dim] Run ID: {workflow_run.run_id}[/dim]") console.print( f"[dim] URL: https://github.com/{repo}/actions/runs/{workflow_run.run_id}[/dim]" @@ -262,7 +262,7 @@ def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict]: "expires_at": "2023-01-31T00:00:00Z", "updated_at": "2023-01-01T00:00:00Z", "size_in_bytes": 1048576, - "digest": "sha256:mock-digest" + "digest": "sha256:mock-digest", }, "release_info": { "id": 67890, @@ -271,7 +271,7 @@ def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict]: "expires_at": "2023-01-31T00:00:00Z", "updated_at": "2023-01-01T00:00:00Z", "size_in_bytes": 2097152, - "digest": "sha256:mock-digest-info" + "digest": "sha256:mock-digest-info", }, "mock-artifact": { "id": 11111, @@ -280,8 +280,8 @@ def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict]: "expires_at": "2023-01-31T00:00:00Z", "updated_at": "2023-01-01T00:00:00Z", "size_in_bytes": 2048576, - "digest": "sha256:mock-digest-2" - } + "digest": "sha256:mock-digest-2", + }, } # 
Real GitHub API call to get artifacts @@ -310,7 +310,9 @@ def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict]: "expires_at": artifact_data.get("expires_at"), "updated_at": artifact_data.get("updated_at"), "size_in_bytes": artifact_data.get("size_in_bytes"), - "digest": artifact_data.get("workflow_run", {}).get("head_sha") # Using head_sha as digest + "digest": artifact_data.get("workflow_run", {}).get( + "head_sha" + ), # Using head_sha as digest } artifacts[artifact_name] = artifact_info @@ -318,8 +320,12 @@ def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict]: if artifacts: console.print(f"[green]Found {len(artifacts)} artifacts[/green]") for artifact_name, artifact_info in artifacts.items(): - size_mb = round(artifact_info.get("size_in_bytes", 0) / (1024 * 1024), 2) - console.print(f"[dim] {artifact_name} ({size_mb}MB) - ID: {artifact_info.get('id')}[/dim]") + size_mb = round( + artifact_info.get("size_in_bytes", 0) / (1024 * 1024), 2 + ) + console.print( + f"[dim] {artifact_name} ({size_mb}MB) - ID: {artifact_info.get('id')}[/dim]" + ) else: console.print( "[yellow]No artifacts found for this workflow run[/yellow]" @@ -331,7 +337,13 @@ def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict]: console.print(f"[red]Failed to get artifacts: {e}[/red]") return {} - def extract_result(self, repo: str, artifacts: Dict[str, Dict], artifact_name: str, json_file_name: str) -> Optional[Dict[str, Any]]: + def extract_result( + self, + repo: str, + artifacts: Dict[str, Dict], + artifact_name: str, + json_file_name: str, + ) -> Optional[Dict[str, Any]]: """Extract JSON result from artifacts. 
Args: @@ -354,17 +366,21 @@ def extract_result(self, repo: str, artifacts: Dict[str, Dict], artifact_name: s console.print(f"[red]{artifact_name} artifact has no ID[/red]") return None - console.print(f"[blue]Extracting {json_file_name} from artifact {artifact_id}[/blue]") + console.print( + f"[blue]Extracting {json_file_name} from artifact {artifact_id}[/blue]" + ) if self.dry_run: - console.print(f"[yellow] (DRY RUN - returning mock {json_file_name})[/yellow]") + console.print( + f"[yellow] (DRY RUN - returning mock {json_file_name})[/yellow]" + ) return { "mock": True, "version": "1.0.0", "build_info": { "timestamp": "2023-01-01T00:00:00Z", - "commit": "mock-commit-hash" - } + "commit": "mock-commit-hash", + }, } # Download the artifact and extract JSON file @@ -392,20 +408,26 @@ def extract_result(self, repo: str, artifacts: Dict[str, Dict], artifact_name: s if json_file_name in zip_file.namelist(): with zip_file.open(json_file_name) as json_file: result_data = json.load(json_file) - console.print(f"[green]Successfully extracted {json_file_name}[/green]") + console.print( + f"[green]Successfully extracted {json_file_name}[/green]" + ) return result_data else: console.print(f"[red]{json_file_name} not found in artifact[/red]") return None except requests.exceptions.RequestException as e: - console.print(f"[red]Failed to download {artifact_name} artifact: {e}[/red]") + console.print( + f"[red]Failed to download {artifact_name} artifact: {e}[/red]" + ) return None except (zipfile.BadZipFile, json.JSONDecodeError, KeyError) as e: console.print(f"[red]Failed to extract {json_file_name}: {e}[/red]") return None - def extract_release_handle(self, repo: str, artifacts: Dict[str, Dict]) -> Optional[Dict[str, Any]]: + def extract_release_handle( + self, repo: str, artifacts: Dict[str, Dict] + ) -> Optional[Dict[str, Any]]: """Extract release_handle JSON from artifacts. This is a backward compatibility wrapper around extract_result. 
@@ -417,9 +439,11 @@ def extract_release_handle(self, repo: str, artifacts: Dict[str, Dict]) -> Optio Returns: Parsed JSON content from release_handle.json file, or None if not found """ - return self.extract_result(repo, artifacts, "release_handle", "release_handle.json") + return self.extract_result( + repo, artifacts, "release_handle", "release_handle.json" + ) - def _get_recent_workflow_runs( + def get_recent_workflow_runs( self, repo: str, workflow_file: str, limit: int = 10 ) -> List[WorkflowRun]: """Get recent workflow runs for a specific workflow. @@ -501,11 +525,11 @@ def _extract_uuid(self, text: str) -> Optional[str]: if not text: return None - uuid_pattern = r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}' + uuid_pattern = r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" uuid_match = re.search(uuid_pattern, text, re.IGNORECASE) return uuid_match.group() if uuid_match else None - def _identify_workflow( + def identify_workflow( self, repo: str, workflow_file: str, workflow_uuid: str, max_tries: int = 10 ) -> WorkflowRun: """Identify a specific workflow run by UUID in its name. 
@@ -522,19 +546,23 @@ def _identify_workflow( Raises: RuntimeError: If workflow run cannot be found after max_tries """ - console.print(f"[blue]Searching for workflow run with UUID: {workflow_uuid}[/blue]") + console.print( + f"[blue]Searching for workflow run with UUID: {workflow_uuid}[/blue]" + ) for attempt in range(max_tries): time.sleep(2) if attempt > 0: console.print(f"[dim] Attempt {attempt + 1}/{max_tries}[/dim]") - runs = self._get_recent_workflow_runs(repo, workflow_file, limit=20) + runs = self.get_recent_workflow_runs(repo, workflow_file, limit=20) for run in runs: extracted_uuid = self._extract_uuid(run.workflow_id) if extracted_uuid and extracted_uuid.lower() == workflow_uuid.lower(): - console.print(f"[green]Found matching workflow run: {run.run_id}[/green]") + console.print( + f"[green]Found matching workflow run: {run.run_id}[/green]" + ) console.print(f"[dim] Workflow name: {run.workflow_id}[/dim]") console.print(f"[dim] Extracted UUID: {extracted_uuid}[/dim]") run.workflow_uuid = workflow_uuid @@ -542,7 +570,6 @@ def _identify_workflow( console.print("[dim] No matching workflow found, trying again...[/dim]") - raise RuntimeError( f"Could not find workflow run with UUID {workflow_uuid} after {max_tries} attempts. " f"The workflow may have failed to start or there may be a delay in GitHub's API." 
@@ -608,7 +635,6 @@ def get_tag_commit(self, repo: str, tag: str) -> Optional[str]: if self.dry_run: return f"mock-commit-{tag}" - url = f"https://api.github.com/repos/{repo}/tags" headers = { "Accept": "application/vnd.github.v3+json", diff --git a/src/redis_release/models.py b/src/redis_release/models.py index d64c897..7455dba 100644 --- a/src/redis_release/models.py +++ b/src/redis_release/models.py @@ -97,10 +97,7 @@ def is_build_successful(self) -> bool: """Check if all build workflows are completed successfully.""" if not self.packages: return False - return all( - pkg.is_build_phase_successful() - for pkg in self.packages.values() - ) + return all(pkg.is_build_phase_successful() for pkg in self.packages.values()) def is_build_phase_finished(self) -> bool: """Check if all build workflows are finished (successfully or not).""" @@ -123,10 +120,7 @@ def is_publish_successful(self) -> bool: """Check if all publish workflows are completed successfully.""" if not self.packages: return False - return all( - pkg.is_publish_phase_successful() - for pkg in self.packages.values() - ) + return all(pkg.is_publish_phase_successful() for pkg in self.packages.values()) def is_publish_phase_finished(self) -> bool: """Check if all publish workflows are finished (successfully or not).""" diff --git a/src/redis_release/orchestrator.py b/src/redis_release/orchestrator.py index 5212b80..ff11a6d 100644 --- a/src/redis_release/orchestrator.py +++ b/src/redis_release/orchestrator.py @@ -217,9 +217,11 @@ def execute_release( else: docker_state = state.packages.get(PackageType.DOCKER) self._print_completed_state_phase( - phase_completed=docker_state.build_completed if docker_state else False, + phase_completed=( + docker_state.build_completed if docker_state else False + ), workflow=docker_state.build_workflow if docker_state else None, - name="Build" + name="Build", ) state_manager.save_state(state) @@ -234,9 +236,11 @@ def execute_release( else: docker_state = 
state.packages.get(PackageType.DOCKER) self._print_completed_state_phase( - phase_completed=docker_state.publish_completed if docker_state else False, + phase_completed=( + docker_state.publish_completed if docker_state else False + ), workflow=docker_state.publish_workflow if docker_state else None, - name="Publish" + name="Publish", ) state_manager.save_state(state) @@ -270,10 +274,7 @@ def _should_run_publish_phase(self, state: ReleaseState) -> bool: return state.release_type == ReleaseType.PUBLIC def _print_completed_state_phase( - self, - phase_completed: bool, - workflow: Optional[WorkflowRun], - name: str + self, phase_completed: bool, workflow: Optional[WorkflowRun], name: str ) -> None: """Print the current phase state when phase is already completed.""" if phase_completed: @@ -315,7 +316,7 @@ def _execute_build_phase( state=state, repo=repo, orchestrator_config=self.docker_config, - timeout_minutes=45 + timeout_minutes=45, ) executor = PhaseExecutor() @@ -338,7 +339,7 @@ def _execute_publish_phase( state=state, repo=repo, orchestrator_config=self.docker_config, - timeout_minutes=30 # Publish might be faster than build + timeout_minutes=30, # Publish might be faster than build ) executor = PhaseExecutor() diff --git a/src/redis_release/state_manager.py b/src/redis_release/state_manager.py index 38f61b2..f5e87fb 100644 --- a/src/redis_release/state_manager.py +++ b/src/redis_release/state_manager.py @@ -11,12 +11,12 @@ from .models import ReleaseState -console = Console() +from builtins import NotImplementedError +console = Console() -class StateManager: - """Manages release state persistence in S3.""" +class S3Backed: def __init__( self, bucket_name: Optional[str] = None, @@ -60,7 +60,9 @@ def s3_client(self): self._s3_client = session.client("s3", region_name=self.aws_region) # Fall back to environment variables elif self.aws_access_key_id and self.aws_secret_access_key: - console.print("[blue]Using AWS credentials from environment variables[/blue]") + 
console.print( + "[blue]Using AWS credentials from environment variables[/blue]" + ) self._s3_client = boto3.client( "s3", aws_access_key_id=self.aws_access_key_id, @@ -70,16 +72,22 @@ def s3_client(self): ) else: console.print("[red]AWS credentials not found[/red]") - console.print("[yellow]Set AWS_PROFILE or AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY environment variables[/yellow]") + console.print( + "[yellow]Set AWS_PROFILE or AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY environment variables[/yellow]" + ) raise NoCredentialsError() # Test connection self._s3_client.head_bucket(Bucket=self.bucket_name) - console.print(f"[green]Connected to S3 bucket: {self.bucket_name}[/green]") + console.print( + f"[green]Connected to S3 bucket: {self.bucket_name}[/green]" + ) except ClientError as e: if e.response["Error"]["Code"] == "404": - console.print(f"[yellow]S3 bucket not found: {self.bucket_name}[/yellow]") + console.print( + f"[yellow]S3 bucket not found: {self.bucket_name}[/yellow]" + ) self._create_bucket() else: console.print(f"[red]S3 error: {e}[/red]") @@ -92,6 +100,94 @@ def s3_client(self): return self._s3_client + +class BlackboardStorage(S3Backed): + def __init__( + self, + bucket_name: Optional[str] = None, + dry_run: bool = False, + aws_region: str = "us-east-1", + aws_profile: Optional[str] = None, + ): + super().__init__(bucket_name, dry_run, aws_region, aws_profile) + + def get(self, tag: str) -> Optional[dict]: + """Load blackboard data from S3. 
+ + Args: + tag: Release tag + + Returns: + ReleaseState object or None if not found + """ + state_key = f"release-state/{tag}-blackboard.json" + console.print(f"[blue] Loading state for tag: {tag}[/blue]") + + if self.dry_run: + raise NotImplementedError() + + try: + response = self.s3_client.get_object(Bucket=self.bucket_name, Key=state_key) + state_data = json.loads(response["Body"].read().decode("utf-8")) + + console.print(f"[green]State loaded successfully[/green]") + + return state_data + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + console.print( + f"[yellow] No existing blackboard found for tag: {tag}[/yellow]" + ) + return None + else: + console.print(f"[red] Failed to load blackboard: {e}[/red]") + raise + + def put(self, tag: str, state: dict) -> None: + """Save release state to S3. + + Args: + state: ReleaseState object to save + """ + state_key = f"release-state/{tag}-blackboard.json" + console.print(f"[blue] Saving blackboard for tag: {tag}[/blue]") + + state_json = json.dumps(state, indent=2, default=str) + + if self.dry_run: + raise NotImplementedError() + + try: + self.s3_client.put_object( + Bucket=self.bucket_name, + Key=state_key, + Body=state_json, + ContentType="application/json", + Metadata={ + "tag": tag, + }, + ) + + console.print(f"[green] Blackboard saved successfully[/green]") + + except ClientError as e: + console.print(f"[red] Failed to save blackboard: {e}[/red]") + raise + + +class StateManager: + """Manages release state persistence in S3.""" + + def __init__( + self, + bucket_name: Optional[str] = None, + dry_run: bool = False, + aws_region: str = "us-east-1", + aws_profile: Optional[str] = None, + ): + super().__init__(bucket_name, dry_run, aws_region, aws_profile) + def _create_bucket(self) -> None: """Create S3 bucket if it doesn't exist.""" try: diff --git a/src/redis_release/workflow_executor.py b/src/redis_release/workflow_executor.py index 34f76b1..b44164e 100644 --- 
a/src/redis_release/workflow_executor.py +++ b/src/redis_release/workflow_executor.py @@ -1,4 +1,5 @@ """Workflow execution classes for Redis release automation.""" + import json from abc import ABC, abstractmethod from typing import Any, Dict, Optional @@ -6,7 +7,13 @@ from rich.console import Console from .github_client import GitHubClient -from .models import PackageState, PackageType, ReleaseState, WorkflowConclusion, WorkflowRun +from .models import ( + PackageState, + PackageType, + ReleaseState, + WorkflowConclusion, + WorkflowRun, +) console = Console() @@ -19,7 +26,7 @@ def __init__( state: ReleaseState, repo: str, orchestrator_config: Dict[str, Any], - timeout_minutes: int = 45 + timeout_minutes: int = 45, ): self.state = state self.repo = repo @@ -87,7 +94,9 @@ def set_result(self, result_data: Dict[str, Any]) -> None: pass @abstractmethod - def extract_result(self, github_client: GitHubClient, artifacts: Dict[str, Dict[str, Any]]) -> Optional[Dict[str, Any]]: + def extract_result( + self, github_client: GitHubClient, artifacts: Dict[str, Dict[str, Any]] + ) -> Optional[Dict[str, Any]]: """Extract phase-specific result data from artifacts.""" pass @@ -104,14 +113,18 @@ def _get_release_branch(self) -> str: """ tag_parts = self.state.tag.split(".") if len(tag_parts) < 2: - raise ValueError(f"Invalid tag format '{self.state.tag}': expected at least major.minor version") + raise ValueError( + f"Invalid tag format '{self.state.tag}': expected at least major.minor version" + ) try: # Validate that major and minor are numeric int(tag_parts[0]) int(tag_parts[1]) except ValueError: - raise ValueError(f"Invalid tag format '{self.state.tag}': major and minor versions must be numeric") + raise ValueError( + f"Invalid tag format '{self.state.tag}': major and minor versions must be numeric" + ) major_minor = f"{tag_parts[0]}.{tag_parts[1]}" return f"release/{major_minor}" @@ -163,9 +176,13 @@ def set_artifacts(self, artifacts: Dict[str, Dict[str, Any]]) -> None: def 
set_result(self, result_data: Dict[str, Any]) -> None: self.package_state.release_handle = result_data - def extract_result(self, github_client: GitHubClient, artifacts: Dict[str, Dict[str, Any]]) -> Optional[Dict[str, Any]]: + def extract_result( + self, github_client: GitHubClient, artifacts: Dict[str, Dict[str, Any]] + ) -> Optional[Dict[str, Any]]: """Extract release_handle from artifacts.""" - result = github_client.extract_result(self.repo, artifacts, "release_handle", "release_handle.json") + result = github_client.extract_result( + self.repo, artifacts, "release_handle", "release_handle.json" + ) if result is None: console.print("[red]Failed to extract release_handle from artifacts[/red]") return result @@ -200,7 +217,9 @@ def workflow_inputs(self) -> Dict[str, Any]: RuntimeError: If release_handle is not available in package state """ if not self.package_state.release_handle: - raise RuntimeError("release_handle is required for publish phase but not found in package state") + raise RuntimeError( + "release_handle is required for publish phase but not found in package state" + ) return { "release_handle": json.dumps(self.package_state.release_handle), @@ -224,9 +243,13 @@ def set_artifacts(self, artifacts: Dict[str, Dict[str, Any]]) -> None: def set_result(self, result_data: Dict[str, Any]) -> None: self.package_state.publish_info = result_data - def extract_result(self, github_client: GitHubClient, artifacts: Dict[str, Dict[str, Any]]) -> Optional[Dict[str, Any]]: + def extract_result( + self, github_client: GitHubClient, artifacts: Dict[str, Dict[str, Any]] + ) -> Optional[Dict[str, Any]]: """Extract release_info from artifacts.""" - result = github_client.extract_result(self.repo, artifacts, "release_info", "release_info.json") + result = github_client.extract_result( + self.repo, artifacts, "release_info", "release_info.json" + ) if result is None: console.print("[red]Failed to extract release_info from artifacts[/red]") return result @@ -277,7 +300,9 
@@ def _trigger_workflow(self, phase: Phase, github_client: GitHubClient) -> bool: console.print(f"[red]Failed to trigger {phase.phase_name}: {e}[/red]") return False - def _wait_for_completion(self, phase: Phase, github_client: GitHubClient, workflow: WorkflowRun) -> bool: + def _wait_for_completion( + self, phase: Phase, github_client: GitHubClient, workflow: WorkflowRun + ) -> bool: """Wait for workflow completion and handle results.""" try: console.print(f"[blue]Waiting for {phase.phase_name} to complete...[/blue]") @@ -302,7 +327,9 @@ def _wait_for_completion(self, phase: Phase, github_client: GitHubClient, workfl console.print(f"[red]{phase.phase_name} failed: {e}[/red]") return False - def _handle_success(self, phase: Phase, github_client: GitHubClient, completed_run: WorkflowRun) -> bool: + def _handle_success( + self, phase: Phase, github_client: GitHubClient, completed_run: WorkflowRun + ) -> bool: """Handle successful workflow completion.""" phase.set_completed(True) @@ -321,7 +348,9 @@ def _handle_success(self, phase: Phase, github_client: GitHubClient, completed_r console.print(f"[green]{phase.phase_name} completed successfully[/green]") return True - def _handle_other_conclusion(self, phase: Phase, completed_run: WorkflowRun) -> bool: + def _handle_other_conclusion( + self, phase: Phase, completed_run: WorkflowRun + ) -> bool: """Handle non-success, non-failure conclusions.""" phase.set_completed(True) # completed, but not successful conclusion_text = ( From 33429cb12e1b8206cf7d2c271c11b0073dca5537 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Wed, 1 Oct 2025 15:09:52 +0300 Subject: [PATCH 02/39] bht workflow trigger and search working --- pyproject.toml | 2 + src/redis_release/bht/__init__.py | 0 src/redis_release/bht/behaviours.py | 200 ++++++++++++++++++ src/redis_release/bht/composites.py | 31 +++ src/redis_release/bht/state.py | 17 ++ src/redis_release/cli.py | 35 +++- src/redis_release/github_client_async.py | 256 +++++++++++++++++++++++ 
src/redis_release/logging_config.py | 39 ++++ src/redis_release/tree.py | 115 ++++++++++ 9 files changed, 692 insertions(+), 3 deletions(-) create mode 100644 src/redis_release/bht/__init__.py create mode 100644 src/redis_release/bht/behaviours.py create mode 100644 src/redis_release/bht/composites.py create mode 100644 src/redis_release/bht/state.py create mode 100644 src/redis_release/github_client_async.py create mode 100644 src/redis_release/logging_config.py create mode 100644 src/redis_release/tree.py diff --git a/pyproject.toml b/pyproject.toml index 7ac0fcc..bad5378 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,9 +24,11 @@ classifiers = [ dependencies = [ "typer[all]>=0.9.0", "requests>=2.28.0", + "aiohttp>=3.8.0", "boto3>=1.26.0", "rich>=13.0.0", "pydantic>=2.0.0", + "py_trees>=2.2,<3.0" ] [project.optional-dependencies] diff --git a/src/redis_release/bht/__init__.py b/src/redis_release/bht/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py new file mode 100644 index 0000000..2c1633b --- /dev/null +++ b/src/redis_release/bht/behaviours.py @@ -0,0 +1,200 @@ +import asyncio +import logging +import random +import uuid +from datetime import datetime +from typing import Optional + +import py_trees + +from ..github_client_async import GitHubClientAsync +from ..models import WorkflowRun +from .state import Workflow + +logger = logging.getLogger(__name__) + + +def log_exception_and_return_failure( + TaskName: str, e: Exception +) -> py_trees.common.Status: + logger.error( + f"[red]{TaskName} failed with exception:[/red] {type(e).__name__}: {e}" + ) + logger.error(f"[red]Full traceback:[/red]", exc_info=True) + return py_trees.common.Status.FAILURE + + +class RedisReleaseBehaviour(py_trees.behaviour.Behaviour): + def __init__(self, name: str) -> None: + random.seed() + print("wtf") + super().__init__(name=name) + self.release_state = self.attach_blackboard_client( 
+ namespace="release_state/docker/build" + ) + self.release_state.register_key( + key="workflow", access=py_trees.common.Access.WRITE + ) + super(RedisReleaseBehaviour, self).__init__(name) + + def initialise(self) -> None: + # print(f'init {self.blackboard.foo}') + if self.release_state.exists("workflow"): + print(f"exists {self.release_state.workflow}") + else: + # self.release_state.set("workflow", {}) + self.release_state.set("workflow.uuid", random.randint(0, 10000)) + + def update(self) -> py_trees.common.Status: + print("update") + self.release_state.set("workflow.uuid", random.randint(0, 10000)) + return py_trees.common.Status.RUNNING + + def terminate(self, new_status: py_trees.common.Status) -> None: + print("terminate") + + +class TriggerWorkflow(py_trees.behaviour.Behaviour): + def __init__( + self, + name: str, + workflow: Workflow, + github_client: GitHubClientAsync, + ) -> None: + self.github_client = github_client + self.workflow = workflow + self.task: Optional[asyncio.Task[bool]] = None + super().__init__(name=name) + + def initialise(self) -> None: + self.workflow.uuid = str(uuid.uuid4()) + self.workflow.inputs["workflow_uuid"] = self.workflow.uuid + logger.info("initialise") + self.task = asyncio.create_task( + self.github_client.trigger_workflow( + self.workflow.repo, + self.workflow.workflow_file, + self.workflow.inputs, + self.workflow.ref, + ) + ) + + def update(self) -> py_trees.common.Status: + print("foo") + if self.task is None: + logger.error("[red]Task is None - workflow was not initialized[/red]") + return py_trees.common.Status.FAILURE + + if not self.task.done(): + return py_trees.common.Status.RUNNING + + try: + result = self.task.result() + self.workflow.triggered_at = datetime.now() + logger.info( + f"[green]Workflow triggered successfully:[/green] {self.workflow.uuid}" + ) + return py_trees.common.Status.SUCCESS + except Exception as e: + return log_exception_and_return_failure("TriggerWorkflow", e) + + def terminate(self, 
new_status: py_trees.common.Status) -> None: + # TODO: Cancel task + pass + + +class IdentifyWorkflowByUUID(py_trees.behaviour.Behaviour): + def __init__( + self, + name: str, + workflow: Workflow, + github_client: GitHubClientAsync, + ) -> None: + self.github_client = github_client + self.workflow = workflow + self.task: Optional[asyncio.Task[Optional[WorkflowRun]]] = None + super().__init__(name=name) + + def initialise(self) -> None: + if self.workflow.uuid is None: + logger.error("[red]Workflow UUID is None - cannot identify workflow[/red]") + return + + self.task = asyncio.create_task( + self.github_client.identify_workflow( + self.workflow.repo, self.workflow.workflow_file, self.workflow.uuid + ) + ) + + def update(self) -> py_trees.common.Status: + logger.debug("IdentifyWorkflowByUUID: update") + if self.task is None: + logger.error("[red]Task is None - behaviour was not initialized[/red]") + return py_trees.common.Status.FAILURE + + if not self.task.done(): + logger.debug( + f"IdentifyWorkflowByUUID: Task not yet done {self.task.cancelled()} {self.task.done()} {self.task}" + ) + return py_trees.common.Status.RUNNING + + try: + logger.debug("IdentifyWorkflowByUUID: before result") + result = self.task.result() + logger.debug(f"IdentifyWorkflowByUUID: result {result}") + if result is None: + logger.error("[red]Workflow not found[/red]") + return py_trees.common.Status.FAILURE + + self.workflow.run_id = result.run_id + logger.info( + f"[green]Workflow found successfully:[/green] uuid: {self.workflow.uuid}, run_id: {self.workflow.run_id}" + ) + return py_trees.common.Status.SUCCESS + except Exception as e: + return log_exception_and_return_failure("TriggerWorkflow", e) + + +class Sleep(py_trees.behaviour.Behaviour): + + task: Optional[asyncio.Task[None]] = None + + def __init__(self, name: str, sleep_time: float) -> None: + self.sleep_time = sleep_time + super().__init__(name=name) + + def initialise(self) -> None: + self.task = 
asyncio.create_task(asyncio.sleep(self.sleep_time)) + + def update(self) -> py_trees.common.Status: + if self.task is None: + logger.error("[red]Task is None - behaviour was not initialized[/red]") + return py_trees.common.Status.FAILURE + + if not self.task.done(): + return py_trees.common.Status.RUNNING + + return py_trees.common.Status.SUCCESS + + +class IsWorkflowTriggered(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + logger.debug(f"IsWorkflowTriggered: {self.workflow}") + if self.workflow.triggered_at is not None: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE + + +class IsWorkflowIdentified(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.workflow.run_id is not None: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py new file mode 100644 index 0000000..51845de --- /dev/null +++ b/src/redis_release/bht/composites.py @@ -0,0 +1,31 @@ +from time import sleep + +from py_trees.composites import Selector, Sequence +from py_trees.decorators import Retry + +from ..github_client_async import GitHubClientAsync +from .behaviours import IdentifyWorkflowByUUID, IsWorkflowTriggered, Sleep +from .state import Workflow + + +class FindWorkflowByUUID(Sequence): + max_retries: int = 3 + + def __init__( + self, name: str, workflow: Workflow, github_client: GitHubClientAsync + ) -> None: + is_workflow_triggered = IsWorkflowTriggered("Is Workflow Triggered?", workflow) + identify_workflow = IdentifyWorkflowByUUID( + "Identify Workflow by UUID", workflow, github_client + ) + sleep = Sleep("Sleep", 5) + sleep_then_identify = 
Sequence( + "Sleep then Identify", memory=True, children=[sleep, identify_workflow] + ) + identify_loop = Retry( + f"Retry {self.max_retries} times", sleep_then_identify, self.max_retries + ) + + super().__init__( + name=name, memory=False, children=[is_workflow_triggered, identify_loop] + ) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py new file mode 100644 index 0000000..e5a8285 --- /dev/null +++ b/src/redis_release/bht/state.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import Dict, Optional + +from pydantic import BaseModel +from pyparsing import Opt + + +class Workflow(BaseModel): + repo: str + workflow_file: str + inputs: Dict[str, str] + ref: str = "main" + uuid: Optional[str] = None + triggered_at: Optional[datetime] = None + started_at: Optional[datetime] = None + run_id: Optional[int] = None + url: Optional[str] = None diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index e898614..c74d59a 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -1,14 +1,19 @@ """Redis OSS Release Automation CLI.""" +import asyncio +import logging import os from typing import Optional +import py_trees import typer from rich.console import Console from rich.table import Table +from .logging_config import setup_logging from .models import ReleaseType from .orchestrator import ReleaseOrchestrator +from .tree import async_tick_tock2, testpt2 app = typer.Typer( name="redis-release", @@ -132,7 +137,11 @@ def status( # Publish status if not pkg_state.publish_completed: - publish_status = "[blue]In Progress[/blue]" if pkg_state.publish_workflow else "[dim]Not Started[/dim]" + publish_status = ( + "[blue]In Progress[/blue]" + if pkg_state.publish_workflow + else "[dim]Not Started[/dim]" + ) elif pkg_state.publish_workflow and pkg_state.publish_workflow.conclusion: if pkg_state.publish_workflow.conclusion.value == "success": publish_status = "[green]Success[/green]" @@ -155,7 +164,13 @@ def status( else: 
publish_artifacts = "[dim]None[/dim]" - table.add_row(pkg_type.value, build_status, publish_status, build_artifacts, publish_artifacts) + table.add_row( + pkg_type.value, + build_status, + publish_status, + build_artifacts, + publish_artifacts, + ) console.print(table) @@ -170,11 +185,25 @@ def status( f" Docker repo: [cyan]{state.docker_repo_commit[:8]}[/cyan]" ) - except Exception as e: console.print(f"[red] Failed to get status: {e}[/red]") raise typer.Exit(1) +@app.command() +def release_btree() -> None: + setup_logging(logging.DEBUG) + root = testpt2() + tree = py_trees.trees.BehaviourTree(root) + asyncio.run(async_tick_tock2(tree)) + + +@app.command() +def release_print() -> None: + root = testpt2() + py_trees.display.render_dot_tree(root) + print(py_trees.display.unicode_tree(root)) + + if __name__ == "__main__": app() diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py new file mode 100644 index 0000000..9c5b613 --- /dev/null +++ b/src/redis_release/github_client_async.py @@ -0,0 +1,256 @@ +"""Async GitHub API client for workflow operations.""" + +import asyncio +import logging +import re +import uuid +from typing import Dict, List, Optional + +import aiohttp + +from .models import WorkflowConclusion, WorkflowRun, WorkflowStatus + +# Get logger for this module +logger = logging.getLogger(__name__) + + +class GitHubClientAsync: + """Async GitHub API client for workflow operations.""" + + def __init__(self, token: str): + """Initialize async GitHub client. + + Args: + token: GitHub API token + """ + self.token = token + + async def trigger_workflow( + self, repo: str, workflow_file: str, inputs: Dict[str, str], ref: str = "main" + ) -> bool: + """Trigger a workflow in a repository. 
+
+        Args:
+            repo: Repository name (e.g., "redis/docker-library-redis")
+            workflow_file: Workflow file name (e.g., "build.yml")
+            inputs: Workflow inputs
+            ref: Git reference to run workflow on
+
+        Returns:
+            True if the workflow dispatch request was accepted (the run itself is identified separately)
+        """
+        logger.info(f"[blue]Triggering workflow[/blue] {workflow_file} in {repo}")
+        logger.debug(f"Inputs: {inputs}")
+        logger.debug(f"Ref: {ref}")
+        logger.debug(f"Workflow UUID: [cyan]{inputs['workflow_uuid']}[/cyan]")
+
+        url = f"https://api.github.com/repos/{repo}/actions/workflows/{workflow_file}/dispatches"
+        headers = {
+            "Authorization": f"Bearer {self.token}",
+            "Accept": "application/vnd.github.v3+json",
+            "X-GitHub-Api-Version": "2022-11-28",
+        }
+
+        # Add the workflow UUID to inputs so it appears in the workflow run name
+        enhanced_inputs = inputs.copy()
+
+        payload = {"ref": ref, "inputs": enhanced_inputs}
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.post(
+                    url,
+                    headers=headers,
+                    json=payload,
+                    timeout=aiohttp.ClientTimeout(total=30),
+                ) as response:
+                    if response.status >= 400:
+                        # Read response body for error details
+                        try:
+                            error_body = await response.text()
+                            logger.error(
+                                f"[red]Failed to trigger workflow:[/red] HTTP {response.status}"
+                            )
+                            logger.error(f"[red]Response body:[/red] {error_body}")
+                        except Exception:
+                            logger.error(
+                                f"[red]Failed to trigger workflow:[/red] HTTP {response.status}"
+                            )
+                        response.raise_for_status()
+
+                    logger.info(f"[green]Workflow triggered successfully[/green]")
+
+                    return True
+        except aiohttp.ClientError as e:
+            logger.error(f"[red]Failed to trigger workflow:[/red] {e}")
+            raise
+
+        return False
+
+    async def identify_workflow_loop(
+        self, repo: str, workflow_file: str, workflow_uuid: str, max_tries: int = 10
+    ) -> WorkflowRun:
+        """Identify a specific workflow run by UUID in its name. 
+ + Args: + repo: Repository name + workflow_file: Workflow file name + workflow_uuid: UUID to search for in workflow run names + max_tries: Maximum number of attempts to find the workflow + + Returns: + WorkflowRun object with matching UUID + + Raises: + RuntimeError: If workflow run cannot be found after max_tries + """ + logger.info( + f"[blue]Searching for workflow run with UUID:[/blue] [cyan]{workflow_uuid}[/cyan]" + ) + + for attempt in range(max_tries): + await asyncio.sleep(2) + if attempt > 0: + logger.debug(f"Attempt {attempt + 1}/{max_tries}") + + runs = await self.get_recent_workflow_runs(repo, workflow_file, limit=20) + + for run in runs: + extracted_uuid = self._extract_uuid(run.workflow_id) + if extracted_uuid and extracted_uuid.lower() == workflow_uuid.lower(): + logger.info( + f"[green]Found matching workflow run:[/green] {run.run_id}" + ) + logger.debug(f"Workflow name: {run.workflow_id}") + logger.debug(f"Extracted UUID: {extracted_uuid}") + run.workflow_uuid = workflow_uuid + return run + + logger.debug("No matching workflow found, trying again...") + + raise RuntimeError( + f"Could not find workflow run with UUID {workflow_uuid} after {max_tries} attempts. " + f"The workflow may have failed to start or there may be a delay in GitHub's API." 
+ ) + + async def identify_workflow( + self, repo: str, workflow_file: str, workflow_uuid: str + ) -> WorkflowRun | None: + + logger.debug( + f"[blue]Searching for workflow run with UUID:[/blue] [cyan]{workflow_uuid}[/cyan]" + ) + runs = await self.get_recent_workflow_runs(repo, workflow_file, limit=20) + + for run in runs: + extracted_uuid = self._extract_uuid(run.workflow_id) + if extracted_uuid and extracted_uuid.lower() == workflow_uuid.lower(): + logger.info(f"[green]Found matching workflow run:[/green] {run.run_id}") + logger.debug(f"Workflow name: {run.workflow_id}") + logger.debug(f"Extracted UUID: {extracted_uuid}") + run.workflow_uuid = workflow_uuid + return run + return None + + async def get_recent_workflow_runs( + self, repo: str, workflow_file: str, limit: int = 10 + ) -> List[WorkflowRun]: + """Get recent workflow runs for a specific workflow. + + Args: + repo: Repository name + workflow_file: Workflow file name + limit: Maximum number of runs to return + + Returns: + List of WorkflowRun objects, sorted by creation time (newest first) + """ + url = f"https://api.github.com/repos/{repo}/actions/workflows/{workflow_file}/runs" + headers = { + "Authorization": f"Bearer {self.token}", + "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + params = {"per_page": limit, "page": 1} + + try: + async with aiohttp.ClientSession() as session: + async with session.get( + url, + headers=headers, + params=params, + timeout=aiohttp.ClientTimeout(total=30), + ) as response: + if response.status >= 400: + # Read response body for error details + try: + error_body = await response.text() + logger.error( + f"[red]Failed to get workflow runs:[/red] HTTP {response.status}" + ) + logger.error(f"[red]Response body:[/red] {error_body}") + except Exception: + logger.error( + f"[red]Failed to get workflow runs:[/red] HTTP {response.status}" + ) + response.raise_for_status() + + data = await response.json() + + runs = [] + for run_data in 
data.get("workflow_runs", []): + # Map GitHub API status to our enum + api_status = run_data.get("status", "unknown").lower() + if api_status == "queued": + status = WorkflowStatus.QUEUED + elif api_status == "in_progress": + status = WorkflowStatus.IN_PROGRESS + elif api_status == "completed": + status = WorkflowStatus.COMPLETED + else: + status = WorkflowStatus.PENDING + + # Map GitHub API conclusion to our enum + api_conclusion = run_data.get("conclusion") + conclusion = None + if api_conclusion == "success": + conclusion = WorkflowConclusion.SUCCESS + elif api_conclusion == "failure": + conclusion = WorkflowConclusion.FAILURE + + workflow_name = run_data.get("name", workflow_file) + workflow_uuid = self._extract_uuid(workflow_name) + + runs.append( + WorkflowRun( + repo=repo, + workflow_id=workflow_name, + workflow_uuid=workflow_uuid, + run_id=run_data.get("id"), + status=status, + conclusion=conclusion, + ) + ) + + return runs + + except aiohttp.ClientError as e: + logger.error(f"[red]Failed to get workflow runs:[/red] {e}") + return [] + + def _extract_uuid(self, text: str) -> Optional[str]: + """Extract UUID from a string if present. + + Args: + text: String to search for UUID pattern + + Returns: + UUID string if found, None otherwise + """ + if not text: + return None + + uuid_pattern = r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" + uuid_match = re.search(uuid_pattern, text, re.IGNORECASE) + return uuid_match.group() if uuid_match else None diff --git a/src/redis_release/logging_config.py b/src/redis_release/logging_config.py new file mode 100644 index 0000000..3f22123 --- /dev/null +++ b/src/redis_release/logging_config.py @@ -0,0 +1,39 @@ +"""Logging configuration with Rich handler for beautiful colored output.""" + +import logging +from rich.logging import RichHandler + + +def setup_logging(level: int = logging.INFO, show_path: bool = True) -> None: + """Configure logging with Rich handler. 
+ + Args: + level: Logging level (e.g., logging.INFO, logging.DEBUG) + show_path: Whether to show file path and line numbers in logs + + Example: + >>> from redis_release.logging_config import setup_logging + >>> import logging + >>> setup_logging(level=logging.DEBUG) + >>> logger = logging.getLogger(__name__) + >>> logger.info("[blue]Hello[/blue] [green]World[/green]") + """ + logging.basicConfig( + level=level, + format="%(message)s", + datefmt="[%X]", + handlers=[ + RichHandler( + rich_tracebacks=True, + show_time=True, + show_level=True, + show_path=show_path, + markup=True, # Enable Rich markup in log messages + tracebacks_show_locals=True, # Show local variables in tracebacks + ) + ], + ) + + # Optionally reduce noise from some verbose libraries + logging.getLogger("asyncio").setLevel(logging.WARNING) + logging.getLogger("aiohttp").setLevel(logging.WARNING) diff --git a/src/redis_release/tree.py b/src/redis_release/tree.py new file mode 100644 index 0000000..3e001e0 --- /dev/null +++ b/src/redis_release/tree.py @@ -0,0 +1,115 @@ +import asyncio +import logging +import os + +import py_trees +from py_trees.behaviour import Behaviour +from py_trees.blackboard import Blackboard + +from .bht.behaviours import IsWorkflowIdentified, RedisReleaseBehaviour, TriggerWorkflow +from .bht.composites import FindWorkflowByUUID +from .bht.state import Workflow +from .github_client_async import GitHubClientAsync +from .state_manager import BlackboardStorage + +logger = logging.getLogger(__name__) + + +def testpt(): + root = py_trees.composites.Selector("Redis Release", False) + childuno = RedisReleaseBehaviour("Child1") + + child1 = py_trees.behaviours.Success("Childsuc") + # child2 = py_trees.behaviours.Failure("Child2") + # root.add_children([child1, child2]) + root.add_children([childuno, child1]) + # res = py_trees.display.render_dot_tree(root) + # print(py_trees.display.xhtml_tree(root, "redis_release")) + # print(res) + print(py_trees.display.unicode_tree(root, 
show_status=True)) + print( + "sjj fsldkjf f sldkjf s fslkdjflskdjf sldkjflskdjfskjd lskdjflskdjflksdfjlkj" + ) + + return root + + +def testpt2() -> Behaviour: + github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) + root = py_trees.composites.Selector("Redis Release", False) + workflow = Workflow( + repo="Peter-Sh/docker-library-redis", + workflow_file="release_build_and_test.yml", + inputs={"release_tag": "8.5.7"}, + ref="release/8.2", + ) + is_workflow_identified = IsWorkflowIdentified("Is Workflow Identified?", workflow) + identify_workflow = FindWorkflowByUUID("Identify Workflow", workflow, github_client) + trigger_workflow = TriggerWorkflow("Trigger Workflow", workflow, github_client) + root.add_children([is_workflow_identified, identify_workflow, trigger_workflow]) + return root + + +def setup_blackboard(storage: dict): + Blackboard.storage = storage + Blackboard.enable_activity_stream() + + +def save_blackboard(bbs: BlackboardStorage): + try: + for a in Blackboard.activity_stream.data: + if a.activity_type == "INITIALISED" or a.activity_type == "WRITE": + print("saving") + bbs.put("8.2.1", Blackboard.storage) + finally: + Blackboard.activity_stream.clear() + + +async def async_tick_tock( + tree: py_trees.trees.BehaviourTree, period: float = 3.0 +) -> None: + # bbs = BlackboardStorage() + # stored = bbs.get("8.2.1") or {} + # setup_blackboard(stored) + # print(f"Stored data: {stored}") + # # bbs.put("8.2.1", {"test": "test"}) + print("starting tick tock") + while True: + tree.tick() + print("tick") + print( + py_trees.display.unicode_tree( + tree.root, show_status=True, show_only_visited=False + ) + ) + await asyncio.sleep(period) + # print(f"bb: {Blackboard.storage}") + # stream = [f"{a.key}:{a.activity_type}" for a in Blackboard.activity_stream.data] + # print(f"bbas: {stream}") + # save_blackboard(bbs) + # print("tock") + + +async def async_tick_tock2( + tree: py_trees.trees.BehaviourTree, period: float = 3.0 +) -> None: + tree.tick() + 
count_no_tasks_loop = 0 + while True: + logger.info("tick") + print( + py_trees.display.unicode_tree( + tree.root, show_status=True, show_only_visited=False + ) + ) + await asyncio.sleep(0) + other_tasks = asyncio.all_tasks() - {asyncio.current_task()} + logger.debug(other_tasks) + if not other_tasks: + count_no_tasks_loop += 1 + if count_no_tasks_loop > 1: + logger.info("Tree finished") + break + else: + await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) + tree.tick() From 3eac9b5b616b87c3b52e9176a57bc505c244dd82 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Wed, 1 Oct 2025 17:35:14 +0300 Subject: [PATCH 03/39] Clean up, fix retrigger, logger wrapper --- src/redis_release/bht/behaviours.py | 69 +++++++---------- src/redis_release/bht/composites.py | 23 ++++-- src/redis_release/bht/logging_wrapper.py | 67 +++++++++++++++++ src/redis_release/bht/state.py | 2 +- src/redis_release/cli.py | 14 ++-- src/redis_release/github_client_async.py | 1 - src/redis_release/logging_config.py | 3 +- src/redis_release/models.py | 1 - src/redis_release/tree.py | 94 +++++++----------------- 9 files changed, 150 insertions(+), 124 deletions(-) create mode 100644 src/redis_release/bht/logging_wrapper.py diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index 2c1633b..fddb920 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -1,6 +1,5 @@ import asyncio import logging -import random import uuid from datetime import datetime from typing import Optional @@ -9,6 +8,7 @@ from ..github_client_async import GitHubClientAsync from ..models import WorkflowRun +from .logging_wrapper import PyTreesLoggerWrapper from .state import Workflow logger = logging.getLogger(__name__) @@ -24,36 +24,6 @@ def log_exception_and_return_failure( return py_trees.common.Status.FAILURE -class RedisReleaseBehaviour(py_trees.behaviour.Behaviour): - def __init__(self, name: str) -> None: - random.seed() - 
print("wtf") - super().__init__(name=name) - self.release_state = self.attach_blackboard_client( - namespace="release_state/docker/build" - ) - self.release_state.register_key( - key="workflow", access=py_trees.common.Access.WRITE - ) - super(RedisReleaseBehaviour, self).__init__(name) - - def initialise(self) -> None: - # print(f'init {self.blackboard.foo}') - if self.release_state.exists("workflow"): - print(f"exists {self.release_state.workflow}") - else: - # self.release_state.set("workflow", {}) - self.release_state.set("workflow.uuid", random.randint(0, 10000)) - - def update(self) -> py_trees.common.Status: - print("update") - self.release_state.set("workflow.uuid", random.randint(0, 10000)) - return py_trees.common.Status.RUNNING - - def terminate(self, new_status: py_trees.common.Status) -> None: - print("terminate") - - class TriggerWorkflow(py_trees.behaviour.Behaviour): def __init__( self, @@ -80,7 +50,6 @@ def initialise(self) -> None: ) def update(self) -> py_trees.common.Status: - print("foo") if self.task is None: logger.error("[red]Task is None - workflow was not initialized[/red]") return py_trees.common.Status.FAILURE @@ -96,6 +65,7 @@ def update(self) -> py_trees.common.Status: ) return py_trees.common.Status.SUCCESS except Exception as e: + self.workflow.trigger_failed = True return log_exception_and_return_failure("TriggerWorkflow", e) def terminate(self, new_status: py_trees.common.Status) -> None: @@ -110,14 +80,18 @@ def __init__( workflow: Workflow, github_client: GitHubClientAsync, ) -> None: + self.github_client = github_client self.workflow = workflow self.task: Optional[asyncio.Task[Optional[WorkflowRun]]] = None super().__init__(name=name) + self.logger = PyTreesLoggerWrapper(logging.getLogger(self.name)) def initialise(self) -> None: if self.workflow.uuid is None: - logger.error("[red]Workflow UUID is None - cannot identify workflow[/red]") + self.logger.error( + "[red]Workflow UUID is None - cannot identify workflow[/red]" + ) 
return self.task = asyncio.create_task( @@ -127,32 +101,32 @@ def initialise(self) -> None: ) def update(self) -> py_trees.common.Status: - logger.debug("IdentifyWorkflowByUUID: update") + self.logger.debug("IdentifyWorkflowByUUID: update") if self.task is None: - logger.error("[red]Task is None - behaviour was not initialized[/red]") + self.logger.error("red]Task is None - behaviour was not initialized[/red]") return py_trees.common.Status.FAILURE if not self.task.done(): - logger.debug( - f"IdentifyWorkflowByUUID: Task not yet done {self.task.cancelled()} {self.task.done()} {self.task}" + self.logger.debug( + f"Task not yet done {self.task.cancelled()} {self.task.done()} {self.task}" ) return py_trees.common.Status.RUNNING try: - logger.debug("IdentifyWorkflowByUUID: before result") + self.logger.debug("before result") result = self.task.result() - logger.debug(f"IdentifyWorkflowByUUID: result {result}") + self.logger.debug("result {result}") if result is None: - logger.error("[red]Workflow not found[/red]") + self.logger.error("[red]Workflow not found[/red]") return py_trees.common.Status.FAILURE self.workflow.run_id = result.run_id - logger.info( + self.logger.info( f"[green]Workflow found successfully:[/green] uuid: {self.workflow.uuid}, run_id: {self.workflow.run_id}" ) return py_trees.common.Status.SUCCESS except Exception as e: - return log_exception_and_return_failure("TriggerWorkflow", e) + return log_exception_and_return_failure(f"{self.name} TriggerWorkflow", e) class Sleep(py_trees.behaviour.Behaviour): @@ -177,6 +151,17 @@ def update(self) -> py_trees.common.Status: return py_trees.common.Status.SUCCESS +class IsWorkflowTriggerFailed(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.workflow.trigger_failed: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE + + class 
IsWorkflowTriggered(py_trees.behaviour.Behaviour): def __init__(self, name: str, workflow: Workflow) -> None: self.workflow = workflow diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index 51845de..85e7f25 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -12,18 +12,31 @@ class FindWorkflowByUUID(Sequence): max_retries: int = 3 def __init__( - self, name: str, workflow: Workflow, github_client: GitHubClientAsync + self, + name: str, + workflow: Workflow, + github_client: GitHubClientAsync, + log_prefix: str = "", ) -> None: - is_workflow_triggered = IsWorkflowTriggered("Is Workflow Triggered?", workflow) + if log_prefix != "": + log_prefix = f"{log_prefix}." + + is_workflow_triggered = IsWorkflowTriggered( + f"{log_prefix}Is Workflow Triggered?", workflow + ) identify_workflow = IdentifyWorkflowByUUID( - "Identify Workflow by UUID", workflow, github_client + f"{log_prefix}Identify Workflow by UUID", workflow, github_client ) sleep = Sleep("Sleep", 5) sleep_then_identify = Sequence( - "Sleep then Identify", memory=True, children=[sleep, identify_workflow] + f"{log_prefix}Sleep then Identify", + memory=True, + children=[sleep, identify_workflow], ) identify_loop = Retry( - f"Retry {self.max_retries} times", sleep_then_identify, self.max_retries + f"{log_prefix}Retry {self.max_retries} times", + sleep_then_identify, + self.max_retries, ) super().__init__( diff --git a/src/redis_release/bht/logging_wrapper.py b/src/redis_release/bht/logging_wrapper.py new file mode 100644 index 0000000..4fb4d0d --- /dev/null +++ b/src/redis_release/bht/logging_wrapper.py @@ -0,0 +1,67 @@ +"""Wrapper to make Python's logging.Logger compatible with py_trees.logging.Logger.""" + +import logging + +import py_trees.logging + + +class PyTreesLoggerWrapper(py_trees.logging.Logger): + """Wrapper that inherits from py_trees.logging.Logger and delegates to Python's logging.Logger. 
+
+    This class inherits from py_trees.logging.Logger to satisfy type checking requirements
+    while delegating all logging calls to a standard Python logging.Logger instance.
+    This allows py_trees behaviours to use Python's logging infrastructure (with Rich formatting)
+    while maintaining type compatibility with py_trees' expectations.
+
+    Args:
+        logger: A Python logging.Logger instance to delegate to
+
+    Example:
+        >>> import logging
+        >>> from redis_release.bht import logging_wrapper
+        >>>
+        >>> python_logger = logging.getLogger(__name__)
+        >>> wrapped_logger = logging_wrapper.PyTreesLoggerWrapper(python_logger)
+        >>> wrapped_logger.info("[blue]Hello[/blue] [green]World[/green]")
+    """
+
+    def __init__(self, logger: logging.Logger) -> None:
+        """Initialize the logger wrapper.
+
+        Args:
+            logger: A Python logging.Logger instance to delegate to
+        """
+        super().__init__()
+        self._logger = logger
+
+    def debug(self, msg: str) -> None:
+        """Log a message with severity 'DEBUG'.
+
+        Args:
+            msg: The message to log
+        """
+        self._logger.debug(msg)
+
+    def info(self, msg: str) -> None:
+        """Log a message with severity 'INFO'.
+
+        Args:
+            msg: The message to log
+        """
+        self._logger.info(msg)
+
+    def warning(self, msg: str) -> None:
+        """Log a message with severity 'WARNING'.
+
+        Args:
+            msg: The message to log
+        """
+        self._logger.warning(msg)
+
+    def error(self, msg: str) -> None:
+        """Log a message with severity 'ERROR'. 
+ + Args: + msg: The message to log + """ + self._logger.error(msg) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index e5a8285..c15ad8f 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -2,7 +2,6 @@ from typing import Dict, Optional from pydantic import BaseModel -from pyparsing import Opt class Workflow(BaseModel): @@ -12,6 +11,7 @@ class Workflow(BaseModel): ref: str = "main" uuid: Optional[str] = None triggered_at: Optional[datetime] = None + trigger_failed: bool = False started_at: Optional[datetime] = None run_id: Optional[int] = None url: Optional[str] = None diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index c74d59a..dc55827 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -13,7 +13,7 @@ from .logging_config import setup_logging from .models import ReleaseType from .orchestrator import ReleaseOrchestrator -from .tree import async_tick_tock2, testpt2 +from .tree import async_tick_tock, create_root_node app = typer.Typer( name="redis-release", @@ -191,16 +191,18 @@ def status( @app.command() -def release_btree() -> None: +def release_bht() -> None: + """Run release using behaviour tree implementation.""" setup_logging(logging.DEBUG) - root = testpt2() + root = create_root_node() tree = py_trees.trees.BehaviourTree(root) - asyncio.run(async_tick_tock2(tree)) + asyncio.run(async_tick_tock(tree)) @app.command() -def release_print() -> None: - root = testpt2() +def release_print_bht() -> None: + """Print and render (using graphviz) the release behaviour tree.""" + root = create_root_node() py_trees.display.render_dot_tree(root) print(py_trees.display.unicode_tree(root)) diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py index 9c5b613..ab6c254 100644 --- a/src/redis_release/github_client_async.py +++ b/src/redis_release/github_client_async.py @@ -3,7 +3,6 @@ import asyncio import logging import re -import uuid 
from typing import Dict, List, Optional import aiohttp diff --git a/src/redis_release/logging_config.py b/src/redis_release/logging_config.py index 3f22123..e32e2f8 100644 --- a/src/redis_release/logging_config.py +++ b/src/redis_release/logging_config.py @@ -1,6 +1,7 @@ """Logging configuration with Rich handler for beautiful colored output.""" import logging + from rich.logging import RichHandler @@ -20,7 +21,7 @@ def setup_logging(level: int = logging.INFO, show_path: bool = True) -> None: """ logging.basicConfig( level=level, - format="%(message)s", + format="[cyan1]%(name)s:[/cyan1] %(message)s", datefmt="[%X]", handlers=[ RichHandler( diff --git a/src/redis_release/models.py b/src/redis_release/models.py index 7455dba..24013bd 100644 --- a/src/redis_release/models.py +++ b/src/redis_release/models.py @@ -85,7 +85,6 @@ class ReleaseState(BaseModel): tag: str release_type: ReleaseType - force_rebuild: bool = False packages: Dict[PackageType, PackageState] = Field(default_factory=dict) created_at: datetime = Field(default_factory=datetime.now) diff --git a/src/redis_release/tree.py b/src/redis_release/tree.py index 3e001e0..4d81208 100644 --- a/src/redis_release/tree.py +++ b/src/redis_release/tree.py @@ -4,94 +4,54 @@ import py_trees from py_trees.behaviour import Behaviour -from py_trees.blackboard import Blackboard - -from .bht.behaviours import IsWorkflowIdentified, RedisReleaseBehaviour, TriggerWorkflow +from py_trees.composites import Selector, Sequence +from py_trees.decorators import Inverter + +from .bht.behaviours import ( + IsWorkflowIdentified, + IsWorkflowTriggerFailed, + TriggerWorkflow, +) from .bht.composites import FindWorkflowByUUID from .bht.state import Workflow from .github_client_async import GitHubClientAsync -from .state_manager import BlackboardStorage logger = logging.getLogger(__name__) -def testpt(): - root = py_trees.composites.Selector("Redis Release", False) - childuno = RedisReleaseBehaviour("Child1") - - child1 = 
py_trees.behaviours.Success("Childsuc") - # child2 = py_trees.behaviours.Failure("Child2") - # root.add_children([child1, child2]) - root.add_children([childuno, child1]) - # res = py_trees.display.render_dot_tree(root) - # print(py_trees.display.xhtml_tree(root, "redis_release")) - # print(res) - print(py_trees.display.unicode_tree(root, show_status=True)) - print( - "sjj fsldkjf f sldkjf s fslkdjflskdjf sldkjflskdjfskjd lskdjflskdjflksdfjlkj" - ) - - return root - - -def testpt2() -> Behaviour: +def create_root_node() -> Behaviour: github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) - root = py_trees.composites.Selector("Redis Release", False) + root = Selector("Redis Release", False) workflow = Workflow( repo="Peter-Sh/docker-library-redis", workflow_file="release_build_and_test.yml", inputs={"release_tag": "8.5.7"}, ref="release/8.2", ) + is_workflow_identified = IsWorkflowIdentified("Is Workflow Identified?", workflow) - identify_workflow = FindWorkflowByUUID("Identify Workflow", workflow, github_client) - trigger_workflow = TriggerWorkflow("Trigger Workflow", workflow, github_client) + identify_workflow = FindWorkflowByUUID( + "Identify Workflow", workflow, github_client, "DOCKER" + ) + may_start_workflow = Inverter( + "May start workflow?", + IsWorkflowTriggerFailed("Is Workflow Trigger Failed?", workflow), + ) + + trigger_workflow = Sequence( + "Workflow trigger", + True, + [ + may_start_workflow, + TriggerWorkflow("Trigger Workflow", workflow, github_client), + ], + ) root.add_children([is_workflow_identified, identify_workflow, trigger_workflow]) return root -def setup_blackboard(storage: dict): - Blackboard.storage = storage - Blackboard.enable_activity_stream() - - -def save_blackboard(bbs: BlackboardStorage): - try: - for a in Blackboard.activity_stream.data: - if a.activity_type == "INITIALISED" or a.activity_type == "WRITE": - print("saving") - bbs.put("8.2.1", Blackboard.storage) - finally: - Blackboard.activity_stream.clear() - - 
async def async_tick_tock( tree: py_trees.trees.BehaviourTree, period: float = 3.0 -) -> None: - # bbs = BlackboardStorage() - # stored = bbs.get("8.2.1") or {} - # setup_blackboard(stored) - # print(f"Stored data: {stored}") - # # bbs.put("8.2.1", {"test": "test"}) - print("starting tick tock") - while True: - tree.tick() - print("tick") - print( - py_trees.display.unicode_tree( - tree.root, show_status=True, show_only_visited=False - ) - ) - await asyncio.sleep(period) - # print(f"bb: {Blackboard.storage}") - # stream = [f"{a.key}:{a.activity_type}" for a in Blackboard.activity_stream.data] - # print(f"bbas: {stream}") - # save_blackboard(bbs) - # print("tock") - - -async def async_tick_tock2( - tree: py_trees.trees.BehaviourTree, period: float = 3.0 ) -> None: tree.tick() count_no_tasks_loop = 0 From f8230a9a7a61cddb15928e4a55596fa6b40af3e0 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Thu, 2 Oct 2025 19:53:42 +0300 Subject: [PATCH 04/39] Flagging timeout decorator --- src/redis_release/bht/decorators.py | 79 +++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 src/redis_release/bht/decorators.py diff --git a/src/redis_release/bht/decorators.py b/src/redis_release/bht/decorators.py new file mode 100644 index 0000000..ef75e63 --- /dev/null +++ b/src/redis_release/bht/decorators.py @@ -0,0 +1,79 @@ +import time +from typing import Optional + +from py_trees.decorators import Decorator, behaviour, common +from pydantic import BaseModel + + +class TimeoutWithFlag(Decorator): + """ + Executes a child/subtree with a timeout. + + A decorator that applies a timeout pattern to an existing behaviour. + If the timeout is reached, the encapsulated behaviour's + :meth:`~py_trees.behaviour.Behaviour.stop` method is called with + status :data:`~py_trees.common.Status.INVALID` and specified field in + container is set to True, otherwise it will + simply directly tick and return with the same status + as that of it's encapsulated behaviour. 
+ """ + + def __init__( + self, + name: str, + child: behaviour.Behaviour, + duration: float = 5.0, + container: Optional[BaseModel] = None, + field: str = "", + ): + """ + Init with the decorated child and a timeout duration. + + Args: + child: the child behaviour or subtree + name: the decorator name + duration: timeout length in seconds + """ + super(TimeoutWithFlag, self).__init__(name=name, child=child) + self.duration = duration + self.finish_time = 0.0 + self.container = container + self.field = field + + def initialise(self) -> None: + """Reset the feedback message and finish time on behaviour entry.""" + self.finish_time = time.monotonic() + self.duration + self.feedback_message = "" + + def update(self) -> common.Status: + """ + Fail on timeout, or block / reflect the child's result accordingly. + + Terminate the child and return + :data:`~py_trees.common.Status.FAILURE` + if the timeout is exceeded. + + Returns: + the behaviour's new status :class:`~py_trees.common.Status` + """ + current_time = time.monotonic() + if ( + self.decorated.status == common.Status.RUNNING + and current_time > self.finish_time + ): + self.feedback_message = "timed out" + if self.container is not None: + setattr(self.container, self.field, True) + self.logger.debug( + "{}.update() {}".format(self.__class__.__name__, self.feedback_message) + ) + # invalidate the decorated (i.e. cancel it), could also put this logic in a terminate() method + self.decorated.stop(common.Status.INVALID) + return common.Status.FAILURE + if self.decorated.status == common.Status.RUNNING: + self.feedback_message = "time still ticking ... 
[remaining: {}s]".format( + self.finish_time - current_time + ) + else: + self.feedback_message = "child finished before timeout triggered" + return self.decorated.status From 69ce6c601f71cb5b42972053790c9e8cd2577422 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Thu, 2 Oct 2025 19:56:53 +0300 Subject: [PATCH 05/39] Add wait for completion, simplify and fix the tree --- src/redis_release/bht/behaviours.py | 174 ++++++++++++++++++----- src/redis_release/bht/composites.py | 95 ++++++++++++- src/redis_release/bht/state.py | 5 + src/redis_release/github_client_async.py | 121 ++++++++++------ src/redis_release/tree.py | 27 +++- 5 files changed, 329 insertions(+), 93 deletions(-) diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index fddb920..fdae919 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -1,30 +1,61 @@ +""" +Actions and Conditions for the Release Tree + +The guiding principles are: + +* Actions should be atomic and represent a single task. +* Actions should unconditionally perform their job. This simplifies reuse, as any conditions can be applied separately. +* Conditions should not have side effects (e.g., modifying state). 
+""" + import asyncio import logging import uuid from datetime import datetime -from typing import Optional +from token import OP +from typing import Any, Optional import py_trees from ..github_client_async import GitHubClientAsync -from ..models import WorkflowRun +from ..models import WorkflowConclusion, WorkflowRun, WorkflowStatus from .logging_wrapper import PyTreesLoggerWrapper from .state import Workflow logger = logging.getLogger(__name__) -def log_exception_and_return_failure( - TaskName: str, e: Exception -) -> py_trees.common.Status: - logger.error( - f"[red]{TaskName} failed with exception:[/red] {type(e).__name__}: {e}" - ) - logger.error(f"[red]Full traceback:[/red]", exc_info=True) - return py_trees.common.Status.FAILURE +class LoggingAction(py_trees.behaviour.Behaviour): + logger: PyTreesLoggerWrapper + + def __init__(self, name: str) -> None: + super().__init__(name=name) + self.logger = PyTreesLoggerWrapper(logging.getLogger(self.name)) + + def log_exception_and_return_failure(self, e: Exception) -> py_trees.common.Status: + self.logger.error(f"[red]failed with exception:[/red] {type(e).__name__}: {e}") + # use the underlying logger to get the full traceback + self.logger._logger.error(f"[red]Full traceback:[/red]", exc_info=True) + return py_trees.common.Status.FAILURE + + +class ReleaseAction(LoggingAction): + task: Optional[asyncio.Task[Any]] = None + def __init__(self, name: str) -> None: + super().__init__(name=name) -class TriggerWorkflow(py_trees.behaviour.Behaviour): + def check_task_exists(self) -> bool: + if self.task is None: + self.logger.error("[red]Task is None - workflow was not initialized[/red]") + return False + return True + + +### Actions ### + + +class TriggerWorkflow(ReleaseAction): def __init__( self, name: str, @@ -39,7 +70,6 @@ def __init__( def initialise(self) -> None: self.workflow.uuid = str(uuid.uuid4()) self.workflow.inputs["workflow_uuid"] = self.workflow.uuid - logger.info("initialise") self.task = 
asyncio.create_task( self.github_client.trigger_workflow( self.workflow.repo, @@ -50,30 +80,30 @@ def initialise(self) -> None: ) def update(self) -> py_trees.common.Status: - if self.task is None: - logger.error("[red]Task is None - workflow was not initialized[/red]") - return py_trees.common.Status.FAILURE + try: + assert self.task is not None - if not self.task.done(): - return py_trees.common.Status.RUNNING + if not self.task.done(): + return py_trees.common.Status.RUNNING - try: - result = self.task.result() + self.task.result() self.workflow.triggered_at = datetime.now() logger.info( f"[green]Workflow triggered successfully:[/green] {self.workflow.uuid}" ) + self.feedback_message = "workflow triggered" return py_trees.common.Status.SUCCESS except Exception as e: self.workflow.trigger_failed = True - return log_exception_and_return_failure("TriggerWorkflow", e) + self.feedback_message = "failed to trigger workflow" + return self.log_exception_and_return_failure(e) def terminate(self, new_status: py_trees.common.Status) -> None: # TODO: Cancel task pass -class IdentifyWorkflowByUUID(py_trees.behaviour.Behaviour): +class IdentifyWorkflowByUUID(ReleaseAction): def __init__( self, name: str, @@ -83,9 +113,7 @@ def __init__( self.github_client = github_client self.workflow = workflow - self.task: Optional[asyncio.Task[Optional[WorkflowRun]]] = None super().__init__(name=name) - self.logger = PyTreesLoggerWrapper(logging.getLogger(self.name)) def initialise(self) -> None: if self.workflow.uuid is None: @@ -101,21 +129,13 @@ def initialise(self) -> None: ) def update(self) -> py_trees.common.Status: - self.logger.debug("IdentifyWorkflowByUUID: update") - if self.task is None: - self.logger.error("red]Task is None - behaviour was not initialized[/red]") - return py_trees.common.Status.FAILURE + try: + assert self.task is not None - if not self.task.done(): - self.logger.debug( - f"Task not yet done {self.task.cancelled()} {self.task.done()} {self.task}" - ) - return 
py_trees.common.Status.RUNNING + if not self.task.done(): + return py_trees.common.Status.RUNNING - try: - self.logger.debug("before result") result = self.task.result() - self.logger.debug("result {result}") if result is None: self.logger.error("[red]Workflow not found[/red]") return py_trees.common.Status.FAILURE @@ -124,9 +144,54 @@ def update(self) -> py_trees.common.Status: self.logger.info( f"[green]Workflow found successfully:[/green] uuid: {self.workflow.uuid}, run_id: {self.workflow.run_id}" ) + self.feedback_message = ( + f"Workflow identified, run_id: {self.workflow.run_id}" + ) + return py_trees.common.Status.SUCCESS + except Exception as e: + return self.log_exception_and_return_failure(e) + + +class UpdateWorkflowStatus(ReleaseAction): + def __init__( + self, + name: str, + workflow: Workflow, + github_client: GitHubClientAsync, + ) -> None: + self.github_client = github_client + self.workflow = workflow + super().__init__(name=name) + + def initialise(self) -> None: + if self.workflow.run_id is None: + self.logger.error( + "[red]Workflow run_id is None - cannot check completion[/red]" + ) + return + + self.task = asyncio.create_task( + self.github_client.get_workflow_run( + self.workflow.repo, self.workflow.run_id + ) + ) + + def update(self) -> py_trees.common.Status: + try: + assert self.task is not None + + if not self.task.done(): + return py_trees.common.Status.RUNNING + + result = self.task.result() + self.workflow.status = result.status + self.workflow.conclusion = result.conclusion + self.feedback_message = ( + f" {self.workflow.status}, {self.workflow.conclusion}" + ) return py_trees.common.Status.SUCCESS except Exception as e: - return log_exception_and_return_failure(f"{self.name} TriggerWorkflow", e) + return self.log_exception_and_return_failure(e) class Sleep(py_trees.behaviour.Behaviour): @@ -151,6 +216,9 @@ def update(self) -> py_trees.common.Status: return py_trees.common.Status.SUCCESS +### Conditions ### + + class 
IsWorkflowTriggerFailed(py_trees.behaviour.Behaviour): def __init__(self, name: str, workflow: Workflow) -> None: self.workflow = workflow @@ -180,6 +248,40 @@ def __init__(self, name: str, workflow: Workflow) -> None: super().__init__(name=name) def update(self) -> py_trees.common.Status: + self.logger.debug(f"{self.workflow}") if self.workflow.run_id is not None: return py_trees.common.Status.SUCCESS return py_trees.common.Status.FAILURE + + +class IsWorkflowCompleted(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.workflow.status == WorkflowStatus.COMPLETED: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE + + +class IsWorkflowSuccessful(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.workflow.conclusion == WorkflowConclusion.SUCCESS: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE + + +class IsWorkflowTimedOut(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.workflow.timed_out: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index 85e7f25..67f015c 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -1,15 +1,28 @@ +from ast import Invert +from socket import timeout from time import sleep from py_trees.composites import Selector, Sequence -from py_trees.decorators import Retry +from py_trees.decorators import Repeat, Retry from ..github_client_async import GitHubClientAsync 
-from .behaviours import IdentifyWorkflowByUUID, IsWorkflowTriggered, Sleep +from .behaviours import ( + IdentifyWorkflowByUUID, + IsWorkflowCompleted, + IsWorkflowIdentified, + IsWorkflowSuccessful, + IsWorkflowTimedOut, + IsWorkflowTriggered, + Sleep, + UpdateWorkflowStatus, +) +from .decorators import TimeoutWithFlag from .state import Workflow class FindWorkflowByUUID(Sequence): max_retries: int = 3 + poll_interval: int = 5 def __init__( self, @@ -27,7 +40,8 @@ def __init__( identify_workflow = IdentifyWorkflowByUUID( f"{log_prefix}Identify Workflow by UUID", workflow, github_client ) - sleep = Sleep("Sleep", 5) + sleep = Sleep("Sleep", self.poll_interval) + sleep_then_identify = Sequence( f"{log_prefix}Sleep then Identify", memory=True, @@ -38,7 +52,80 @@ def __init__( sleep_then_identify, self.max_retries, ) + identify_if_required = Selector( + f"{log_prefix}Identify if required", + False, + children=[ + IsWorkflowIdentified(f"{log_prefix}Is Workflow Identified?", workflow), + identify_loop, + ], + ) + + super().__init__( + name=name, + memory=False, + children=[is_workflow_triggered, identify_if_required], + ) + + +class WaitForWorkflowCompletion(Sequence): + poll_interval: int + timeout_seconds: int + + def __init__( + self, + name: str, + workflow: Workflow, + github_client: GitHubClientAsync, + log_prefix: str = "", + timeout_seconds: int = 3 * 60, + poll_interval: int = 10, + ) -> None: + if log_prefix != "": + log_prefix = f"{log_prefix}." 
+ + self.poll_interval = poll_interval + self.timeout_seconds = timeout_seconds + + is_workflow_identified = IsWorkflowIdentified( + f"Is Workflow Identified?", workflow + ) + is_workflow_completed = IsWorkflowCompleted(f"Is Workflow Completed?", workflow) + is_worklow_timed_out = IsWorkflowTimedOut(f"Is Workflow Timed Out?", workflow) + update_workflow_status = UpdateWorkflowStatus( + f"{log_prefix}Update Workflow Status", workflow, github_client + ) + update_workflow_status_with_pause = Sequence( + f"{log_prefix}Update Workflow Status with Pause", + memory=True, + children=[ + Sleep("Sleep", self.poll_interval), + update_workflow_status, + ], + ) + + update_workflow_loop = TimeoutWithFlag( + "Timeout", + Repeat("Repeat", update_workflow_status_with_pause, -1), + self.timeout_seconds, + workflow, + "timed_out", + ) + # Sequence: super().__init__( - name=name, memory=False, children=[is_workflow_triggered, identify_loop] + name=name, + memory=False, + children=[ + is_workflow_identified, + Selector( + f"Wait for completion", + False, + children=[ + is_workflow_completed, + is_worklow_timed_out, + update_workflow_loop, + ], + ), + ], ) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index c15ad8f..07a1d8d 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -3,6 +3,8 @@ from pydantic import BaseModel +from redis_release.models import WorkflowConclusion, WorkflowStatus + class Workflow(BaseModel): repo: str @@ -15,3 +17,6 @@ class Workflow(BaseModel): started_at: Optional[datetime] = None run_id: Optional[int] = None url: Optional[str] = None + status: Optional[WorkflowStatus] = None + conclusion: Optional[WorkflowConclusion] = None + timed_out: bool = False diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py index ab6c254..893abdc 100644 --- a/src/redis_release/github_client_async.py +++ b/src/redis_release/github_client_async.py @@ -86,52 +86,6 @@ async def 
trigger_workflow( return False - async def identify_workflow_loop( - self, repo: str, workflow_file: str, workflow_uuid: str, max_tries: int = 10 - ) -> WorkflowRun: - """Identify a specific workflow run by UUID in its name. - - Args: - repo: Repository name - workflow_file: Workflow file name - workflow_uuid: UUID to search for in workflow run names - max_tries: Maximum number of attempts to find the workflow - - Returns: - WorkflowRun object with matching UUID - - Raises: - RuntimeError: If workflow run cannot be found after max_tries - """ - logger.info( - f"[blue]Searching for workflow run with UUID:[/blue] [cyan]{workflow_uuid}[/cyan]" - ) - - for attempt in range(max_tries): - await asyncio.sleep(2) - if attempt > 0: - logger.debug(f"Attempt {attempt + 1}/{max_tries}") - - runs = await self.get_recent_workflow_runs(repo, workflow_file, limit=20) - - for run in runs: - extracted_uuid = self._extract_uuid(run.workflow_id) - if extracted_uuid and extracted_uuid.lower() == workflow_uuid.lower(): - logger.info( - f"[green]Found matching workflow run:[/green] {run.run_id}" - ) - logger.debug(f"Workflow name: {run.workflow_id}") - logger.debug(f"Extracted UUID: {extracted_uuid}") - run.workflow_uuid = workflow_uuid - return run - - logger.debug("No matching workflow found, trying again...") - - raise RuntimeError( - f"Could not find workflow run with UUID {workflow_uuid} after {max_tries} attempts. " - f"The workflow may have failed to start or there may be a delay in GitHub's API." - ) - async def identify_workflow( self, repo: str, workflow_file: str, workflow_uuid: str ) -> WorkflowRun | None: @@ -151,6 +105,81 @@ async def identify_workflow( return run return None + async def get_workflow_run(self, repo: str, run_id: int) -> WorkflowRun: + """Get workflow run status. 
+ + Args: + repo: Repository name + run_id: Workflow run ID + + Returns: + Updated WorkflowRun object + """ + url = f"https://api.github.com/repos/{repo}/actions/runs/{run_id}" + headers = { + "Authorization": f"Bearer {self.token}", + "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + try: + async with aiohttp.ClientSession() as session: + async with session.get( + url, + headers=headers, + timeout=aiohttp.ClientTimeout(total=30), + ) as response: + if response.status >= 400: + # Read response body for error details + try: + error_body = await response.text() + logger.error( + f"[red]Failed to get workflow run:[/red] HTTP {response.status}" + ) + logger.error(f"[red]Response body:[/red] {error_body}") + except Exception: + logger.error( + f"[red]Failed to get workflow run:[/red] HTTP {response.status}" + ) + response.raise_for_status() + + data = await response.json() + + # Map GitHub API status to our enum + github_status = data.get("status", "unknown") + if github_status == "queued": + status = WorkflowStatus.QUEUED + elif github_status == "in_progress": + status = WorkflowStatus.IN_PROGRESS + elif github_status == "completed": + status = WorkflowStatus.COMPLETED + else: + status = WorkflowStatus.PENDING + + # Map GitHub API conclusion to our enum + github_conclusion = data.get("conclusion") + conclusion = None + if github_conclusion == "success": + conclusion = WorkflowConclusion.SUCCESS + elif github_conclusion == "failure": + conclusion = WorkflowConclusion.FAILURE + + workflow_name = data.get("name", "unknown") + workflow_uuid = self._extract_uuid(workflow_name) + + return WorkflowRun( + repo=repo, + workflow_id=workflow_name, + workflow_uuid=workflow_uuid, + run_id=data.get("id"), + status=status, + conclusion=conclusion, + ) + + except aiohttp.ClientError as e: + logger.error(f"[red]Failed to get workflow run:[/red] {e}") + raise + async def get_recent_workflow_runs( self, repo: str, workflow_file: str, limit: int = 
10 ) -> List[WorkflowRun]: diff --git a/src/redis_release/tree.py b/src/redis_release/tree.py index 4d81208..e2f4420 100644 --- a/src/redis_release/tree.py +++ b/src/redis_release/tree.py @@ -8,11 +8,11 @@ from py_trees.decorators import Inverter from .bht.behaviours import ( - IsWorkflowIdentified, + IsWorkflowSuccessful, IsWorkflowTriggerFailed, TriggerWorkflow, ) -from .bht.composites import FindWorkflowByUUID +from .bht.composites import FindWorkflowByUUID, WaitForWorkflowCompletion from .bht.state import Workflow from .github_client_async import GitHubClientAsync @@ -21,7 +21,8 @@ def create_root_node() -> Behaviour: github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) - root = Selector("Redis Release", False) + root = Sequence("Workflow Goal", False) + workflow_run = Selector("Workflow Run", False) workflow = Workflow( repo="Peter-Sh/docker-library-redis", workflow_file="release_build_and_test.yml", @@ -29,12 +30,12 @@ def create_root_node() -> Behaviour: ref="release/8.2", ) - is_workflow_identified = IsWorkflowIdentified("Is Workflow Identified?", workflow) + is_workflow_successful = IsWorkflowSuccessful("Is Workflow Successful?", workflow) identify_workflow = FindWorkflowByUUID( "Identify Workflow", workflow, github_client, "DOCKER" ) may_start_workflow = Inverter( - "May start workflow?", + "Not", IsWorkflowTriggerFailed("Is Workflow Trigger Failed?", workflow), ) @@ -46,7 +47,17 @@ def create_root_node() -> Behaviour: TriggerWorkflow("Trigger Workflow", workflow, github_client), ], ) - root.add_children([is_workflow_identified, identify_workflow, trigger_workflow]) + wait_for_completion = WaitForWorkflowCompletion( + "Wait for completion", workflow, github_client, "DOCKER" + ) + workflow_run.add_children( + [ + wait_for_completion, + identify_workflow, + trigger_workflow, + ] + ) + root.add_children([workflow_run, is_workflow_successful]) return root @@ -67,9 +78,11 @@ async def async_tick_tock( logger.debug(other_tasks) if not other_tasks: 
count_no_tasks_loop += 1 + # tick the tree one more time in case flipped status would lead to new tasks if count_no_tasks_loop > 1: - logger.info("Tree finished") + logger.info(f"Tree finished with {tree.root.status}") break else: + count_no_tasks_loop = 0 await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) tree.tick() From 495ddf20e0c8dee234b235e4c6d1e65ead123fa6 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Sat, 4 Oct 2025 18:02:02 +0300 Subject: [PATCH 06/39] github async client pagination, artifacts and tests --- src/redis_release/github_client_async.py | 372 ++++++++++++++++++----- src/tests/test_github_client_async.py | 153 ++++++++++ 2 files changed, 454 insertions(+), 71 deletions(-) create mode 100644 src/tests/test_github_client_async.py diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py index 893abdc..8db27da 100644 --- a/src/redis_release/github_client_async.py +++ b/src/redis_release/github_client_async.py @@ -1,9 +1,8 @@ """Async GitHub API client for workflow operations.""" -import asyncio import logging import re -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional, Union import aiohttp @@ -24,6 +23,201 @@ def __init__(self, token: str): """ self.token = token + async def github_request( + self, + url: str, + headers: Dict[str, str], + method: str = "GET", + json: Optional[Dict[str, Any]] = None, + params: Optional[Dict[str, Any]] = None, + timeout: int = 30, + error_context: str = "request", + ) -> Dict[str, Any]: + """Make a single GitHub API request with error handling. 
+ + Args: + url: The API URL to fetch + headers: HTTP headers to include in the request + method: HTTP method (GET, POST, PATCH, PUT, DELETE) + json: JSON payload for POST/PATCH/PUT requests + params: Query parameters + timeout: Request timeout in seconds + error_context: Context string for error messages (e.g., "trigger workflow", "get workflow run") + + Returns: + JSON response as a dictionary + + Raises: + aiohttp.ClientError: On HTTP errors + """ + async with aiohttp.ClientSession() as session: + request_method = getattr(session, method.lower()) + + kwargs = { + "headers": headers, + "timeout": aiohttp.ClientTimeout(total=timeout), + } + + if params is not None: + kwargs["params"] = params + + if json is not None: + kwargs["json"] = json + + async with request_method(url, **kwargs) as response: + if response.status >= 400: + # Read response body for error details + try: + error_body = await response.text() + logger.error( + f"[red]Failed to {error_context}:[/red] HTTP {response.status}" + ) + logger.error(f"[red]Response body:[/red] {error_body}") + except Exception: + logger.error( + f"[red]Failed to {error_context}:[/red] HTTP {response.status}" + ) + response.raise_for_status() + + # For methods that may not return content (like POST to workflow dispatch) + if response.status == 204 or not response.content_length: + return {} + + return await response.json() + + async def github_request_paginated( + self, + url: str, + headers: Dict[str, str], + params: Optional[Dict[str, Any]] = None, + timeout: int = 30, + merge_key: Optional[str] = None, + per_page: int = 100, + max_pages: Optional[int] = None, + ) -> Union[List[Dict[str, Any]], Dict[str, Any]]: + """Get paginated results from a GitHub API URL. 
+ + Follows GitHub's pagination using Link headers as described in: + https://docs.github.com/en/rest/using-the-rest-api/using-pagination-in-the-rest-api + + Args: + url: The API URL to fetch + headers: HTTP headers to include in the request + params: Query parameters (per_page will be added/overridden) + timeout: Request timeout in seconds + merge_key: Key to merge results from dict responses (e.g., "artifacts", "workflow_runs") + per_page: Number of items per page (default: 100) + max_pages: Maximum number of pages to fetch (None = all pages) + + Returns: + - If response is a list: merged list of all items from all pages + - If response is a dict: merged dict with merge_key items combined and other fields from last page + + Raises: + ValueError: If response is dict but merge_key is not provided or not found in response + aiohttp.ClientError: On HTTP errors + """ + if params is None: + params = {} + + params["per_page"] = per_page + params["page"] = 1 + + all_results: List[Dict[str, Any]] = [] + merged_dict: Optional[Dict[str, Any]] = None + pages_fetched = 0 + link_header = "" + + async with aiohttp.ClientSession() as session: + while True: + if max_pages and pages_fetched >= max_pages: + break + + request_method = getattr(session, "get") + + kwargs = { + "headers": headers, + "params": params, + "timeout": aiohttp.ClientTimeout(total=timeout), + } + + async with request_method(url, **kwargs) as response: + if response.status >= 400: + try: + error_body = await response.text() + logger.error( + f"[red]Failed to fetch paginated URL:[/red] HTTP {response.status}" + ) + logger.error(f"[red]Response body:[/red] {error_body}") + except Exception: + logger.error( + f"[red]Failed to fetch paginated URL:[/red] HTTP {response.status}" + ) + response.raise_for_status() + + data = await response.json() + pages_fetched += 1 + + # Handle list responses + if isinstance(data, list): + all_results.extend(data) + # Handle dict responses + elif isinstance(data, dict): + if 
merge_key is None: + raise ValueError( + "merge_key is required when API returns a dictionary" + ) + if merge_key not in data: + raise ValueError( + f"merge_key '{merge_key}' not found in response" + ) + + # Initialize merged_dict on first page + if merged_dict is None: + merged_dict = data.copy() + else: + # Merge the items from merge_key + merged_dict[merge_key].extend(data[merge_key]) + # Update other fields from the latest page + for key, value in data.items(): + if key != merge_key: + merged_dict[key] = value + + # Check for Link header to determine if there are more pages + link_header = response.headers.get("Link", "") + if not link_header or 'rel="next"' not in link_header: + break + + # Increment page number for next request + params["page"] = params.get("page", 1) + 1 + + # Return appropriate result type + if isinstance(all_results, list) and len(all_results) > 0: + return all_results + elif merged_dict is not None: + return merged_dict + else: + return [] + + def _extract_next_url(self, link_header: str) -> Optional[str]: + """Extract the 'next' URL from a GitHub Link header. 
+ + Args: + link_header: The Link header value + + Returns: + The next page URL if found, None otherwise + """ + # Link header format: ; rel="next", ; rel="last" + links = link_header.split(",") + for link in links: + if 'rel="next"' in link: + # Extract URL from + url_match = re.search(r"<([^>]+)>", link) + if url_match: + return url_match.group(1) + return None + async def trigger_workflow( self, repo: str, workflow_file: str, inputs: Dict[str, str], ref: str = "main" ) -> bool: @@ -56,36 +250,20 @@ async def trigger_workflow( payload = {"ref": ref, "inputs": enhanced_inputs} try: - async with aiohttp.ClientSession() as session: - async with session.post( - url, - headers=headers, - json=payload, - timeout=aiohttp.ClientTimeout(total=30), - ) as response: - if response.status >= 400: - # Read response body for error details - try: - error_body = await response.text() - logger.error( - f"[red]Failed to trigger workflow:[/red] HTTP {response.status}" - ) - logger.error(f"[red]Response body:[/red] {error_body}") - except Exception: - logger.error( - f"[red]Failed to trigger workflow:[/red] HTTP {response.status}" - ) - response.raise_for_status() - - logger.info(f"[green]Workflow triggered successfully[/green]") - - return True + await self.github_request( + url=url, + headers=headers, + method="POST", + json=payload, + timeout=30, + error_context="trigger workflow", + ) + logger.info(f"[green]Workflow triggered successfully[/green]") + return True except aiohttp.ClientError as e: logger.error(f"[red]Failed to trigger workflow:[/red] {e}") raise - return False - async def identify_workflow( self, repo: str, workflow_file: str, workflow_uuid: str ) -> WorkflowRun | None: @@ -123,27 +301,13 @@ async def get_workflow_run(self, repo: str, run_id: int) -> WorkflowRun: } try: - async with aiohttp.ClientSession() as session: - async with session.get( - url, - headers=headers, - timeout=aiohttp.ClientTimeout(total=30), - ) as response: - if response.status >= 400: - # Read 
response body for error details - try: - error_body = await response.text() - logger.error( - f"[red]Failed to get workflow run:[/red] HTTP {response.status}" - ) - logger.error(f"[red]Response body:[/red] {error_body}") - except Exception: - logger.error( - f"[red]Failed to get workflow run:[/red] HTTP {response.status}" - ) - response.raise_for_status() - - data = await response.json() + data = await self.github_request( + url=url, + headers=headers, + method="GET", + timeout=30, + error_context="get workflow run", + ) # Map GitHub API status to our enum github_status = data.get("status", "unknown") @@ -203,28 +367,14 @@ async def get_recent_workflow_runs( params = {"per_page": limit, "page": 1} try: - async with aiohttp.ClientSession() as session: - async with session.get( - url, - headers=headers, - params=params, - timeout=aiohttp.ClientTimeout(total=30), - ) as response: - if response.status >= 400: - # Read response body for error details - try: - error_body = await response.text() - logger.error( - f"[red]Failed to get workflow runs:[/red] HTTP {response.status}" - ) - logger.error(f"[red]Response body:[/red] {error_body}") - except Exception: - logger.error( - f"[red]Failed to get workflow runs:[/red] HTTP {response.status}" - ) - response.raise_for_status() - - data = await response.json() + data = await self.github_request( + url=url, + headers=headers, + method="GET", + params=params, + timeout=30, + error_context="get workflow runs", + ) runs = [] for run_data in data.get("workflow_runs", []): @@ -267,6 +417,86 @@ async def get_recent_workflow_runs( logger.error(f"[red]Failed to get workflow runs:[/red] {e}") return [] + async def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict]: + """Get artifacts from a completed workflow. + + Args: + repo: Repository name + run_id: Workflow run ID + + Returns: + Dictionary with artifact names as keys and artifact details as values. 
+ Each artifact dictionary contains: id, archive_download_url, created_at, + expires_at, updated_at, size_in_bytes, digest + """ + logger.info(f"[blue]Getting artifacts for workflow {run_id} in {repo}[/blue]") + + url = f"https://api.github.com/repos/{repo}/actions/runs/{run_id}/artifacts" + headers = { + "Authorization": f"Bearer {self.token}", + "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + try: + data = await self.github_request_paginated( + url=url, + headers=headers, + params={}, + timeout=30, + merge_key="artifacts", + per_page=100, + max_pages=None, + ) + + artifacts = {} + + # data is a dict with "artifacts" key containing the merged list + if not isinstance(data, dict): + logger.error("[red]Unexpected response type from API[/red]") + return {} + + for artifact_data in data.get("artifacts", []): + artifact_name = artifact_data.get("name", "unknown") + + # Extract the required fields from the GitHub API response + artifact_info = { + "id": artifact_data.get("id"), + "archive_download_url": artifact_data.get("archive_download_url"), + "created_at": artifact_data.get("created_at"), + "expires_at": artifact_data.get("expires_at"), + "updated_at": artifact_data.get("updated_at"), + "size_in_bytes": artifact_data.get("size_in_bytes"), + "digest": artifact_data.get("workflow_run", {}).get( + "head_sha" + ), # Using head_sha as digest + } + + artifacts[artifact_name] = artifact_info + + if artifacts: + logger.info(f"[green]Found {len(artifacts)} artifacts[/green]") + for artifact_name, artifact_info in artifacts.items(): + size_mb = round( + artifact_info.get("size_in_bytes", 0) / (1024 * 1024), 2 + ) + logger.debug( + f" {artifact_name} ({size_mb}MB) - ID: {artifact_info.get('id')}" + ) + else: + logger.warning( + "[yellow]No artifacts found for this workflow run[/yellow]" + ) + + return artifacts + + except aiohttp.ClientError as e: + logger.error(f"[red]Failed to get artifacts: {e}[/red]") + return {} + except 
ValueError as e: + logger.error(f"[red]Failed to get artifacts: {e}[/red]") + return {} + def _extract_uuid(self, text: str) -> Optional[str]: """Extract UUID from a string if present. diff --git a/src/tests/test_github_client_async.py b/src/tests/test_github_client_async.py new file mode 100644 index 0000000..7ced0dc --- /dev/null +++ b/src/tests/test_github_client_async.py @@ -0,0 +1,153 @@ +"""Tests for GitHub API client functionality.""" + +import pytest +from aiohttp import web +from aiohttp.test_utils import AioHTTPTestCase + +from redis_release.github_client_async import GitHubClientAsync + + +class TestGitHubClientAsync(AioHTTPTestCase): + """Test cases for github_request_paginated method.""" + + async def get_application(self): + """Create a test application with mock endpoints.""" + app = web.Application() + app.router.add_get("/no-pagination", self.handle_no_pagination) + app.router.add_get("/array-pagination", self.handle_array_pagination) + app.router.add_get("/dict-pagination", self.handle_dict_pagination) + return app + + async def handle_no_pagination(self, request): + """Handle request without pagination (no Link header).""" + return web.json_response( + [{"id": 1, "name": "item1"}, {"id": 2, "name": "item2"}] + ) + + async def handle_array_pagination(self, request): + """Handle request with array response and pagination.""" + page = int(request.query.get("page", 1)) + + # Simulate 3 pages of data + if page == 1: + data = [{"id": 1, "name": "item1"}, {"id": 2, "name": "item2"}] + headers = {"Link": '; rel="next"'} + return web.json_response(data, headers=headers) + elif page == 2: + data = [{"id": 3, "name": "item3"}, {"id": 4, "name": "item4"}] + headers = {"Link": '; rel="next"'} + return web.json_response(data, headers=headers) + elif page == 3: + data = [{"id": 5, "name": "item5"}] + # No Link header on last page + return web.json_response(data) + else: + return web.json_response([]) + + async def handle_dict_pagination(self, request): + 
"""Handle request with dict response and pagination.""" + page = int(request.query.get("page", 1)) + + # Simulate 2 pages of data with dict response + if page == 1: + data = { + "total_count": 5, + "artifacts": [ + {"id": 1, "name": "artifact1"}, + {"id": 2, "name": "artifact2"}, + {"id": 3, "name": "artifact3"}, + ], + } + headers = {"Link": '; rel="next"'} + return web.json_response(data, headers=headers) + elif page == 2: + data = { + "total_count": 5, + "artifacts": [ + {"id": 4, "name": "artifact4"}, + {"id": 5, "name": "artifact5"}, + ], + } + # No Link header on last page + return web.json_response(data) + else: + return web.json_response({"total_count": 5, "artifacts": []}) + + async def test_no_link_header(self): + """Test pagination with no Link header (single page response).""" + client = GitHubClientAsync(token="test-token") + url = self.server.make_url("/no-pagination") + headers = {"Authorization": "Bearer test-token"} + + result = await client.github_request_paginated( + url=str(url), + headers=headers, + params={}, + timeout=30, + per_page=30, + max_pages=None, + ) + + # Should return the single page of results + assert isinstance(result, list) + assert len(result) == 2 + assert result[0]["id"] == 1 + assert result[0]["name"] == "item1" + assert result[1]["id"] == 2 + assert result[1]["name"] == "item2" + + async def test_array_pagination(self): + """Test pagination with array response across multiple pages.""" + client = GitHubClientAsync(token="test-token") + url = self.server.make_url("/array-pagination") + headers = {"Authorization": "Bearer test-token"} + + result = await client.github_request_paginated( + url=str(url), + headers=headers, + params={}, + timeout=30, + per_page=30, + max_pages=None, + ) + + # Should merge all pages into a single array + assert isinstance(result, list) + assert len(result) == 5 + assert result[0]["id"] == 1 + assert result[1]["id"] == 2 + assert result[2]["id"] == 3 + assert result[3]["id"] == 4 + assert 
result[4]["id"] == 5 + + async def test_dict_pagination_with_merge_key(self): + """Test pagination with dict response and merge_key.""" + client = GitHubClientAsync(token="test-token") + url = self.server.make_url("/dict-pagination") + headers = {"Authorization": "Bearer test-token"} + + result = await client.github_request_paginated( + url=str(url), + headers=headers, + params={}, + timeout=30, + merge_key="artifacts", + per_page=30, + max_pages=None, + ) + + # Should merge artifacts from all pages + assert isinstance(result, dict) + assert "total_count" in result + assert result["total_count"] == 5 # Should have the value from the last page + assert "artifacts" in result + assert len(result["artifacts"]) == 5 + assert result["artifacts"][0]["id"] == 1 + assert result["artifacts"][1]["id"] == 2 + assert result["artifacts"][2]["id"] == 3 + assert result["artifacts"][3]["id"] == 4 + assert result["artifacts"][4]["id"] == 5 + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) From 5401d210f1de0152bb8163ebf5942d57a8a75e0a Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Sat, 4 Oct 2025 22:48:01 +0300 Subject: [PATCH 07/39] Restructure state, refactor tree add args Prepare for branch detection action --- .gitignore | 2 - src/redis_release/bht/args.py | 13 + src/redis_release/bht/behaviours.py | 93 +++- src/redis_release/bht/composites.py | 106 ++++- src/redis_release/bht/state.py | 211 ++++++++- src/redis_release/bht/tree.py | 116 +++++ src/redis_release/cli.py | 56 ++- src/redis_release/config.py | 56 +++ src/redis_release/github_client_async.py | 6 +- src/redis_release/tree.py | 88 ---- src/tests/__init__.py | 2 + src/tests/test_github_client_async.py | 14 +- src/tests/test_state.py | 531 +++++++++++++++++++++++ 13 files changed, 1164 insertions(+), 130 deletions(-) create mode 100644 src/redis_release/bht/args.py create mode 100644 src/redis_release/bht/tree.py create mode 100644 src/redis_release/config.py delete mode 100644 src/redis_release/tree.py 
create mode 100644 src/tests/__init__.py create mode 100644 src/tests/test_state.py diff --git a/.gitignore b/.gitignore index a57e13a..e3e8162 100644 --- a/.gitignore +++ b/.gitignore @@ -236,8 +236,6 @@ $RECYCLE.BIN/ aws-credentials.json # Local test files -test_*.py -*_test.py temp_*.py # Local configuration diff --git a/src/redis_release/bht/args.py b/src/redis_release/bht/args.py new file mode 100644 index 0000000..6178378 --- /dev/null +++ b/src/redis_release/bht/args.py @@ -0,0 +1,13 @@ +"""Arguments for release automation.""" + +from typing import List + +from pydantic import BaseModel, Field + + +class ReleaseArgs(BaseModel): + """Arguments for release execution.""" + + release_tag: str + force_rebuild: List[str] = Field(default_factory=list) + diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index fdae919..6e27c9e 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -13,14 +13,15 @@ import uuid from datetime import datetime from token import OP -from typing import Any, Optional +from typing import Any, Dict, Optional import py_trees +from pydantic import BaseModel from ..github_client_async import GitHubClientAsync from ..models import WorkflowConclusion, WorkflowRun, WorkflowStatus from .logging_wrapper import PyTreesLoggerWrapper -from .state import Workflow +from .state import PackageMeta, ReleaseMeta, Workflow logger = logging.getLogger(__name__) @@ -55,27 +56,59 @@ def check_task_exists(self) -> bool: ### Actions ### +class IdentifyTargetRef(ReleaseAction): + def __init__( + self, + name: str, + package_meta: PackageMeta, + ) -> None: + self.package_meta = package_meta + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + # For now, just set a hardcoded ref + self.package_meta.ref = "release/8.2" + self.logger.info( + f"[green]Target ref identified:[/green] {self.package_meta.ref}" + ) + self.feedback_message = f"Target ref set to 
{self.package_meta.ref}" + return py_trees.common.Status.SUCCESS + + class TriggerWorkflow(ReleaseAction): def __init__( self, name: str, workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, github_client: GitHubClientAsync, ) -> None: self.github_client = github_client self.workflow = workflow + self.package_meta = package_meta + self.release_meta = release_meta self.task: Optional[asyncio.Task[bool]] = None super().__init__(name=name) def initialise(self) -> None: self.workflow.uuid = str(uuid.uuid4()) self.workflow.inputs["workflow_uuid"] = self.workflow.uuid + if self.release_meta.tag is None: + self.logger.error( + "[red]Release tag is None - cannot trigger workflow[/red]" + ) + self.workflow.ephemeral.trigger_failed = True + self.feedback_message = "failed to trigger workflow" + return + self.workflow.inputs["release_tag"] = self.release_meta.tag + ref = self.package_meta.ref if self.package_meta.ref is not None else "main" self.task = asyncio.create_task( self.github_client.trigger_workflow( - self.workflow.repo, + self.package_meta.repo, self.workflow.workflow_file, self.workflow.inputs, - self.workflow.ref, + ref, ) ) @@ -94,7 +127,7 @@ def update(self) -> py_trees.common.Status: self.feedback_message = "workflow triggered" return py_trees.common.Status.SUCCESS except Exception as e: - self.workflow.trigger_failed = True + self.workflow.ephemeral.trigger_failed = True self.feedback_message = "failed to trigger workflow" return self.log_exception_and_return_failure(e) @@ -109,10 +142,12 @@ def __init__( name: str, workflow: Workflow, github_client: GitHubClientAsync, + package_meta: PackageMeta, ) -> None: self.github_client = github_client self.workflow = workflow + self.package_meta = package_meta super().__init__(name=name) def initialise(self) -> None: @@ -124,7 +159,7 @@ def initialise(self) -> None: self.task = asyncio.create_task( self.github_client.identify_workflow( - self.workflow.repo, self.workflow.workflow_file, 
self.workflow.uuid + self.package_meta.repo, self.workflow.workflow_file, self.workflow.uuid ) ) @@ -158,9 +193,11 @@ def __init__( name: str, workflow: Workflow, github_client: GitHubClientAsync, + package_meta: PackageMeta, ) -> None: self.github_client = github_client self.workflow = workflow + self.package_meta = package_meta super().__init__(name=name) def initialise(self) -> None: @@ -172,7 +209,7 @@ def initialise(self) -> None: self.task = asyncio.create_task( self.github_client.get_workflow_run( - self.workflow.repo, self.workflow.run_id + self.package_meta.repo, self.workflow.run_id ) ) @@ -216,16 +253,43 @@ def update(self) -> py_trees.common.Status: return py_trees.common.Status.SUCCESS +class SetFlag(LoggingAction): + def __init__( + self, name: str, container: BaseModel, flag: str, value: bool = True + ) -> None: + self.container = container + self.flag = flag + self.flag_value = value + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + setattr(self.container, self.flag, self.flag_value) + self.logger.info(f"Set flag {self.flag} to {self.flag_value}") + self.feedback_message = f"flag {self.flag} set to {self.flag_value}" + return py_trees.common.Status.SUCCESS + + ### Conditions ### +class IsTargetRefIdentified(py_trees.behaviour.Behaviour): + def __init__(self, name: str, package_meta: PackageMeta) -> None: + self.package_meta = package_meta + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.package_meta.ref is not None: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE + + class IsWorkflowTriggerFailed(py_trees.behaviour.Behaviour): def __init__(self, name: str, workflow: Workflow) -> None: self.workflow = workflow super().__init__(name=name) def update(self) -> py_trees.common.Status: - if self.workflow.trigger_failed: + if self.workflow.ephemeral.trigger_failed: return py_trees.common.Status.SUCCESS return py_trees.common.Status.FAILURE @@ -282,6 +346,17 
@@ def __init__(self, name: str, workflow: Workflow) -> None: super().__init__(name=name) def update(self) -> py_trees.common.Status: - if self.workflow.timed_out: + if self.workflow.ephemeral.timed_out: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE + + +class IsWorkflowIdentifyFailed(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.workflow.ephemeral.identify_failed: return py_trees.common.Status.SUCCESS return py_trees.common.Status.FAILURE diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index 67f015c..a697a5a 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -2,22 +2,29 @@ from socket import timeout from time import sleep +import py_trees from py_trees.composites import Selector, Sequence -from py_trees.decorators import Repeat, Retry +from py_trees.decorators import Inverter, Repeat, Retry from ..github_client_async import GitHubClientAsync +from .behaviours import IdentifyTargetRef as IdentifyTargetRefAction from .behaviours import ( IdentifyWorkflowByUUID, + IsTargetRefIdentified, IsWorkflowCompleted, IsWorkflowIdentified, + IsWorkflowIdentifyFailed, IsWorkflowSuccessful, IsWorkflowTimedOut, IsWorkflowTriggered, + IsWorkflowTriggerFailed, + SetFlag, Sleep, - UpdateWorkflowStatus, ) +from .behaviours import TriggerWorkflow as TriggerWorkflow +from .behaviours import UpdateWorkflowStatus from .decorators import TimeoutWithFlag -from .state import Workflow +from .state import PackageMeta, ReleaseMeta, Workflow class FindWorkflowByUUID(Sequence): @@ -28,6 +35,7 @@ def __init__( self, name: str, workflow: Workflow, + package_meta: PackageMeta, github_client: GitHubClientAsync, log_prefix: str = "", ) -> None: @@ -38,17 +46,29 @@ def __init__( f"{log_prefix}Is Workflow Triggered?", workflow ) 
identify_workflow = IdentifyWorkflowByUUID( - f"{log_prefix}Identify Workflow by UUID", workflow, github_client + f"{log_prefix}Identify Workflow by UUID", + workflow, + github_client, + package_meta, ) sleep = Sleep("Sleep", self.poll_interval) + is_workflow_identify_failed = IsWorkflowIdentifyFailed( + f"Identify Failed?", workflow + ) sleep_then_identify = Sequence( f"{log_prefix}Sleep then Identify", memory=True, children=[sleep, identify_workflow], ) + set_identify_failed_flag = SetFlag( + f"{log_prefix}Set Identify Failed Flag", + workflow.ephemeral, + "identify_failed", + True, + ) identify_loop = Retry( - f"{log_prefix}Retry {self.max_retries} times", + f"Retry {self.max_retries} times", sleep_then_identify, self.max_retries, ) @@ -56,15 +76,20 @@ def __init__( f"{log_prefix}Identify if required", False, children=[ - IsWorkflowIdentified(f"{log_prefix}Is Workflow Identified?", workflow), + IsWorkflowIdentified(f"Is Workflow Identified?", workflow), + is_workflow_identify_failed, identify_loop, + set_identify_failed_flag, ], ) super().__init__( name=name, memory=False, - children=[is_workflow_triggered, identify_if_required], + children=[ + is_workflow_triggered, + identify_if_required, + ], ) @@ -76,6 +101,7 @@ def __init__( self, name: str, workflow: Workflow, + package_meta: PackageMeta, github_client: GitHubClientAsync, log_prefix: str = "", timeout_seconds: int = 3 * 60, @@ -93,7 +119,7 @@ def __init__( is_workflow_completed = IsWorkflowCompleted(f"Is Workflow Completed?", workflow) is_worklow_timed_out = IsWorkflowTimedOut(f"Is Workflow Timed Out?", workflow) update_workflow_status = UpdateWorkflowStatus( - f"{log_prefix}Update Workflow Status", workflow, github_client + f"{log_prefix}Update Workflow Status", workflow, github_client, package_meta ) update_workflow_status_with_pause = Sequence( f"{log_prefix}Update Workflow Status with Pause", @@ -129,3 +155,67 @@ def __init__( ), ], ) + + +class IdentifyTargetRef(Selector): + """Composite to identify 
target ref if not already identified.""" + + def __init__( + self, + name: str, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str = "", + ) -> None: + if log_prefix != "": + log_prefix = f"{log_prefix}." + + is_target_ref_identified = IsTargetRefIdentified( + f"{log_prefix}Is Target Ref Identified?", package_meta + ) + identify_target_ref = IdentifyTargetRefAction( + f"{log_prefix}Identify Target Ref", package_meta + ) + + super().__init__( + name=name, + memory=False, + children=[is_target_ref_identified, identify_target_ref], + ) + + +class TriggerWorkflowGoal(Sequence): + def __init__( + self, + name: str, + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + log_prefix: str = "", + ) -> None: + if log_prefix != "": + log_prefix = f"{log_prefix}." + + may_start_workflow = Inverter( + f"{log_prefix}Not Trigger Failed", + IsWorkflowTriggerFailed( + f"{log_prefix}Is Workflow Trigger Failed?", workflow + ), + ) + identify_target_ref = IdentifyTargetRef( + f"{log_prefix}Identify Target Ref", package_meta, release_meta, log_prefix + ) + trigger_workflow = TriggerWorkflow( + f"{log_prefix}Trigger Workflow", + workflow, + package_meta, + release_meta, + github_client, + ) + + super().__init__( + name=name, + memory=True, + children=[may_start_workflow, identify_target_ref, trigger_workflow], + ) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 07a1d8d..13cb3e4 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -1,22 +1,217 @@ +import json +import logging from datetime import datetime -from typing import Dict, Optional +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, Optional, Union -from pydantic import BaseModel +from pydantic import BaseModel, Field from redis_release.models import WorkflowConclusion, WorkflowStatus +from ..config import Config + +if TYPE_CHECKING: + from .args import 
ReleaseArgs + +logger = logging.getLogger(__name__) + + +class WorkflowEphemeral(BaseModel): + """Ephemeral workflow state that is not persisted.""" + + trigger_failed: bool = False + identify_failed: bool = False + timed_out: bool = False + class Workflow(BaseModel): - repo: str - workflow_file: str - inputs: Dict[str, str] - ref: str = "main" + workflow_file: str = "" + inputs: Dict[str, str] = Field(default_factory=dict) uuid: Optional[str] = None triggered_at: Optional[datetime] = None - trigger_failed: bool = False started_at: Optional[datetime] = None run_id: Optional[int] = None url: Optional[str] = None status: Optional[WorkflowStatus] = None conclusion: Optional[WorkflowConclusion] = None - timed_out: bool = False + ephemeral: WorkflowEphemeral = Field( + default_factory=WorkflowEphemeral, exclude=True + ) + + +class Phase(BaseModel): + """State for a workflow phase (build or publish).""" + + workflow: Workflow = Field(default_factory=Workflow) + artifacts: Dict[str, Any] = Field(default_factory=dict) + result: Optional[Dict[str, Any]] = None + + +class PackageMetaEphemeral(BaseModel): + """Ephemeral package metadata that is not persisted.""" + + force_rebuild: bool = False + + +class PackageMeta(BaseModel): + """Metadata for a package.""" + + repo: str = "" + ref: Optional[str] = None + ephemeral: PackageMetaEphemeral = Field( + default_factory=PackageMetaEphemeral, exclude=True + ) + + +class Package(BaseModel): + """State for a package in the release.""" + + meta: PackageMeta = Field(default_factory=PackageMeta) + build: Phase = Field(default_factory=Phase) + publish: Phase = Field(default_factory=Phase) + + +class ReleaseMeta(BaseModel): + """Metadata for the release.""" + + tag: Optional[str] = None + + +class ReleaseState(BaseModel): + """Release state adapted for behavior tree usage.""" + + meta: ReleaseMeta = Field(default_factory=ReleaseMeta) + packages: Dict[str, Package] = Field(default_factory=dict) + + @classmethod + def from_config(cls, 
config: Config) -> "ReleaseState": + """Build ReleaseState from config with default values.""" + packages = {} + for package_name, package_config in config.packages.items(): + # Validate and get build workflow file + if not isinstance(package_config.build_workflow, str): + raise ValueError( + f"Package '{package_name}': build_workflow must be a string, " + f"got {type(package_config.build_workflow).__name__}" + ) + if not package_config.build_workflow.strip(): + raise ValueError( + f"Package '{package_name}': build_workflow cannot be empty" + ) + + # Validate and get publish workflow file + if not isinstance(package_config.publish_workflow, str): + raise ValueError( + f"Package '{package_name}': publish_workflow must be a string, " + f"got {type(package_config.publish_workflow).__name__}" + ) + if not package_config.publish_workflow.strip(): + raise ValueError( + f"Package '{package_name}': publish_workflow cannot be empty" + ) + + # Initialize package metadata + package_meta = PackageMeta( + repo=package_config.repo, + ref=None, + ) + + # Initialize build workflow + build_workflow = Workflow( + workflow_file=package_config.build_workflow, + inputs={}, + ) + + # Initialize publish workflow + publish_workflow = Workflow( + workflow_file=package_config.publish_workflow, + inputs={}, + ) + + # Create package state with initialized workflows + packages[package_name] = Package( + meta=package_meta, + build=Phase(workflow=build_workflow), + publish=Phase(workflow=publish_workflow), + ) + + return cls(packages=packages) + + @classmethod + def from_json(cls, data: Union[str, Dict, Path]) -> "ReleaseState": + """Load ReleaseState from JSON string, dict, or file path.""" + if isinstance(data, Path): + with open(data, "r") as f: + json_data = json.load(f) + elif isinstance(data, str): + json_data = json.loads(data) + else: + json_data = data + + return cls(**json_data) + + +class StateSyncer: + """Syncs ReleaseState to file only when changed.""" + + def __init__( + self, + 
config: Config, + args: Optional["ReleaseArgs"] = None, + file_path: Union[str, Path] = "state.json", + ): + self.config = config + self.args = args + self.file_path = Path(file_path) + self.last_dump: Optional[str] = None + self._state: Optional[ReleaseState] = None + + @property + def state(self) -> ReleaseState: + if self._state is None: + loaded = self.load() + if loaded is None: + self._state = ReleaseState.from_config(self.config) + # Set tag from args when creating from config + if self.args: + self._state.meta.tag = self.args.release_tag + else: + self._state = loaded + + # Apply force_rebuild flags from args + if self.args: + if "all" in self.args.force_rebuild: + # Set force_rebuild for all packages + for package_name in self._state.packages: + self._state.packages[ + package_name + ].meta.ephemeral.force_rebuild = True + else: + # Set force_rebuild for specific packages + for package_name in self.args.force_rebuild: + if package_name in self._state.packages: + self._state.packages[ + package_name + ].meta.ephemeral.force_rebuild = True + return self._state + + def load(self) -> Optional[ReleaseState]: + if not self.file_path.exists(): + return None + + with open(self.file_path, "r") as f: + json_data = json.load(f) + + state = ReleaseState(**json_data) + self.last_dump = state.model_dump_json(indent=2) + return state + + def sync(self) -> None: + """Save state to file if changed since last sync.""" + current_dump = self.state.model_dump_json(indent=2) + + if current_dump != self.last_dump: + self.last_dump = current_dump + with open(self.file_path, "w") as f: + f.write(current_dump) + logger.debug("State saved") diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py new file mode 100644 index 0000000..59d1c75 --- /dev/null +++ b/src/redis_release/bht/tree.py @@ -0,0 +1,116 @@ +import asyncio +import logging +import os +from typing import Tuple + +import py_trees +from py_trees.behaviour import Behaviour +from py_trees.composites 
import Selector, Sequence +from py_trees.decorators import Inverter + +from ..config import Config +from ..github_client_async import GitHubClientAsync +from .args import ReleaseArgs +from .behaviours import IsWorkflowSuccessful +from .composites import ( + FindWorkflowByUUID, + TriggerWorkflowGoal, + WaitForWorkflowCompletion, +) +from .state import ReleaseState, StateSyncer + +logger = logging.getLogger(__name__) + + +def initialize_tree_and_state( + config: Config, args: ReleaseArgs +) -> Tuple[Behaviour, StateSyncer]: + github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) + state_syncer = StateSyncer(config, args) + + return (create_root_node(state_syncer.state, github_client), state_syncer) + + +def create_root_node( + state: ReleaseState, github_client: GitHubClientAsync +) -> Behaviour: + + # Get package and workflow + package = state.packages["docker"] + workflow = package.build.workflow + package_meta = package.meta + release_meta = state.meta + logger.debug("bedaa %s", state) + + root = Sequence("Workflow Goal", False) + workflow_run = Selector("Workflow Run", False) + + is_workflow_successful = IsWorkflowSuccessful("Is Workflow Successful?", workflow) + identify_workflow = FindWorkflowByUUID( + "Identify Workflow Goal", workflow, package_meta, github_client, "DOCKER" + ) + trigger_workflow = TriggerWorkflowGoal( + "Trigger Workflow Goal", + workflow, + package_meta, + release_meta, + github_client, + "DOCKER", + ) + wait_for_completion = WaitForWorkflowCompletion( + "Workflow Completion Goal", workflow, package_meta, github_client, "DOCKER" + ) + workflow_run.add_children( + [ + wait_for_completion, + identify_workflow, + trigger_workflow, + ] + ) + root.add_children([workflow_run, is_workflow_successful]) + return root + + +async def async_tick_tock( + tree: py_trees.trees.BehaviourTree, state_syncer: StateSyncer, period: float = 3.0 +) -> None: + """Drive Behaviour tree using async event loop + + The tree is always ticked once. 
+ + Next tick happens when there is at least one task completed. + If async tasks list is empty the final tick is made and if + after that the async tasks queue is still empty the tree is + considered finished. + + """ + print( + py_trees.display.unicode_tree( + tree.root, show_status=True, show_only_visited=False + ) + ) + tree.tick() + count_no_tasks_loop = 0 + while True: + state_syncer.sync() + print( + py_trees.display.unicode_tree( + tree.root, show_status=True, show_only_visited=False + ) + ) + # TODO remove this sleep, since we are awaiting other_tasks + await asyncio.sleep(0) + other_tasks = asyncio.all_tasks() - {asyncio.current_task()} + logger.debug(other_tasks) + if not other_tasks: + count_no_tasks_loop += 1 + # tick the tree one more time in case flipped status would lead to new tasks + if count_no_tasks_loop > 1: + logger.info(f"Tree finished with {tree.root.status}") + break + else: + count_no_tasks_loop = 0 + await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) + + logger.info("tick") + tree.tick() diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index dc55827..1c80dbf 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -3,17 +3,21 @@ import asyncio import logging import os -from typing import Optional +from typing import List, Optional import py_trees import typer from rich.console import Console from rich.table import Table +from redis_release.bht.args import ReleaseArgs +from redis_release.bht.state import ReleaseState + +from .bht.tree import async_tick_tock, create_root_node, initialize_tree_and_state +from .config import load_config from .logging_config import setup_logging from .models import ReleaseType from .orchestrator import ReleaseOrchestrator -from .tree import async_tick_tock, create_root_node app = typer.Typer( name="redis-release", @@ -191,18 +195,56 @@ def status( @app.command() -def release_bht() -> None: +def release_bht( + release_tag: str = typer.Argument(..., help="Release 
tag (e.g., 8.4-m01-int1)"), + config_file: Optional[str] = typer.Option( + None, "--config", "-c", help="Path to config file (default: config.yaml)" + ), + force_rebuild: Optional[List[str]] = typer.Option( + None, + "--force-rebuild", + help="Force rebuild for specific packages (can be specified multiple times). Use 'all' to force rebuild all packages.", + ), +) -> None: """Run release using behaviour tree implementation.""" setup_logging(logging.DEBUG) - root = create_root_node() + config_path = config_file or "config.yaml" + config = load_config(config_path) + + # Create release args + args = ReleaseArgs( + release_tag=release_tag, + force_rebuild=force_rebuild or [], + ) + + root, state_syncer = initialize_tree_and_state(config, args) tree = py_trees.trees.BehaviourTree(root) - asyncio.run(async_tick_tock(tree)) + asyncio.run(async_tick_tock(tree, state_syncer=state_syncer)) @app.command() -def release_print_bht() -> None: +def release_print_bht( + release_tag: str = typer.Argument(..., help="Release tag (e.g., 8.4-m01-int1)"), + config_file: Optional[str] = typer.Option( + None, "--config", "-c", help="Path to config file (default: config.yaml)" + ), + force_rebuild: Optional[List[str]] = typer.Option( + None, + "--force-rebuild", + help="Force rebuild for specific packages (can be specified multiple times). 
Use 'all' to force rebuild all packages.", + ), +) -> None: """Print and render (using graphviz) the release behaviour tree.""" - root = create_root_node() + config_path = config_file or "config.yaml" + config = load_config(config_path) + + # Create release args + args = ReleaseArgs( + release_tag=release_tag, + force_rebuild=force_rebuild or [], + ) + + root, _ = initialize_tree_and_state(config, args) py_trees.display.render_dot_tree(root) print(py_trees.display.unicode_tree(root)) diff --git a/src/redis_release/config.py b/src/redis_release/config.py new file mode 100644 index 0000000..0e078a1 --- /dev/null +++ b/src/redis_release/config.py @@ -0,0 +1,56 @@ +"""Configuration management for Redis release automation.""" + +from pathlib import Path +from typing import Dict, Union + +import yaml +from pydantic import BaseModel, Field + + +class PackageConfig(BaseModel): + """Configuration for a package type.""" + + repo: str + workflow_branch: str = "autodetect" + build_workflow: Union[str, bool] = Field(default=False) + build_timeout_minutes: int = Field(default=45) + publish_workflow: Union[str, bool] = Field(default=False) + publish_timeout_minutes: int = Field(default=10) + + +class Config(BaseModel): + """Root configuration model.""" + + version: int + packages: Dict[str, PackageConfig] + + @classmethod + def from_yaml(cls, path: Union[str, Path] = "config.yaml") -> "Config": + """Load configuration from YAML file.""" + config_path = Path(path) + if not config_path.exists(): + raise FileNotFoundError(f"Config file not found: {config_path}") + + with open(config_path, "r") as f: + data = yaml.safe_load(f) + + # Convert package configs to PackageConfig objects + if "packages" in data: + data["packages"] = { + name: PackageConfig(**pkg_data) + for name, pkg_data in data["packages"].items() + } + + return cls(**data) + + +def load_config(path: Union[str, Path] = "config.yaml") -> Config: + """Load configuration from YAML file. 
+ + Args: + path: Path to config file, defaults to config.yaml in current directory + + Returns: + Loaded configuration object + """ + return Config.from_yaml(path) diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py index 8db27da..09c6adc 100644 --- a/src/redis_release/github_client_async.py +++ b/src/redis_release/github_client_async.py @@ -64,6 +64,8 @@ async def github_request( if json is not None: kwargs["json"] = json + logger.debug(f"Making request to {url} with params {params}") + async with request_method(url, **kwargs) as response: if response.status >= 400: # Read response body for error details @@ -80,9 +82,10 @@ async def github_request( response.raise_for_status() # For methods that may not return content (like POST to workflow dispatch) - if response.status == 204 or not response.content_length: + if response.status == 204: return {} + # logger.debug(f"Response: {await response.json()}") return await response.json() async def github_request_paginated( @@ -272,6 +275,7 @@ async def identify_workflow( f"[blue]Searching for workflow run with UUID:[/blue] [cyan]{workflow_uuid}[/cyan]" ) runs = await self.get_recent_workflow_runs(repo, workflow_file, limit=20) + logger.debug(f"Found {runs} runs") for run in runs: extracted_uuid = self._extract_uuid(run.workflow_id) diff --git a/src/redis_release/tree.py b/src/redis_release/tree.py deleted file mode 100644 index e2f4420..0000000 --- a/src/redis_release/tree.py +++ /dev/null @@ -1,88 +0,0 @@ -import asyncio -import logging -import os - -import py_trees -from py_trees.behaviour import Behaviour -from py_trees.composites import Selector, Sequence -from py_trees.decorators import Inverter - -from .bht.behaviours import ( - IsWorkflowSuccessful, - IsWorkflowTriggerFailed, - TriggerWorkflow, -) -from .bht.composites import FindWorkflowByUUID, WaitForWorkflowCompletion -from .bht.state import Workflow -from .github_client_async import GitHubClientAsync - -logger = 
logging.getLogger(__name__) - - -def create_root_node() -> Behaviour: - github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) - root = Sequence("Workflow Goal", False) - workflow_run = Selector("Workflow Run", False) - workflow = Workflow( - repo="Peter-Sh/docker-library-redis", - workflow_file="release_build_and_test.yml", - inputs={"release_tag": "8.5.7"}, - ref="release/8.2", - ) - - is_workflow_successful = IsWorkflowSuccessful("Is Workflow Successful?", workflow) - identify_workflow = FindWorkflowByUUID( - "Identify Workflow", workflow, github_client, "DOCKER" - ) - may_start_workflow = Inverter( - "Not", - IsWorkflowTriggerFailed("Is Workflow Trigger Failed?", workflow), - ) - - trigger_workflow = Sequence( - "Workflow trigger", - True, - [ - may_start_workflow, - TriggerWorkflow("Trigger Workflow", workflow, github_client), - ], - ) - wait_for_completion = WaitForWorkflowCompletion( - "Wait for completion", workflow, github_client, "DOCKER" - ) - workflow_run.add_children( - [ - wait_for_completion, - identify_workflow, - trigger_workflow, - ] - ) - root.add_children([workflow_run, is_workflow_successful]) - return root - - -async def async_tick_tock( - tree: py_trees.trees.BehaviourTree, period: float = 3.0 -) -> None: - tree.tick() - count_no_tasks_loop = 0 - while True: - logger.info("tick") - print( - py_trees.display.unicode_tree( - tree.root, show_status=True, show_only_visited=False - ) - ) - await asyncio.sleep(0) - other_tasks = asyncio.all_tasks() - {asyncio.current_task()} - logger.debug(other_tasks) - if not other_tasks: - count_no_tasks_loop += 1 - # tick the tree one more time in case flipped status would lead to new tasks - if count_no_tasks_loop > 1: - logger.info(f"Tree finished with {tree.root.status}") - break - else: - count_no_tasks_loop = 0 - await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) - tree.tick() diff --git a/src/tests/__init__.py b/src/tests/__init__.py new file mode 100644 index 
0000000..3330cd1 --- /dev/null +++ b/src/tests/__init__.py @@ -0,0 +1,2 @@ +"""Tests for redis_release package.""" + diff --git a/src/tests/test_github_client_async.py b/src/tests/test_github_client_async.py index 7ced0dc..ebd6369 100644 --- a/src/tests/test_github_client_async.py +++ b/src/tests/test_github_client_async.py @@ -10,7 +10,7 @@ class TestGitHubClientAsync(AioHTTPTestCase): """Test cases for github_request_paginated method.""" - async def get_application(self): + async def get_application(self) -> web.Application: """Create a test application with mock endpoints.""" app = web.Application() app.router.add_get("/no-pagination", self.handle_no_pagination) @@ -18,13 +18,13 @@ async def get_application(self): app.router.add_get("/dict-pagination", self.handle_dict_pagination) return app - async def handle_no_pagination(self, request): + async def handle_no_pagination(self, request: web.Request) -> web.Response: """Handle request without pagination (no Link header).""" return web.json_response( [{"id": 1, "name": "item1"}, {"id": 2, "name": "item2"}] ) - async def handle_array_pagination(self, request): + async def handle_array_pagination(self, request: web.Request) -> web.Response: """Handle request with array response and pagination.""" page = int(request.query.get("page", 1)) @@ -44,7 +44,7 @@ async def handle_array_pagination(self, request): else: return web.json_response([]) - async def handle_dict_pagination(self, request): + async def handle_dict_pagination(self, request: web.Request) -> web.Response: """Handle request with dict response and pagination.""" page = int(request.query.get("page", 1)) @@ -73,7 +73,7 @@ async def handle_dict_pagination(self, request): else: return web.json_response({"total_count": 5, "artifacts": []}) - async def test_no_link_header(self): + async def test_no_link_header(self) -> None: """Test pagination with no Link header (single page response).""" client = GitHubClientAsync(token="test-token") url = 
self.server.make_url("/no-pagination") @@ -96,7 +96,7 @@ async def test_no_link_header(self): assert result[1]["id"] == 2 assert result[1]["name"] == "item2" - async def test_array_pagination(self): + async def test_array_pagination(self) -> None: """Test pagination with array response across multiple pages.""" client = GitHubClientAsync(token="test-token") url = self.server.make_url("/array-pagination") @@ -120,7 +120,7 @@ async def test_array_pagination(self): assert result[3]["id"] == 4 assert result[4]["id"] == 5 - async def test_dict_pagination_with_merge_key(self): + async def test_dict_pagination_with_merge_key(self) -> None: """Test pagination with dict response and merge_key.""" client = GitHubClientAsync(token="test-token") url = self.server.make_url("/dict-pagination") diff --git a/src/tests/test_state.py b/src/tests/test_state.py new file mode 100644 index 0000000..2f70ab9 --- /dev/null +++ b/src/tests/test_state.py @@ -0,0 +1,531 @@ +"""Tests for ReleaseState functionality.""" + +import json +from pathlib import Path + +import pytest + +from redis_release.bht.args import ReleaseArgs +from redis_release.bht.state import ReleaseState, StateSyncer, Workflow +from redis_release.config import Config, PackageConfig + + +class TestReleaseStateFromConfig: + """Test cases for ReleaseState.from_config method.""" + + def test_from_config_with_valid_workflows(self) -> None: + """Test from_config with valid workflow files.""" + # Create a minimal config with valid workflow files + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow="publish.yml", + ) + }, + ) + + state = ReleaseState.from_config(config) + + assert "test-package" in state.packages + assert state.packages["test-package"].meta.repo == "test/repo" + assert state.packages["test-package"].meta.ref is None + assert ( + state.packages["test-package"].build.workflow.workflow_file == "build.yml" + ) + assert ( + 
state.packages["test-package"].publish.workflow.workflow_file + == "publish.yml" + ) + + def test_from_config_with_empty_build_workflow(self) -> None: + """Test from_config fails when build_workflow is empty.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="", + publish_workflow="publish.yml", + ) + }, + ) + + with pytest.raises(ValueError, match="build_workflow cannot be empty"): + ReleaseState.from_config(config) + + def test_from_config_with_empty_publish_workflow(self) -> None: + """Test from_config fails when publish_workflow is empty.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow="", + ) + }, + ) + + with pytest.raises(ValueError, match="publish_workflow cannot be empty"): + ReleaseState.from_config(config) + + def test_from_config_with_whitespace_only_build_workflow(self) -> None: + """Test from_config fails when build_workflow is whitespace only.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow=" ", + publish_workflow="publish.yml", + ) + }, + ) + + with pytest.raises(ValueError, match="build_workflow cannot be empty"): + ReleaseState.from_config(config) + + def test_from_config_with_whitespace_only_publish_workflow(self) -> None: + """Test from_config fails when publish_workflow is whitespace only.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow=" ", + ) + }, + ) + + with pytest.raises(ValueError, match="publish_workflow cannot be empty"): + ReleaseState.from_config(config) + + def test_from_config_with_multiple_packages(self) -> None: + """Test from_config with multiple packages.""" + config = Config( + version=1, + packages={ + "package1": PackageConfig( + repo="test/repo1", + build_workflow="build1.yml", + 
publish_workflow="publish1.yml", + ), + "package2": PackageConfig( + repo="test/repo2", + build_workflow="build2.yml", + publish_workflow="publish2.yml", + ), + }, + ) + + state = ReleaseState.from_config(config) + + assert len(state.packages) == 2 + assert "package1" in state.packages + assert "package2" in state.packages + assert state.packages["package1"].build.workflow.workflow_file == "build1.yml" + assert state.packages["package2"].build.workflow.workflow_file == "build2.yml" + + def test_from_config_error_message_includes_package_name(self) -> None: + """Test that error messages include the package name for debugging.""" + config = Config( + version=1, + packages={ + "my-special-package": PackageConfig( + repo="test/repo", + build_workflow="", + publish_workflow="publish.yml", + ) + }, + ) + + with pytest.raises(ValueError, match="my-special-package"): + ReleaseState.from_config(config) + + def test_from_config_with_boolean_build_workflow(self) -> None: + """Test from_config fails when build_workflow is a boolean.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow=False, + publish_workflow="publish.yml", + ) + }, + ) + + with pytest.raises(ValueError, match="build_workflow must be a string"): + ReleaseState.from_config(config) + + def test_from_config_with_boolean_publish_workflow(self) -> None: + """Test from_config fails when publish_workflow is a boolean.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow=False, + ) + }, + ) + + with pytest.raises(ValueError, match="publish_workflow must be a string"): + ReleaseState.from_config(config) + + +class TestWorkflowEphemeral: + """Test cases for Workflow ephemeral field.""" + + def test_ephemeral_field_exists(self) -> None: + """Test that ephemeral field is accessible.""" + workflow = Workflow(workflow_file="test.yml") + + assert hasattr(workflow, 
"ephemeral") + assert workflow.ephemeral.trigger_failed is False + assert workflow.ephemeral.timed_out is False + + def test_ephemeral_field_can_be_modified(self) -> None: + """Test that ephemeral field values can be modified.""" + workflow = Workflow(workflow_file="test.yml") + + workflow.ephemeral.trigger_failed = True + workflow.ephemeral.timed_out = True + + assert workflow.ephemeral.trigger_failed is True + assert workflow.ephemeral.timed_out is True + + def test_ephemeral_field_not_serialized_to_json(self) -> None: + """Test that ephemeral field is excluded from JSON serialization.""" + workflow = Workflow(workflow_file="test.yml") + workflow.ephemeral.trigger_failed = True + workflow.ephemeral.timed_out = True + + # Serialize to JSON + json_str = workflow.model_dump_json() + json_data = json.loads(json_str) + + # Verify ephemeral field is not in JSON + assert "ephemeral" not in json_data + assert "trigger_failed" not in json_data + assert "timed_out" not in json_data + + # Verify other fields are present + assert "workflow_file" in json_data + assert json_data["workflow_file"] == "test.yml" + + def test_ephemeral_field_not_in_model_dump(self) -> None: + """Test that ephemeral field is excluded from model_dump.""" + workflow = Workflow(workflow_file="test.yml") + workflow.ephemeral.trigger_failed = True + + # Get dict representation + data = workflow.model_dump() + + # Verify ephemeral field is not in dict + assert "ephemeral" not in data + assert "trigger_failed" not in data + assert "timed_out" not in data + + def test_ephemeral_field_initialized_on_deserialization(self) -> None: + """Test that ephemeral field is initialized when loading from JSON.""" + json_str = '{"workflow_file": "test.yml", "inputs": {}}' + + workflow = Workflow.model_validate_json(json_str) + + # Ephemeral field should be initialized with defaults + assert hasattr(workflow, "ephemeral") + assert workflow.ephemeral.trigger_failed is False + assert workflow.ephemeral.timed_out is False + 
+ def test_release_state_ephemeral_not_serialized(self) -> None: + """Test that ephemeral fields are not serialized in ReleaseState.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow="publish.yml", + ) + }, + ) + + state = ReleaseState.from_config(config) + + # Modify ephemeral fields + state.packages["test-package"].build.workflow.ephemeral.trigger_failed = True + state.packages["test-package"].publish.workflow.ephemeral.timed_out = True + + # Serialize to JSON + json_str = state.model_dump_json() + json_data = json.loads(json_str) + + # Verify ephemeral fields are not in JSON + build_workflow = json_data["packages"]["test-package"]["build"]["workflow"] + publish_workflow = json_data["packages"]["test-package"]["publish"]["workflow"] + + assert "ephemeral" not in build_workflow + assert "trigger_failed" not in build_workflow + assert "ephemeral" not in publish_workflow + assert "timed_out" not in publish_workflow + + +class TestReleaseMeta: + """Test cases for ReleaseMeta functionality.""" + + def test_release_meta_tag_field(self) -> None: + """Test that ReleaseMeta has tag field.""" + state = ReleaseState() + assert state.meta.tag is None + + state.meta.tag = "8.4-m01" + assert state.meta.tag == "8.4-m01" + + def test_release_meta_serialization(self) -> None: + """Test that ReleaseMeta is serialized correctly.""" + state = ReleaseState() + state.meta.tag = "8.4-m01" + + json_str = state.model_dump_json() + json_data = json.loads(json_str) + + assert "meta" in json_data + assert json_data["meta"]["tag"] == "8.4-m01" + + def test_release_meta_deserialization(self) -> None: + """Test that ReleaseMeta is deserialized correctly.""" + json_str = '{"meta": {"tag": "8.4-m01"}, "packages": {}}' + state = ReleaseState.model_validate_json(json_str) + + assert state.meta.tag == "8.4-m01" + + +class TestPackageMetaEphemeral: + """Test cases for PackageMetaEphemeral 
functionality.""" + + def test_ephemeral_field_exists(self) -> None: + """Test that ephemeral field exists and force_rebuild defaults to False.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow="publish.yml", + ) + }, + ) + + state = ReleaseState.from_config(config) + assert state.packages["test-package"].meta.ephemeral.force_rebuild is False + + def test_force_rebuild_field_can_be_modified(self) -> None: + """Test that force_rebuild field can be modified.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow="publish.yml", + ) + }, + ) + + state = ReleaseState.from_config(config) + state.packages["test-package"].meta.ephemeral.force_rebuild = True + assert state.packages["test-package"].meta.ephemeral.force_rebuild is True + + def test_ephemeral_not_serialized(self) -> None: + """Test that ephemeral field is not serialized to JSON.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow="publish.yml", + ) + }, + ) + + state = ReleaseState.from_config(config) + state.packages["test-package"].meta.ephemeral.force_rebuild = True + + json_str = state.model_dump_json() + json_data = json.loads(json_str) + + assert "ephemeral" not in json_data["packages"]["test-package"]["meta"] + assert "force_rebuild" not in json_data["packages"]["test-package"]["meta"] + + +class TestStateSyncerWithArgs: + """Test cases for StateSyncer with ReleaseArgs.""" + + def test_state_syncer_sets_tag_from_args(self, tmp_path: Path) -> None: + """Test that StateSyncer sets tag from ReleaseArgs when creating from config.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow="publish.yml", + ) + }, + ) + + args = 
ReleaseArgs(release_tag="8.4-m01", force_rebuild=[]) + state_file = tmp_path / "state.json" + syncer = StateSyncer(config, args, file_path=state_file) + + assert syncer.state.meta.tag == "8.4-m01" + + def test_state_syncer_sets_force_rebuild_from_args(self, tmp_path: Path) -> None: + """Test that StateSyncer sets force_rebuild flags from ReleaseArgs.""" + config = Config( + version=1, + packages={ + "docker": PackageConfig( + repo="test/docker", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + "redis": PackageConfig( + repo="test/redis", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + }, + ) + + args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker"]) + state_file = tmp_path / "state.json" + syncer = StateSyncer(config, args, file_path=state_file) + + assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True + assert syncer.state.packages["redis"].meta.ephemeral.force_rebuild is False + + def test_state_syncer_sets_multiple_force_rebuild_from_args( + self, tmp_path: Path + ) -> None: + """Test that StateSyncer sets multiple force_rebuild flags from ReleaseArgs.""" + config = Config( + version=1, + packages={ + "docker": PackageConfig( + repo="test/docker", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + "redis": PackageConfig( + repo="test/redis", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + "snap": PackageConfig( + repo="test/snap", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + }, + ) + + args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker", "snap"]) + state_file = tmp_path / "state.json" + syncer = StateSyncer(config, args, file_path=state_file) + + assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True + assert syncer.state.packages["redis"].meta.ephemeral.force_rebuild is False + assert syncer.state.packages["snap"].meta.ephemeral.force_rebuild is True + + def 
test_state_syncer_without_args(self, tmp_path: Path) -> None: + """Test that StateSyncer works without ReleaseArgs.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + publish_workflow="publish.yml", + ) + }, + ) + + state_file = tmp_path / "state.json" + syncer = StateSyncer(config, args=None, file_path=state_file) + + assert syncer.state.meta.tag is None + assert ( + syncer.state.packages["test-package"].meta.ephemeral.force_rebuild is False + ) + + def test_state_syncer_force_rebuild_all(self, tmp_path: Path) -> None: + """Test that StateSyncer sets force_rebuild for all packages when 'all' is specified.""" + config = Config( + version=1, + packages={ + "docker": PackageConfig( + repo="test/docker", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + "redis": PackageConfig( + repo="test/redis", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + "snap": PackageConfig( + repo="test/snap", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + }, + ) + + args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["all"]) + state_file = tmp_path / "state.json" + syncer = StateSyncer(config, args, file_path=state_file) + + # All packages should have force_rebuild set to True + assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True + assert syncer.state.packages["redis"].meta.ephemeral.force_rebuild is True + assert syncer.state.packages["snap"].meta.ephemeral.force_rebuild is True + + def test_state_syncer_force_rebuild_all_with_other_values( + self, tmp_path: Path + ) -> None: + """Test that 'all' takes precedence even if other package names are specified.""" + config = Config( + version=1, + packages={ + "docker": PackageConfig( + repo="test/docker", + build_workflow="build.yml", + publish_workflow="publish.yml", + ), + "redis": PackageConfig( + repo="test/redis", + build_workflow="build.yml", + 
publish_workflow="publish.yml", + ), + }, + ) + + args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker", "all"]) + state_file = tmp_path / "state.json" + syncer = StateSyncer(config, args, file_path=state_file) + + # All packages should have force_rebuild set to True + assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True + assert syncer.state.packages["redis"].meta.ephemeral.force_rebuild is True + + +if __name__ == "__main__": + pytest.main([__file__, "-v"]) From ee7ff18368bd600c97ec1a1040748622261360b4 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Mon, 6 Oct 2025 19:21:37 +0300 Subject: [PATCH 08/39] Simplified tree with FlagGuards, tests for composites --- pyproject.toml | 9 ++ src/redis_release/bht/behaviours.py | 51 ---------- src/redis_release/bht/composites.py | 113 ++++++++++------------ src/redis_release/bht/decorators.py | 141 +++++++++++++++++----------- src/redis_release/bht/state.py | 12 ++- src/redis_release/bht/tree.py | 10 +- src/redis_release/config.py | 5 +- src/tests/test_state.py | 93 ++++++++++++++++++ src/tests/test_tree.py | 89 ++++++++++++++++++ 9 files changed, 346 insertions(+), 177 deletions(-) create mode 100644 src/tests/test_tree.py diff --git a/pyproject.toml b/pyproject.toml index bad5378..61cb611 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ dependencies = [ [project.optional-dependencies] dev = [ "pytest>=7.0.0", + "pytest-asyncio>=0.21.0", "pytest-cov>=4.0.0", "black>=23.0.0", "isort>=5.12.0", @@ -73,3 +74,11 @@ python_version = "3.8" warn_return_any = true warn_unused_configs = true disallow_untyped_defs = true + +[tool.pytest.ini_options] +asyncio_mode = "auto" +asyncio_default_fixture_loop_scope = "function" +log_cli = true +log_cli_level = "DEBUG" +log_cli_format = "%(asctime)s [%(levelname)8s] %(name)s - %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index 
6e27c9e..c25c3a7 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -98,7 +98,6 @@ def initialise(self) -> None: self.logger.error( "[red]Release tag is None - cannot trigger workflow[/red]" ) - self.workflow.ephemeral.trigger_failed = True self.feedback_message = "failed to trigger workflow" return self.workflow.inputs["release_tag"] = self.release_meta.tag @@ -127,7 +126,6 @@ def update(self) -> py_trees.common.Status: self.feedback_message = "workflow triggered" return py_trees.common.Status.SUCCESS except Exception as e: - self.workflow.ephemeral.trigger_failed = True self.feedback_message = "failed to trigger workflow" return self.log_exception_and_return_failure(e) @@ -253,22 +251,6 @@ def update(self) -> py_trees.common.Status: return py_trees.common.Status.SUCCESS -class SetFlag(LoggingAction): - def __init__( - self, name: str, container: BaseModel, flag: str, value: bool = True - ) -> None: - self.container = container - self.flag = flag - self.flag_value = value - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - setattr(self.container, self.flag, self.flag_value) - self.logger.info(f"Set flag {self.flag} to {self.flag_value}") - self.feedback_message = f"flag {self.flag} set to {self.flag_value}" - return py_trees.common.Status.SUCCESS - - ### Conditions ### @@ -283,17 +265,6 @@ def update(self) -> py_trees.common.Status: return py_trees.common.Status.FAILURE -class IsWorkflowTriggerFailed(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - if self.workflow.ephemeral.trigger_failed: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE - - class IsWorkflowTriggered(py_trees.behaviour.Behaviour): def __init__(self, name: str, workflow: Workflow) -> None: self.workflow = workflow @@ -338,25 +309,3 @@ def 
update(self) -> py_trees.common.Status: if self.workflow.conclusion == WorkflowConclusion.SUCCESS: return py_trees.common.Status.SUCCESS return py_trees.common.Status.FAILURE - - -class IsWorkflowTimedOut(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - if self.workflow.ephemeral.timed_out: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE - - -class IsWorkflowIdentifyFailed(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - if self.workflow.ephemeral.identify_failed: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index a697a5a..6d59ebf 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -1,29 +1,20 @@ -from ast import Invert -from socket import timeout -from time import sleep - -import py_trees from py_trees.composites import Selector, Sequence -from py_trees.decorators import Inverter, Repeat, Retry +from py_trees.decorators import Inverter, Repeat, Retry, Timeout from ..github_client_async import GitHubClientAsync -from .behaviours import IdentifyTargetRef as IdentifyTargetRefAction from .behaviours import ( + IdentifyTargetRef, IdentifyWorkflowByUUID, IsTargetRefIdentified, IsWorkflowCompleted, IsWorkflowIdentified, - IsWorkflowIdentifyFailed, IsWorkflowSuccessful, - IsWorkflowTimedOut, IsWorkflowTriggered, - IsWorkflowTriggerFailed, - SetFlag, Sleep, ) from .behaviours import TriggerWorkflow as TriggerWorkflow from .behaviours import UpdateWorkflowStatus -from .decorators import TimeoutWithFlag +from .decorators import FlagGuard from .state import PackageMeta, ReleaseMeta, 
Workflow @@ -53,33 +44,28 @@ def __init__( ) sleep = Sleep("Sleep", self.poll_interval) - is_workflow_identify_failed = IsWorkflowIdentifyFailed( - f"Identify Failed?", workflow - ) sleep_then_identify = Sequence( f"{log_prefix}Sleep then Identify", memory=True, children=[sleep, identify_workflow], ) - set_identify_failed_flag = SetFlag( - f"{log_prefix}Set Identify Failed Flag", - workflow.ephemeral, - "identify_failed", - True, - ) identify_loop = Retry( f"Retry {self.max_retries} times", sleep_then_identify, self.max_retries, ) + identify_guard = FlagGuard( + None, + identify_loop, + workflow.ephemeral, + "identify_failed", + ) identify_if_required = Selector( f"{log_prefix}Identify if required", False, children=[ IsWorkflowIdentified(f"Is Workflow Identified?", workflow), - is_workflow_identify_failed, - identify_loop, - set_identify_failed_flag, + identify_guard, ], ) @@ -104,20 +90,18 @@ def __init__( package_meta: PackageMeta, github_client: GitHubClientAsync, log_prefix: str = "", - timeout_seconds: int = 3 * 60, poll_interval: int = 10, ) -> None: if log_prefix != "": log_prefix = f"{log_prefix}." 
self.poll_interval = poll_interval - self.timeout_seconds = timeout_seconds + self.timeout_seconds = workflow.timeout_minutes * 60 is_workflow_identified = IsWorkflowIdentified( f"Is Workflow Identified?", workflow ) is_workflow_completed = IsWorkflowCompleted(f"Is Workflow Completed?", workflow) - is_worklow_timed_out = IsWorkflowTimedOut(f"Is Workflow Timed Out?", workflow) update_workflow_status = UpdateWorkflowStatus( f"{log_prefix}Update Workflow Status", workflow, github_client, package_meta ) @@ -130,11 +114,14 @@ def __init__( ], ) - update_workflow_loop = TimeoutWithFlag( - "Timeout", - Repeat("Repeat", update_workflow_status_with_pause, -1), - self.timeout_seconds, - workflow, + update_workflow_loop = FlagGuard( + None, + Timeout( + f"Timeout {workflow.timeout_minutes}m", + Repeat("Repeat", update_workflow_status_with_pause, -1), + self.timeout_seconds, + ), + workflow.ephemeral, "timed_out", ) @@ -149,7 +136,6 @@ def __init__( False, children=[ is_workflow_completed, - is_worklow_timed_out, update_workflow_loop, ], ), @@ -157,14 +143,14 @@ def __init__( ) -class IdentifyTargetRef(Selector): - """Composite to identify target ref if not already identified.""" - +class TriggerWorkflowGoal(Sequence): def __init__( self, name: str, + workflow: Workflow, package_meta: PackageMeta, release_meta: ReleaseMeta, + github_client: GitHubClientAsync, log_prefix: str = "", ) -> None: if log_prefix != "": @@ -173,49 +159,52 @@ def __init__( is_target_ref_identified = IsTargetRefIdentified( f"{log_prefix}Is Target Ref Identified?", package_meta ) - identify_target_ref = IdentifyTargetRefAction( - f"{log_prefix}Identify Target Ref", package_meta + is_workflow_triggered = IsWorkflowTriggered( + f"{log_prefix}Is Workflow Triggered?", workflow + ) + trigger_workflow = TriggerWorkflow( + f"{log_prefix}Trigger Workflow", + workflow, + package_meta, + release_meta, + github_client, + ) + trigger_guard = FlagGuard( + None, + trigger_workflow, + workflow.ephemeral, + 
"trigger_failed", + ) + trigger_workflow_if_req = Selector( + f"{log_prefix}Trigger Workflow if Required", + memory=False, + children=[is_workflow_triggered, trigger_guard], ) super().__init__( name=name, memory=False, - children=[is_target_ref_identified, identify_target_ref], + children=[is_target_ref_identified, trigger_workflow_if_req], ) -class TriggerWorkflowGoal(Sequence): +class IdentifyTargetRefGoal(FlagGuard): def __init__( self, name: str, - workflow: Workflow, package_meta: PackageMeta, release_meta: ReleaseMeta, - github_client: GitHubClientAsync, log_prefix: str = "", ) -> None: if log_prefix != "": log_prefix = f"{log_prefix}." - may_start_workflow = Inverter( - f"{log_prefix}Not Trigger Failed", - IsWorkflowTriggerFailed( - f"{log_prefix}Is Workflow Trigger Failed?", workflow - ), - ) - identify_target_ref = IdentifyTargetRef( - f"{log_prefix}Identify Target Ref", package_meta, release_meta, log_prefix - ) - trigger_workflow = TriggerWorkflow( - f"{log_prefix}Trigger Workflow", - workflow, - package_meta, - release_meta, - github_client, - ) - super().__init__( - name=name, - memory=True, - children=[may_start_workflow, identify_target_ref, trigger_workflow], + None, + IdentifyTargetRef( + f"{log_prefix}Identify Target Ref", + package_meta, + ), + package_meta.ephemeral, + "identify_ref_failed", ) diff --git a/src/redis_release/bht/decorators.py b/src/redis_release/bht/decorators.py index ef75e63..8af4c3d 100644 --- a/src/redis_release/bht/decorators.py +++ b/src/redis_release/bht/decorators.py @@ -1,79 +1,108 @@ -import time -from typing import Optional +import logging +from typing import Iterator, List, Optional from py_trees.decorators import Decorator, behaviour, common from pydantic import BaseModel +from redis_release.bht.logging_wrapper import PyTreesLoggerWrapper -class TimeoutWithFlag(Decorator): + +class DecoratorWithLogging(Decorator): + logger: PyTreesLoggerWrapper + + def __init__(self, name: str, child: behaviour.Behaviour) -> None: + 
super().__init__(name=name, child=child) +        self.logger = PyTreesLoggerWrapper(logging.getLogger(self.name)) + + +class FlagGuard(DecoratorWithLogging): """ - Executes a child/subtree with a timeout. - - A decorator that applies a timeout pattern to an existing behaviour. - If the timeout is reached, the encapsulated behaviour's - :meth:`~py_trees.behaviour.Behaviour.stop` method is called with - status :data:`~py_trees.common.Status.INVALID` and specified field in - container is set to True, otherwise it will - simply directly tick and return with the same status - as that of it's encapsulated behaviour. + A decorator that guards behaviour execution based on a flag value. + + If the flag in the container matches the expected flag_value, the guard + returns guard_status immediately without executing the decorated behaviour. + + If the decorated behaviour executes and its status is in the raise_on list, + the flag is set to flag_value. + + Args: + name: the decorator name + child: the child behaviour or subtree + container: the BaseModel instance containing the flag + flag: the name of the flag field in the container + flag_value: the value to check/set for the flag (default: True) + guard_status: the status to return when the guard is triggered (default: FAILURE) + raise_on: list of statuses that should trigger setting the flag (default: [FAILURE]) + when raise_on is None it defaults to [FAILURE]; pass an empty list if the flag should never be raised by this guard (i.e. it is raised by other means) """ def __init__( self, - name: str, + name: Optional[str], child: behaviour.Behaviour, - duration: float = 5.0, - container: Optional[BaseModel] = None, - field: str = "", + container: BaseModel, + flag: str, + flag_value: bool = True, + guard_status: common.Status = common.Status.FAILURE, + raise_on: Optional[List[common.Status]] = None, ): - """ - Init with the decorated child and a timeout duration. 
+ if not hasattr(container, flag): + raise ValueError( + f"Field '{flag}' does not exist on {container.__class__.__name__}" + ) + + current_value = getattr(container, flag) + if current_value is not None and type(current_value) != type(flag_value): + raise TypeError( + f"Field '{flag}' type mismatch: expected {type(flag_value)}, got {type(current_value)}" + ) - Args: - child: the child behaviour or subtree - name: the decorator name - duration: timeout length in seconds - """ - super(TimeoutWithFlag, self).__init__(name=name, child=child) - self.duration = duration - self.finish_time = 0.0 self.container = container - self.field = field + self.flag = flag + self.flag_value = flag_value + self.guard_status = guard_status + self.raise_on = raise_on if raise_on is not None else [common.Status.FAILURE] + if name is None: + name = f"Guard({flag}={flag_value})" + super(FlagGuard, self).__init__(name=name, child=child) - def initialise(self) -> None: - """Reset the feedback message and finish time on behaviour entry.""" - self.finish_time = time.monotonic() + self.duration - self.feedback_message = "" + def _is_flag_active(self) -> bool: + current_flag_value = getattr(self.container, self.flag, None) + return current_flag_value == self.flag_value def update(self) -> common.Status: - """ - Fail on timeout, or block / reflect the child's result accordingly. + current_flag_value = getattr(self.container, self.flag, None) + if current_flag_value == self.flag_value: + self.logger.debug(f"Returning guard status: {self.guard_status}") + return self.guard_status - Terminate the child and return - :data:`~py_trees.common.Status.FAILURE` - if the timeout is exceeded. + return self.decorated.status + + def tick(self) -> Iterator[behaviour.Behaviour]: + """ + Tick the child or bounce back with the original status if already completed. 
- Returns: the behaviour's new status :class:`~py_trees.common.Status` + Yields: + a reference to itself or a behaviour in its child subtree """ - current_time = time.monotonic() - if ( - self.decorated.status == common.Status.RUNNING - and current_time > self.finish_time - ): - self.feedback_message = "timed out" - if self.container is not None: - setattr(self.container, self.field, True) + if self._is_flag_active(): + # ignore the child + for node in behaviour.Behaviour.tick(self): + yield node + else: + # tick the child + for node in Decorator.tick(self): + yield node + + def terminate(self, new_status: common.Status) -> None: + if self._is_flag_active(): + return + + if new_status in self.raise_on: + setattr(self.container, self.flag, self.flag_value) + self.feedback_message = f"{self.flag} set to {self.flag_value}" self.logger.debug( - "{}.update() {}".format(self.__class__.__name__, self.feedback_message) - ) - # invalidate the decorated (i.e. cancel it), could also put this logic in a terminate() method - self.decorated.stop(common.Status.INVALID) - return common.Status.FAILURE - if self.decorated.status == common.Status.RUNNING: - self.feedback_message = "time still ticking ... 
[remaining: {}s]".format( - self.finish_time - current_time + f"Terminating with status {new_status}, setting {self.flag} to {self.flag_value}" ) else: - self.feedback_message = "child finished before timeout triggered" - return self.decorated.status + self.logger.debug(f"Terminating with status {new_status}, no flag change") diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 13cb3e4..6d71424 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -5,6 +5,7 @@ from typing import TYPE_CHECKING, Any, Dict, Optional, Union from pydantic import BaseModel, Field +from rich.pretty import pretty_repr from redis_release.models import WorkflowConclusion, WorkflowStatus @@ -32,6 +33,7 @@ class Workflow(BaseModel): started_at: Optional[datetime] = None run_id: Optional[int] = None url: Optional[str] = None + timeout_minutes: int = 45 status: Optional[WorkflowStatus] = None conclusion: Optional[WorkflowConclusion] = None ephemeral: WorkflowEphemeral = Field( @@ -51,6 +53,7 @@ class PackageMetaEphemeral(BaseModel): """Ephemeral package metadata that is not persisted.""" force_rebuild: bool = False + identify_ref_failed: bool = False class PackageMeta(BaseModel): @@ -113,19 +116,21 @@ def from_config(cls, config: Config) -> "ReleaseState": # Initialize package metadata package_meta = PackageMeta( repo=package_config.repo, - ref=None, + ref=package_config.ref, ) # Initialize build workflow build_workflow = Workflow( workflow_file=package_config.build_workflow, - inputs={}, + inputs=package_config.build_inputs.copy(), + timeout_minutes=package_config.build_timeout_minutes, ) # Initialize publish workflow publish_workflow = Workflow( workflow_file=package_config.publish_workflow, - inputs={}, + inputs=package_config.publish_inputs.copy(), + timeout_minutes=package_config.publish_timeout_minutes, ) # Create package state with initialized workflows @@ -193,6 +198,7 @@ def state(self) -> ReleaseState: self._state.packages[ 
package_name ].meta.ephemeral.force_rebuild = True + logger.debug(pretty_repr(self._state)) return self._state def load(self) -> Optional[ReleaseState]: diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 59d1c75..a2491fc 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -14,6 +14,7 @@ from .behaviours import IsWorkflowSuccessful from .composites import ( FindWorkflowByUUID, + IdentifyTargetRefGoal, TriggerWorkflowGoal, WaitForWorkflowCompletion, ) @@ -40,7 +41,6 @@ def create_root_node( workflow = package.build.workflow package_meta = package.meta release_meta = state.meta - logger.debug("bedaa %s", state) root = Sequence("Workflow Goal", False) workflow_run = Selector("Workflow Run", False) @@ -57,6 +57,9 @@ def create_root_node( github_client, "DOCKER", ) + identify_target_ref = IdentifyTargetRefGoal( + "Identify Target Ref Goal", package_meta, release_meta, "DOCKER" + ) wait_for_completion = WaitForWorkflowCompletion( "Workflow Completion Goal", workflow, package_meta, github_client, "DOCKER" ) @@ -65,6 +68,7 @@ def create_root_node( wait_for_completion, identify_workflow, trigger_workflow, + identify_target_ref, ] ) root.add_children([workflow_run, is_workflow_successful]) @@ -98,15 +102,13 @@ async def async_tick_tock( tree.root, show_status=True, show_only_visited=False ) ) - # TODO remove this sleep, since we are awaiting other_tasks - await asyncio.sleep(0) other_tasks = asyncio.all_tasks() - {asyncio.current_task()} logger.debug(other_tasks) if not other_tasks: count_no_tasks_loop += 1 # tick the tree one more time in case flipped status would lead to new tasks if count_no_tasks_loop > 1: - logger.info(f"Tree finished with {tree.root.status}") + logger.info(f"The Tree converged to {tree.root.status}") break else: count_no_tasks_loop = 0 diff --git a/src/redis_release/config.py b/src/redis_release/config.py index 0e078a1..a8dd7cc 100644 --- a/src/redis_release/config.py +++ 
b/src/redis_release/config.py @@ -1,7 +1,7 @@ """Configuration management for Redis release automation.""" from pathlib import Path -from typing import Dict, Union +from typing import Dict, Optional, Union import yaml from pydantic import BaseModel, Field @@ -11,11 +11,14 @@ class PackageConfig(BaseModel): """Configuration for a package type.""" repo: str + ref: Optional[str] = None workflow_branch: str = "autodetect" build_workflow: Union[str, bool] = Field(default=False) build_timeout_minutes: int = Field(default=45) + build_inputs: Dict[str, str] = Field(default_factory=dict) publish_workflow: Union[str, bool] = Field(default=False) publish_timeout_minutes: int = Field(default=10) + publish_inputs: Dict[str, str] = Field(default_factory=dict) class Config(BaseModel): diff --git a/src/tests/test_state.py b/src/tests/test_state.py index 2f70ab9..e335a1c 100644 --- a/src/tests/test_state.py +++ b/src/tests/test_state.py @@ -39,6 +39,99 @@ def test_from_config_with_valid_workflows(self) -> None: state.packages["test-package"].publish.workflow.workflow_file == "publish.yml" ) + # Check default timeout values + assert state.packages["test-package"].build.workflow.timeout_minutes == 45 + assert state.packages["test-package"].publish.workflow.timeout_minutes == 10 + + def test_from_config_with_custom_timeout_values(self) -> None: + """Test from_config respects custom timeout values from config.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + build_timeout_minutes=60, + publish_workflow="publish.yml", + publish_timeout_minutes=20, + ) + }, + ) + + state = ReleaseState.from_config(config) + + assert state.packages["test-package"].build.workflow.timeout_minutes == 60 + assert state.packages["test-package"].publish.workflow.timeout_minutes == 20 + + def test_from_config_with_ref(self) -> None: + """Test from_config respects ref field from config.""" + config = Config( + version=1, + 
packages={ + "test-package": PackageConfig( + repo="test/repo", + ref="release/8.0", + build_workflow="build.yml", + publish_workflow="publish.yml", + ) + }, + ) + + state = ReleaseState.from_config(config) + + assert state.packages["test-package"].meta.ref == "release/8.0" + + def test_from_config_with_workflow_inputs(self) -> None: + """Test from_config respects build_inputs and publish_inputs from config.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + build_workflow="build.yml", + build_inputs={"key1": "value1", "key2": "value2"}, + publish_workflow="publish.yml", + publish_inputs={"publish_key": "publish_value"}, + ) + }, + ) + + state = ReleaseState.from_config(config) + + assert state.packages["test-package"].build.workflow.inputs == { + "key1": "value1", + "key2": "value2", + } + assert state.packages["test-package"].publish.workflow.inputs == { + "publish_key": "publish_value" + } + + def test_from_config_with_all_optional_fields(self) -> None: + """Test from_config with all optional fields set.""" + config = Config( + version=1, + packages={ + "test-package": PackageConfig( + repo="test/repo", + ref="main", + build_workflow="build.yml", + build_timeout_minutes=60, + build_inputs={"build_arg": "build_val"}, + publish_workflow="publish.yml", + publish_timeout_minutes=20, + publish_inputs={"publish_arg": "publish_val"}, + ) + }, + ) + + state = ReleaseState.from_config(config) + + pkg = state.packages["test-package"] + assert pkg.meta.ref == "main" + assert pkg.build.workflow.timeout_minutes == 60 + assert pkg.build.workflow.inputs == {"build_arg": "build_val"} + assert pkg.publish.workflow.timeout_minutes == 20 + assert pkg.publish.workflow.inputs == {"publish_arg": "publish_val"} def test_from_config_with_empty_build_workflow(self) -> None: """Test from_config fails when build_workflow is empty.""" diff --git a/src/tests/test_tree.py b/src/tests/test_tree.py new file mode 100644 index 0000000..aabfd30 
--- /dev/null +++ b/src/tests/test_tree.py @@ -0,0 +1,89 @@ +"""Tests for behavior tree composites.""" + +import asyncio +from typing import Optional +from unittest.mock import AsyncMock, MagicMock + +import py_trees + +from redis_release.bht.composites import TriggerWorkflowGoal +from redis_release.bht.state import PackageMeta, ReleaseMeta, Workflow +from redis_release.github_client_async import GitHubClientAsync + + +async def async_tick_tock( + tree: py_trees.trees.BehaviourTree, + cutoff: int = 100, + period: float = 0.01, +) -> None: + """Drive Behaviour tree using async event loop with tick cutoff. + + Args: + tree: The behavior tree to tick + cutoff: Maximum number of ticks before stopping + period: Time to wait between ticks (default: 0.01s) + """ + tree.tick() + tick_count = 1 + count_no_tasks_loop = 0 + + while tick_count < cutoff: + other_tasks = asyncio.all_tasks() - {asyncio.current_task()} + + if not other_tasks: + count_no_tasks_loop += 1 + # tick the tree one more time in case flipped status would lead to new tasks + if count_no_tasks_loop > 1: + break + else: + count_no_tasks_loop = 0 + await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) + + tree.tick() + tick_count += 1 + await asyncio.sleep(period) + + +async def test_trigger_workflow_goal_handles_trigger_failure() -> None: + """Test that TriggerWorkflowGoal sets trigger_failed flag when TriggerWorkflow fails. + + This test verifies: + 1. When TriggerWorkflow returns FAILURE, the trigger_failed flag is set + 2. 
GitHub client's trigger_workflow is called only once (not repeatedly) + """ + # Setup state + workflow = Workflow(workflow_file="test.yml", inputs={}) + package_meta = PackageMeta(repo="test/repo", ref="main") + release_meta = ReleaseMeta(tag="1.0.0") + + # Mock GitHub client + github_client = MagicMock(spec=GitHubClientAsync) + github_client.trigger_workflow = AsyncMock(side_effect=Exception("Trigger failed")) + + # Create the composite + trigger_goal = TriggerWorkflowGoal( + name="Test Trigger Goal", + workflow=workflow, + package_meta=package_meta, + release_meta=release_meta, + github_client=github_client, + ) + + # Setup tree + tree = py_trees.trees.BehaviourTree(root=trigger_goal) + tree.setup(timeout=15) + + # Run the tree + await async_tick_tock(tree, cutoff=10) + + # Assertions + assert ( + workflow.ephemeral.trigger_failed is True + ), "trigger_failed flag should be set" + assert github_client.trigger_workflow.call_count == 1, ( + f"GitHub trigger_workflow should be called exactly once, " + f"but was called {github_client.trigger_workflow.call_count} times" + ) + assert ( + tree.root.status == py_trees.common.Status.FAILURE + ), "Tree should end in FAILURE state" From 5ba9672545a497a33921449a2eced3b338073c30 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Mon, 6 Oct 2025 21:00:30 +0300 Subject: [PATCH 09/39] Remove phase, add new flags --- src/redis_release/bht/decorators.py | 5 +++- src/redis_release/bht/state.py | 20 ++++++-------- src/redis_release/bht/tree.py | 2 +- src/tests/test_state.py | 41 +++++++++++++---------------- 4 files changed, 31 insertions(+), 37 deletions(-) diff --git a/src/redis_release/bht/decorators.py b/src/redis_release/bht/decorators.py index 8af4c3d..838f796 100644 --- a/src/redis_release/bht/decorators.py +++ b/src/redis_release/bht/decorators.py @@ -63,7 +63,10 @@ def __init__( self.guard_status = guard_status self.raise_on = raise_on if raise_on is not None else [common.Status.FAILURE] if name is None: - name = 
f"Guard({flag}={flag_value})" + if self.flag_value is True: + name = f"Unless {flag}" + else: + name = f"If {flag}" super(FlagGuard, self).__init__(name=name, child=child) def _is_flag_active(self) -> bool: diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 6d71424..d942273 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -23,6 +23,8 @@ class WorkflowEphemeral(BaseModel): trigger_failed: bool = False identify_failed: bool = False timed_out: bool = False + artifacts_download_failed: bool = False + extract_result_failed: bool = False class Workflow(BaseModel): @@ -36,19 +38,13 @@ class Workflow(BaseModel): timeout_minutes: int = 45 status: Optional[WorkflowStatus] = None conclusion: Optional[WorkflowConclusion] = None + artifacts: Dict[str, Any] = Field(default_factory=dict) + result: Optional[Dict[str, Any]] = None ephemeral: WorkflowEphemeral = Field( default_factory=WorkflowEphemeral, exclude=True ) -class Phase(BaseModel): - """State for a workflow phase (build or publish).""" - - workflow: Workflow = Field(default_factory=Workflow) - artifacts: Dict[str, Any] = Field(default_factory=dict) - result: Optional[Dict[str, Any]] = None - - class PackageMetaEphemeral(BaseModel): """Ephemeral package metadata that is not persisted.""" @@ -70,8 +66,8 @@ class Package(BaseModel): """State for a package in the release.""" meta: PackageMeta = Field(default_factory=PackageMeta) - build: Phase = Field(default_factory=Phase) - publish: Phase = Field(default_factory=Phase) + build: Workflow = Field(default_factory=Workflow) + publish: Workflow = Field(default_factory=Workflow) class ReleaseMeta(BaseModel): @@ -136,8 +132,8 @@ def from_config(cls, config: Config) -> "ReleaseState": # Create package state with initialized workflows packages[package_name] = Package( meta=package_meta, - build=Phase(workflow=build_workflow), - publish=Phase(workflow=publish_workflow), + build=build_workflow, + 
publish=publish_workflow, ) return cls(packages=packages) diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index a2491fc..3087b51 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -38,7 +38,7 @@ def create_root_node( # Get package and workflow package = state.packages["docker"] - workflow = package.build.workflow + workflow = package.build package_meta = package.meta release_meta = state.meta diff --git a/src/tests/test_state.py b/src/tests/test_state.py index e335a1c..5b40035 100644 --- a/src/tests/test_state.py +++ b/src/tests/test_state.py @@ -32,16 +32,11 @@ def test_from_config_with_valid_workflows(self) -> None: assert "test-package" in state.packages assert state.packages["test-package"].meta.repo == "test/repo" assert state.packages["test-package"].meta.ref is None - assert ( - state.packages["test-package"].build.workflow.workflow_file == "build.yml" - ) - assert ( - state.packages["test-package"].publish.workflow.workflow_file - == "publish.yml" - ) + assert state.packages["test-package"].build.workflow_file == "build.yml" + assert state.packages["test-package"].publish.workflow_file == "publish.yml" # Check default timeout values - assert state.packages["test-package"].build.workflow.timeout_minutes == 45 - assert state.packages["test-package"].publish.workflow.timeout_minutes == 10 + assert state.packages["test-package"].build.timeout_minutes == 45 + assert state.packages["test-package"].publish.timeout_minutes == 10 def test_from_config_with_custom_timeout_values(self) -> None: """Test from_config respects custom timeout values from config.""" @@ -60,8 +55,8 @@ def test_from_config_with_custom_timeout_values(self) -> None: state = ReleaseState.from_config(config) - assert state.packages["test-package"].build.workflow.timeout_minutes == 60 - assert state.packages["test-package"].publish.workflow.timeout_minutes == 20 + assert state.packages["test-package"].build.timeout_minutes == 60 + assert 
state.packages["test-package"].publish.timeout_minutes == 20 def test_from_config_with_ref(self) -> None: """Test from_config respects ref field from config.""" @@ -98,11 +93,11 @@ def test_from_config_with_workflow_inputs(self) -> None: state = ReleaseState.from_config(config) - assert state.packages["test-package"].build.workflow.inputs == { + assert state.packages["test-package"].build.inputs == { "key1": "value1", "key2": "value2", } - assert state.packages["test-package"].publish.workflow.inputs == { + assert state.packages["test-package"].publish.inputs == { "publish_key": "publish_value" } @@ -128,10 +123,10 @@ def test_from_config_with_all_optional_fields(self) -> None: pkg = state.packages["test-package"] assert pkg.meta.ref == "main" - assert pkg.build.workflow.timeout_minutes == 60 - assert pkg.build.workflow.inputs == {"build_arg": "build_val"} - assert pkg.publish.workflow.timeout_minutes == 20 - assert pkg.publish.workflow.inputs == {"publish_arg": "publish_val"} + assert pkg.build.timeout_minutes == 60 + assert pkg.build.inputs == {"build_arg": "build_val"} + assert pkg.publish.timeout_minutes == 20 + assert pkg.publish.inputs == {"publish_arg": "publish_val"} def test_from_config_with_empty_build_workflow(self) -> None: """Test from_config fails when build_workflow is empty.""" @@ -220,8 +215,8 @@ def test_from_config_with_multiple_packages(self) -> None: assert len(state.packages) == 2 assert "package1" in state.packages assert "package2" in state.packages - assert state.packages["package1"].build.workflow.workflow_file == "build1.yml" - assert state.packages["package2"].build.workflow.workflow_file == "build2.yml" + assert state.packages["package1"].build.workflow_file == "build1.yml" + assert state.packages["package2"].build.workflow_file == "build2.yml" def test_from_config_error_message_includes_package_name(self) -> None: """Test that error messages include the package name for debugging.""" @@ -352,16 +347,16 @@ def 
test_release_state_ephemeral_not_serialized(self) -> None: state = ReleaseState.from_config(config) # Modify ephemeral fields - state.packages["test-package"].build.workflow.ephemeral.trigger_failed = True - state.packages["test-package"].publish.workflow.ephemeral.timed_out = True + state.packages["test-package"].build.ephemeral.trigger_failed = True + state.packages["test-package"].publish.ephemeral.timed_out = True # Serialize to JSON json_str = state.model_dump_json() json_data = json.loads(json_str) # Verify ephemeral fields are not in JSON - build_workflow = json_data["packages"]["test-package"]["build"]["workflow"] - publish_workflow = json_data["packages"]["test-package"]["publish"]["workflow"] + build_workflow = json_data["packages"]["test-package"]["build"] + publish_workflow = json_data["packages"]["test-package"]["publish"] assert "ephemeral" not in build_workflow assert "trigger_failed" not in build_workflow From 5a8e29de845b66e059f56443fc01f0cad6e6d07f Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Mon, 6 Oct 2025 21:00:51 +0300 Subject: [PATCH 10/39] Add artifacts and extract result method --- src/redis_release/github_client_async.py | 84 ++++++++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py index 09c6adc..cb87959 100644 --- a/src/redis_release/github_client_async.py +++ b/src/redis_release/github_client_async.py @@ -1,7 +1,10 @@ """Async GitHub API client for workflow operations.""" +import io +import json import logging import re +import zipfile from typing import Any, Dict, List, Optional, Union import aiohttp @@ -501,6 +504,87 @@ async def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict logger.error(f"[red]Failed to get artifacts: {e}[/red]") return {} + async def download_and_extract_json_result( + self, + repo: str, + artifacts: Dict[str, Dict], + artifact_name: str, + json_file_name: str, + ) -> Optional[Dict[str, 
Any]]: + """Download artifact and extract JSON result from it. + + Args: + repo: Repository name + artifacts: Dictionary of artifacts from get_workflow_artifacts + artifact_name: Name of the artifact to extract from + json_file_name: Name of the JSON file within the artifact + + Returns: + Parsed JSON content from the specified file, or None if not found + """ + if artifact_name not in artifacts: + logger.warning(f"[yellow]No {artifact_name} artifact found[/yellow]") + return None + + target_artifact = artifacts[artifact_name] + artifact_id = target_artifact.get("id") + + if not artifact_id: + logger.error(f"[red]{artifact_name} artifact has no ID[/red]") + return None + + logger.info( + f"[blue]Extracting {json_file_name} from artifact {artifact_id}[/blue]" + ) + + # Download the artifact and extract JSON file + download_url = target_artifact.get("archive_download_url") + if not download_url: + logger.error(f"[red]{artifact_name} artifact has no download URL[/red]") + return None + + headers = { + "Authorization": f"Bearer {self.token}", + "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + try: + # Download the artifact zip file + async with aiohttp.ClientSession() as session: + async with session.get( + download_url, + headers=headers, + timeout=aiohttp.ClientTimeout(total=30), + ) as response: + if response.status >= 400: + logger.error( + f"[red]Failed to download artifact:[/red] HTTP {response.status}" + ) + response.raise_for_status() + + artifact_content = await response.read() + + # Extract JSON file from the zip + with zipfile.ZipFile(io.BytesIO(artifact_content)) as zip_file: + if json_file_name in zip_file.namelist(): + with zip_file.open(json_file_name) as json_file_obj: + result_data = json.load(json_file_obj) + logger.info( + f"[green]Successfully extracted {json_file_name}[/green]" + ) + return result_data + else: + logger.error(f"[red]{json_file_name} not found in artifact[/red]") + return None + + except 
aiohttp.ClientError as e: + logger.error(f"[red]Failed to download {artifact_name} artifact: {e}[/red]") + return None + except (zipfile.BadZipFile, json.JSONDecodeError, KeyError) as e: + logger.error(f"[red]Failed to extract {json_file_name}: {e}[/red]") + return None + def _extract_uuid(self, text: str) -> Optional[str]: """Extract UUID from a string if present. From 0f002aa7f87af090a857855a811dc86dea1213ec Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Tue, 7 Oct 2025 10:39:49 +0300 Subject: [PATCH 11/39] Add artifacts download --- src/redis_release/bht/behaviours.py | 127 +++++++++++++++++ src/redis_release/bht/composites.py | 214 ++++++++++++++++++++++++++++ src/redis_release/bht/tree.py | 61 +++----- src/tests/test_tree.py | 181 ++++++++++++++++++++++- 4 files changed, 540 insertions(+), 43 deletions(-) diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index c25c3a7..52f82ae 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -309,3 +309,130 @@ def update(self) -> py_trees.common.Status: if self.workflow.conclusion == WorkflowConclusion.SUCCESS: return py_trees.common.Status.SUCCESS return py_trees.common.Status.FAILURE + + +class HasWorkflowArtifacts(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.workflow.artifacts: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE + + +class HasWorkflowResult(py_trees.behaviour.Behaviour): + def __init__(self, name: str, workflow: Workflow) -> None: + self.workflow = workflow + super().__init__(name=name) + + def update(self) -> py_trees.common.Status: + if self.workflow.result is not None: + return py_trees.common.Status.SUCCESS + return py_trees.common.Status.FAILURE + + +### Actions ### + + +class GetWorkflowArtifactsList(ReleaseAction): + def 
__init__( + self, + name: str, + workflow: Workflow, + github_client: GitHubClientAsync, + package_meta: PackageMeta, + ) -> None: + self.github_client = github_client + self.workflow = workflow + self.package_meta = package_meta + super().__init__(name=name) + + def initialise(self) -> None: + if self.workflow.run_id is None: + self.logger.error( + "[red]Workflow run_id is None - cannot get artifacts[/red]" + ) + return + + self.task = asyncio.create_task( + self.github_client.get_workflow_artifacts( + self.package_meta.repo, self.workflow.run_id + ) + ) + + def update(self) -> py_trees.common.Status: + try: + assert self.task is not None + + if not self.task.done(): + return py_trees.common.Status.RUNNING + + result = self.task.result() + self.workflow.artifacts = result + self.logger.info( + f"[green]Downloaded artifacts list:[/green] {len(result)} artifacts" + ) + self.feedback_message = f"Downloaded {len(result)} artifacts" + return py_trees.common.Status.SUCCESS + except Exception as e: + self.feedback_message = "failed to download artifacts list" + return self.log_exception_and_return_failure(e) + + +class ExtractArtifactResult(ReleaseAction): + def __init__( + self, + name: str, + workflow: Workflow, + artifact_name: str, + github_client: GitHubClientAsync, + package_meta: PackageMeta, + ) -> None: + self.github_client = github_client + self.workflow = workflow + self.artifact_name = artifact_name + self.package_meta = package_meta + super().__init__(name=name) + + def initialise(self) -> None: + if not self.workflow.artifacts: + self.logger.error( + "[red]Workflow artifacts is empty - cannot extract result[/red]" + ) + return + + self.task = asyncio.create_task( + self.github_client.download_and_extract_json_result( + self.package_meta.repo, + self.workflow.artifacts, + self.artifact_name, + "result.json", + ) + ) + + def update(self) -> py_trees.common.Status: + try: + assert self.task is not None + + if not self.task.done(): + return 
py_trees.common.Status.RUNNING + + result = self.task.result() + if result is None: + self.logger.error( + f"[red]Failed to extract result from {self.artifact_name}[/red]" + ) + self.feedback_message = "failed to extract result" + return py_trees.common.Status.FAILURE + + self.workflow.result = result + self.logger.info( + f"[green]Extracted result from {self.artifact_name}[/green]" + ) + self.feedback_message = f"Extracted result from {self.artifact_name}" + return py_trees.common.Status.SUCCESS + except Exception as e: + self.feedback_message = "failed to extract result" + return self.log_exception_and_return_failure(e) diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index 6d59ebf..b7a652d 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -3,6 +3,10 @@ from ..github_client_async import GitHubClientAsync from .behaviours import ( + ExtractArtifactResult, + GetWorkflowArtifactsList, + HasWorkflowArtifacts, + HasWorkflowResult, IdentifyTargetRef, IdentifyWorkflowByUUID, IsTargetRefIdentified, @@ -208,3 +212,213 @@ def __init__( package_meta.ephemeral, "identify_ref_failed", ) + + +class GetResultGoal(Sequence): + def __init__( + self, + name: str, + workflow: Workflow, + artifact_name: str, + package_meta: PackageMeta, + github_client: GitHubClientAsync, + log_prefix: str = "", + ) -> None: + if log_prefix != "": + log_prefix = f"{log_prefix}." 
+ + # Extract Result Goal: Sequence to extract result if artifacts exist + has_artifacts = HasWorkflowArtifacts( + f"{log_prefix}Has Workflow Artifacts?", workflow + ) + + # Extract Result If Required: Selector to check if result exists or extract it + has_result = HasWorkflowResult(f"{log_prefix}Has Workflow Result?", workflow) + extract_result = ExtractArtifactResult( + f"{log_prefix}Extract Artifact Result", + workflow, + artifact_name, + github_client, + package_meta, + ) + extract_result_guard = FlagGuard( + None, + extract_result, + workflow.ephemeral, + "extract_result_failed", + ) + extract_result_if_required = Selector( + f"{log_prefix}Extract Result If Required", + memory=False, + children=[has_result, extract_result_guard], + ) + + extract_result_goal = Sequence( + f"{log_prefix}Extract Result Goal", + memory=False, + children=[has_artifacts, extract_result_if_required], + ) + + # Get Workflow Artifacts (guarded) + get_artifacts = GetWorkflowArtifactsList( + f"{log_prefix}Get Workflow Artifacts List", + workflow, + github_client, + package_meta, + ) + get_artifacts_guard = FlagGuard( + None, + get_artifacts, + workflow.ephemeral, + "artifacts_download_failed", + ) + + # Get Result Process: Selector to try extracting or downloading artifacts + get_result_process = Selector( + f"{log_prefix}Get Result Process", + memory=False, + children=[ + extract_result_goal, + get_artifacts_guard, + ], + ) + + # Final check: Has Workflow Result? + has_workflow_result = HasWorkflowResult( + f"{log_prefix}Has Workflow Result?", workflow + ) + + super().__init__( + name=name, + memory=False, + children=[ + get_result_process, + has_workflow_result, + ], + ) + + +class WorkflowSuccessGoal(Sequence): + """Composite that ensures a workflow runs successfully. + + This is a Sequence that: + 1. Runs the workflow (via Workflow Run selector) + 2. 
Checks that the workflow succeeded + """ + + def __init__( + self, + name: str, + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + log_prefix: str = "", + ) -> None: + if log_prefix != "": + log_prefix = f"{log_prefix}." + + # Workflow Run: Selector to get workflow running + is_workflow_successful = IsWorkflowSuccessful( + f"{log_prefix}Is Workflow Successful?", workflow + ) + identify_workflow = FindWorkflowByUUID( + f"{log_prefix}Identify Workflow Goal", + workflow, + package_meta, + github_client, + log_prefix, + ) + trigger_workflow = TriggerWorkflowGoal( + f"{log_prefix}Trigger Workflow Goal", + workflow, + package_meta, + release_meta, + github_client, + log_prefix, + ) + identify_target_ref = IdentifyTargetRefGoal( + f"{log_prefix}Identify Target Ref Goal", + package_meta, + release_meta, + log_prefix, + ) + wait_for_completion = WaitForWorkflowCompletion( + f"{log_prefix}Workflow Completion Goal", + workflow, + package_meta, + github_client, + log_prefix, + ) + + workflow_run = Selector( + f"{log_prefix}Workflow Run", + memory=False, + children=[ + wait_for_completion, + identify_workflow, + trigger_workflow, + identify_target_ref, + ], + ) + + super().__init__( + name=name, + memory=False, + children=[ + workflow_run, + is_workflow_successful, + ], + ) + + +class ReleasePhaseGoal(Sequence): + """Composite that manages a complete release phase (build or publish). + + This is a Sequence that: + 1. Ensures the workflow runs successfully (WorkflowSuccessGoal) + 2. Gets the result from the workflow artifacts (GetResultGoal) + """ + + def __init__( + self, + phase_name: str, + workflow: Workflow, + artifact_name: str, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + log_prefix: str = "", + ) -> None: + if log_prefix != "": + log_prefix = f"{log_prefix}." 
+ + name = f"{log_prefix}{phase_name.capitalize()} Phase Goal" + phase_log_prefix = f"{log_prefix}{phase_name.upper()}" + + workflow_success = WorkflowSuccessGoal( + f"{phase_log_prefix}.Workflow Success Goal", + workflow, + package_meta, + release_meta, + github_client, + phase_log_prefix, + ) + + get_result = GetResultGoal( + f"{phase_log_prefix}.Get Result Goal", + workflow, + artifact_name, + package_meta, + github_client, + phase_log_prefix, + ) + + super().__init__( + name=name, + memory=False, + children=[ + workflow_success, + get_result, + ], + ) diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 3087b51..3e9fba3 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -5,19 +5,11 @@ import py_trees from py_trees.behaviour import Behaviour -from py_trees.composites import Selector, Sequence -from py_trees.decorators import Inverter from ..config import Config from ..github_client_async import GitHubClientAsync from .args import ReleaseArgs -from .behaviours import IsWorkflowSuccessful -from .composites import ( - FindWorkflowByUUID, - IdentifyTargetRefGoal, - TriggerWorkflowGoal, - WaitForWorkflowCompletion, -) +from .composites import ReleasePhaseGoal from .state import ReleaseState, StateSyncer logger = logging.getLogger(__name__) @@ -38,45 +30,25 @@ def create_root_node( # Get package and workflow package = state.packages["docker"] - workflow = package.build package_meta = package.meta release_meta = state.meta - root = Sequence("Workflow Goal", False) - workflow_run = Selector("Workflow Run", False) - - is_workflow_successful = IsWorkflowSuccessful("Is Workflow Successful?", workflow) - identify_workflow = FindWorkflowByUUID( - "Identify Workflow Goal", workflow, package_meta, github_client, "DOCKER" - ) - trigger_workflow = TriggerWorkflowGoal( - "Trigger Workflow Goal", - workflow, - package_meta, - release_meta, - github_client, - "DOCKER", - ) - identify_target_ref = IdentifyTargetRefGoal( - 
"Identify Target Ref Goal", package_meta, release_meta, "DOCKER" + # Create build phase goal + build_phase = ReleasePhaseGoal( + phase_name="build", + workflow=package.build, + artifact_name="build-result", + package_meta=package_meta, + release_meta=release_meta, + github_client=github_client, + log_prefix="DOCKER", ) - wait_for_completion = WaitForWorkflowCompletion( - "Workflow Completion Goal", workflow, package_meta, github_client, "DOCKER" - ) - workflow_run.add_children( - [ - wait_for_completion, - identify_workflow, - trigger_workflow, - identify_target_ref, - ] - ) - root.add_children([workflow_run, is_workflow_successful]) - return root + + return build_phase async def async_tick_tock( - tree: py_trees.trees.BehaviourTree, state_syncer: StateSyncer, period: float = 3.0 + tree: py_trees.trees.BehaviourTree, state_syncer: StateSyncer, cutoff: int = 100 ) -> None: """Drive Behaviour tree using async event loop @@ -95,8 +67,13 @@ async def async_tick_tock( ) tree.tick() count_no_tasks_loop = 0 + count = 0 while True: + count += 1 state_syncer.sync() + if count > cutoff: + logger.info(f"The Tree has not converged, hit cutoff limit {cutoff}") + break print( py_trees.display.unicode_tree( tree.root, show_status=True, show_only_visited=False @@ -108,7 +85,7 @@ async def async_tick_tock( count_no_tasks_loop += 1 # tick the tree one more time in case flipped status would lead to new tasks if count_no_tasks_loop > 1: - logger.info(f"The Tree converged to {tree.root.status}") + logger.info(f"The Tree has converged to {tree.root.status}") break else: count_no_tasks_loop = 0 diff --git a/src/tests/test_tree.py b/src/tests/test_tree.py index aabfd30..b380436 100644 --- a/src/tests/test_tree.py +++ b/src/tests/test_tree.py @@ -6,7 +6,7 @@ import py_trees -from redis_release.bht.composites import TriggerWorkflowGoal +from redis_release.bht.composites import GetResultGoal, TriggerWorkflowGoal from redis_release.bht.state import PackageMeta, ReleaseMeta, Workflow from 
redis_release.github_client_async import GitHubClientAsync @@ -87,3 +87,182 @@ async def test_trigger_workflow_goal_handles_trigger_failure() -> None: assert ( tree.root.status == py_trees.common.Status.FAILURE ), "Tree should end in FAILURE state" + + +async def test_get_result_goal_with_existing_artifacts() -> None: + """Test GetResultGoal when artifacts already exist. + + This test verifies: + 1. When artifacts exist, ExtractArtifactResult is called + 2. The result is extracted and stored in workflow.result + 3. GetWorkflowArtifactsList is not called + """ + # Setup state + workflow = Workflow( + workflow_file="test.yml", + run_id=123, + artifacts={"test-artifact": {"id": 456}}, + ) + package_meta = PackageMeta(repo="test/repo") + + # Mock GitHub client + github_client = MagicMock(spec=GitHubClientAsync) + github_client.download_and_extract_json_result = AsyncMock( + return_value={"key": "value"} + ) + + # Create the composite + get_result_goal = GetResultGoal( + name="Get Result Goal", + workflow=workflow, + artifact_name="test-artifact", + package_meta=package_meta, + github_client=github_client, + ) + + # Setup tree + tree = py_trees.trees.BehaviourTree(root=get_result_goal) + tree.setup(timeout=15) + + # Run the tree + await async_tick_tock(tree, cutoff=10) + + # Assertions + assert workflow.result == {"key": "value"}, "Result should be extracted" + github_client.download_and_extract_json_result.assert_called_once() + + +async def test_get_result_goal_downloads_artifacts_first() -> None: + """Test GetResultGoal downloads artifacts when they don't exist. + + This test verifies: + 1. When artifacts don't exist, GetWorkflowArtifactsList is called + 2. 
The artifacts list is downloaded and stored in workflow.artifacts + """ + # Setup state + workflow = Workflow( + workflow_file="test.yml", + run_id=123, + artifacts={}, # No artifacts initially + ) + package_meta = PackageMeta(repo="test/repo") + + # Mock GitHub client + github_client = MagicMock(spec=GitHubClientAsync) + github_client.get_workflow_artifacts = AsyncMock( + return_value={"test-artifact": {"id": 456}} + ) + + # Create the composite + get_result_goal = GetResultGoal( + name="Get Result Goal", + workflow=workflow, + artifact_name="test-artifact", + package_meta=package_meta, + github_client=github_client, + ) + + # Setup tree + tree = py_trees.trees.BehaviourTree(root=get_result_goal) + tree.setup(timeout=15) + + # Run the tree + await async_tick_tock(tree, cutoff=10) + + # Assertions + assert workflow.artifacts == { + "test-artifact": {"id": 456} + }, "Artifacts should be downloaded" + github_client.get_workflow_artifacts.assert_called_once_with("test/repo", 123) + + +async def test_get_result_goal_handles_download_failure() -> None: + """Test GetResultGoal handles artifact download failure. + + This test verifies: + 1. When GetWorkflowArtifactsList fails, artifacts_download_failed flag is set + 2. 
The tree ends in FAILURE state + """ + # Setup state + workflow = Workflow( + workflow_file="test.yml", + run_id=123, + artifacts={}, + ) + package_meta = PackageMeta(repo="test/repo") + + # Mock GitHub client + github_client = MagicMock(spec=GitHubClientAsync) + github_client.get_workflow_artifacts = AsyncMock( + side_effect=Exception("Download failed") + ) + + # Create the composite + get_result_goal = GetResultGoal( + name="Get Result Goal", + workflow=workflow, + artifact_name="test-artifact", + package_meta=package_meta, + github_client=github_client, + ) + + # Setup tree + tree = py_trees.trees.BehaviourTree(root=get_result_goal) + tree.setup(timeout=15) + + # Run the tree + await async_tick_tock(tree, cutoff=10) + + # Assertions + assert tree.root.status == py_trees.common.Status.FAILURE + assert workflow.ephemeral.artifacts_download_failed is True + github_client.get_workflow_artifacts.assert_called_once() + + +async def test_get_result_goal_handles_extract_failure() -> None: + """Test GetResultGoal handles result extraction failure and falls back. + + This test verifies: + 1. When ExtractArtifactResult fails, extract_result_failed flag is set + 2. The Selector falls back to GetWorkflowArtifactsList + 3. 
Artifacts are downloaded but goal fails because no result was extracted + """ + # Setup state + workflow = Workflow( + workflow_file="test.yml", + run_id=123, + artifacts={"test-artifact": {"id": 456}}, + ) + package_meta = PackageMeta(repo="test/repo") + + # Mock GitHub client + github_client = MagicMock(spec=GitHubClientAsync) + github_client.download_and_extract_json_result = AsyncMock(return_value=None) + github_client.get_workflow_artifacts = AsyncMock( + return_value={"test-artifact": {"id": 456}} + ) + + # Create the composite + get_result_goal = GetResultGoal( + name="Get Result Goal", + workflow=workflow, + artifact_name="test-artifact", + package_meta=package_meta, + github_client=github_client, + ) + + # Setup tree + tree = py_trees.trees.BehaviourTree(root=get_result_goal) + tree.setup(timeout=15) + + # Run the tree + await async_tick_tock(tree, cutoff=10) + + # Assertions - Goal fails because no result was extracted (even though artifacts were downloaded) + assert workflow.ephemeral.extract_result_failed is True + assert workflow.artifacts == { + "test-artifact": {"id": 456} + }, "Artifacts should be downloaded" + # Both methods should be called - extract fails, then download succeeds + github_client.download_and_extract_json_result.assert_called_once() + github_client.get_workflow_artifacts.assert_called_once() From 77fd7e23895c6315e6987d8ebd479927c288058b Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Wed, 8 Oct 2025 16:36:54 +0300 Subject: [PATCH 12/39] Introduce backchaining Use log_prefix only in the logs Use tree handlers and simplify async_tick_tock Fix artifact download errors --- src/redis_release/bht/backchain.py | 103 +++++++++ src/redis_release/bht/behaviours.py | 271 +++++++++++------------ src/redis_release/bht/composites.py | 321 +++++----------------------- src/redis_release/bht/decorators.py | 13 +- src/redis_release/bht/ppas.py | 168 +++++++++++++++ src/redis_release/bht/state.py | 2 +- src/redis_release/bht/tree.py | 116 
++++++---- src/redis_release/cli.py | 32 ++- src/redis_release/logging_config.py | 1 + src/tests/test_tree.py | 24 ++- 10 files changed, 574 insertions(+), 477 deletions(-) create mode 100644 src/redis_release/bht/backchain.py create mode 100644 src/redis_release/bht/ppas.py diff --git a/src/redis_release/bht/backchain.py b/src/redis_release/bht/backchain.py new file mode 100644 index 0000000..091dda9 --- /dev/null +++ b/src/redis_release/bht/backchain.py @@ -0,0 +1,103 @@ +import logging +from typing import Optional, Union + +from py_trees.behaviour import Behaviour +from py_trees.composites import Selector, Sequence + +logger = logging.getLogger(__name__) + + +def find_chain_anchor_point( + root: Behaviour, +) -> Sequence: + for child in root.children: + if len(child.children) > 1: + return find_chain_anchor_point(child) + if isinstance(root, Sequence): + return root + else: + raise Exception("No chain anchor_point found") + + +def latch_chains(*chains: Union[Selector, Sequence]) -> None: + assert len(chains) >= 2 + first = chains[0] + for chain in chains[1:]: + latch_chain_to_chain(first, chain) + first = chain + + +def latch_chain_to_chain( + first: Behaviour, + next: Union[Selector, Sequence], +) -> None: + """Latch two chains together. Both are expected to be formed using PPAs. + + If precondition exists in the anchor point, it is replaced by the next chain. + Otherwise the next chain is added as a leftmost child to the anchor point. + + If the next chain is a sequence, its children are merged into the anchor point. 
+ + Args: + ppa: PPA composite to latch to + link: Link composite to latch + """ + anchor_point = find_chain_anchor_point(first) + next_postcondition: Optional[Behaviour] = None + anchor_precondition: Optional[Behaviour] = None + + if type(next) == Selector and len(next.children) > 0: + next_postcondition = next.children[0] + + assert len(anchor_point.children) == 1 or len(anchor_point.children) == 2 + anchor_precondition = anchor_point.children[0] + + # If anchor point has both precondition and action, remove anchor_precondition if it matches the next_postcondition + # very weak check that the anchor_precondition is the same as the next_postcondition: + if ( + len(anchor_point.children) == 2 + and next_postcondition is not None + and type(next_postcondition) == type(anchor_precondition) + and next_postcondition.name == anchor_precondition.name + ): + anchor_point.children.pop(0) + logger.debug(f"Removed precondition from PPA {anchor_precondition.name}") + + if type(next) == Sequence: + # If next is a sequence, merge next's children into achor_point sequence to the left + for child in reversed(next.children): + child.parent = anchor_point + anchor_point.children.insert(0, child) + else: + next.parent = anchor_point + anchor_point.children.insert(0, next) + + +def create_PPA( + name: str, + action: Behaviour, + postcondition: Optional[Behaviour] = None, + precondition: Optional[Behaviour] = None, +) -> Union[Sequence, Selector]: + """Create a PPA (Precondition-Postcondition-Action) composite.""" + + sequence = Sequence( + name=f"{name}", + memory=False, + children=[], + ) + if precondition is not None: + sequence.add_child(precondition) + sequence.add_child(action) + + if postcondition is not None: + selector = Selector( + name=f"{name} Goal", + memory=False, + children=[], + ) + selector.add_child(postcondition) + selector.add_child(sequence) + return selector + else: + return sequence diff --git a/src/redis_release/bht/behaviours.py 
b/src/redis_release/bht/behaviours.py index 52f82ae..a91ebdc 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -15,7 +15,8 @@ from token import OP from typing import Any, Dict, Optional -import py_trees +from py_trees.behaviour import Behaviour +from py_trees.common import Status from pydantic import BaseModel from ..github_client_async import GitHubClientAsync @@ -26,25 +27,29 @@ logger = logging.getLogger(__name__) -class LoggingAction(py_trees.behaviour.Behaviour): +class LoggingAction(Behaviour): logger: PyTreesLoggerWrapper - def __init__(self, name: str) -> None: + def __init__(self, name: str, log_prefix: str = "") -> None: super().__init__(name=name) - self.logger = PyTreesLoggerWrapper(logging.getLogger(self.name)) + if log_prefix != "": + log_prefix = f"{log_prefix}." + self.logger = PyTreesLoggerWrapper( + logging.getLogger(f"{log_prefix}{self.name}") + ) - def log_exception_and_return_failure(self, e: Exception) -> py_trees.common.Status: + def log_exception_and_return_failure(self, e: Exception) -> Status: self.logger.error(f"[red]failed with exception:[/red] {type(e).__name__}: {e}") # use the underlying logger to get the full traceback self.logger._logger.error(f"[red]Full traceback:[/red]", exc_info=True) - return py_trees.common.Status.FAILURE + return Status.FAILURE class ReleaseAction(LoggingAction): task: Optional[asyncio.Task[Any]] = None - def __init__(self, name: str) -> None: - super().__init__(name=name) + def __init__(self, name: str, log_prefix: str = "") -> None: + super().__init__(name=name, log_prefix=log_prefix) def check_task_exists(self) -> bool: if self.task is None: @@ -58,21 +63,21 @@ def check_task_exists(self) -> bool: class IdentifyTargetRef(ReleaseAction): def __init__( - self, - name: str, - package_meta: PackageMeta, + self, name: str, package_meta: PackageMeta, log_prefix: str = "" ) -> None: self.package_meta = package_meta - super().__init__(name=name) + 
super().__init__(name=name, log_prefix=log_prefix) - def update(self) -> py_trees.common.Status: + def update(self) -> Status: + if self.package_meta.ref is not None: + return Status.SUCCESS # For now, just set a hardcoded ref self.package_meta.ref = "release/8.2" self.logger.info( f"[green]Target ref identified:[/green] {self.package_meta.ref}" ) self.feedback_message = f"Target ref set to {self.package_meta.ref}" - return py_trees.common.Status.SUCCESS + return Status.SUCCESS class TriggerWorkflow(ReleaseAction): @@ -83,13 +88,14 @@ def __init__( package_meta: PackageMeta, release_meta: ReleaseMeta, github_client: GitHubClientAsync, + log_prefix: str = "", ) -> None: self.github_client = github_client self.workflow = workflow self.package_meta = package_meta self.release_meta = release_meta self.task: Optional[asyncio.Task[bool]] = None - super().__init__(name=name) + super().__init__(name=name, log_prefix=log_prefix) def initialise(self) -> None: self.workflow.uuid = str(uuid.uuid4()) @@ -111,12 +117,12 @@ def initialise(self) -> None: ) ) - def update(self) -> py_trees.common.Status: + def update(self) -> Status: try: assert self.task is not None if not self.task.done(): - return py_trees.common.Status.RUNNING + return Status.RUNNING self.task.result() self.workflow.triggered_at = datetime.now() @@ -124,12 +130,12 @@ def update(self) -> py_trees.common.Status: f"[green]Workflow triggered successfully:[/green] {self.workflow.uuid}" ) self.feedback_message = "workflow triggered" - return py_trees.common.Status.SUCCESS + return Status.SUCCESS except Exception as e: self.feedback_message = "failed to trigger workflow" return self.log_exception_and_return_failure(e) - def terminate(self, new_status: py_trees.common.Status) -> None: + def terminate(self, new_status: Status) -> None: # TODO: Cancel task pass @@ -141,12 +147,13 @@ def __init__( workflow: Workflow, github_client: GitHubClientAsync, package_meta: PackageMeta, + log_prefix: str = "", ) -> None: 
self.github_client = github_client self.workflow = workflow self.package_meta = package_meta - super().__init__(name=name) + super().__init__(name=name, log_prefix=log_prefix) def initialise(self) -> None: if self.workflow.uuid is None: @@ -161,17 +168,17 @@ def initialise(self) -> None: ) ) - def update(self) -> py_trees.common.Status: + def update(self) -> Status: try: assert self.task is not None if not self.task.done(): - return py_trees.common.Status.RUNNING + return Status.RUNNING result = self.task.result() if result is None: self.logger.error("[red]Workflow not found[/red]") - return py_trees.common.Status.FAILURE + return Status.FAILURE self.workflow.run_id = result.run_id self.logger.info( @@ -180,7 +187,7 @@ def update(self) -> py_trees.common.Status: self.feedback_message = ( f"Workflow identified, run_id: {self.workflow.run_id}" ) - return py_trees.common.Status.SUCCESS + return Status.SUCCESS except Exception as e: return self.log_exception_and_return_failure(e) @@ -192,11 +199,12 @@ def __init__( workflow: Workflow, github_client: GitHubClientAsync, package_meta: PackageMeta, + log_prefix: str = "", ) -> None: self.github_client = github_client self.workflow = workflow self.package_meta = package_meta - super().__init__(name=name) + super().__init__(name=name, log_prefix=log_prefix) def initialise(self) -> None: if self.workflow.run_id is None: @@ -211,12 +219,12 @@ def initialise(self) -> None: ) ) - def update(self) -> py_trees.common.Status: + def update(self) -> Status: try: assert self.task is not None if not self.task.done(): - return py_trees.common.Status.RUNNING + return Status.RUNNING result = self.task.result() self.workflow.status = result.status @@ -224,116 +232,31 @@ def update(self) -> py_trees.common.Status: self.feedback_message = ( f" {self.workflow.status}, {self.workflow.conclusion}" ) - return py_trees.common.Status.SUCCESS + return Status.SUCCESS except Exception as e: return self.log_exception_and_return_failure(e) -class 
Sleep(py_trees.behaviour.Behaviour): +class Sleep(LoggingAction): task: Optional[asyncio.Task[None]] = None - def __init__(self, name: str, sleep_time: float) -> None: + def __init__(self, name: str, sleep_time: float, log_prefix: str = "") -> None: self.sleep_time = sleep_time - super().__init__(name=name) + super().__init__(name=name, log_prefix=log_prefix) def initialise(self) -> None: self.task = asyncio.create_task(asyncio.sleep(self.sleep_time)) - def update(self) -> py_trees.common.Status: + def update(self) -> Status: if self.task is None: logger.error("[red]Task is None - behaviour was not initialized[/red]") - return py_trees.common.Status.FAILURE + return Status.FAILURE if not self.task.done(): - return py_trees.common.Status.RUNNING - - return py_trees.common.Status.SUCCESS - - -### Conditions ### - - -class IsTargetRefIdentified(py_trees.behaviour.Behaviour): - def __init__(self, name: str, package_meta: PackageMeta) -> None: - self.package_meta = package_meta - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - if self.package_meta.ref is not None: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE - - -class IsWorkflowTriggered(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - logger.debug(f"IsWorkflowTriggered: {self.workflow}") - if self.workflow.triggered_at is not None: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE - - -class IsWorkflowIdentified(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - self.logger.debug(f"{self.workflow}") - if self.workflow.run_id is not None: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE - - -class 
IsWorkflowCompleted(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - if self.workflow.status == WorkflowStatus.COMPLETED: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE + return Status.RUNNING - -class IsWorkflowSuccessful(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - if self.workflow.conclusion == WorkflowConclusion.SUCCESS: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE - - -class HasWorkflowArtifacts(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - if self.workflow.artifacts: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE - - -class HasWorkflowResult(py_trees.behaviour.Behaviour): - def __init__(self, name: str, workflow: Workflow) -> None: - self.workflow = workflow - super().__init__(name=name) - - def update(self) -> py_trees.common.Status: - if self.workflow.result is not None: - return py_trees.common.Status.SUCCESS - return py_trees.common.Status.FAILURE - - -### Actions ### + return Status.SUCCESS class GetWorkflowArtifactsList(ReleaseAction): @@ -341,13 +264,14 @@ def __init__( self, name: str, workflow: Workflow, - github_client: GitHubClientAsync, package_meta: PackageMeta, + github_client: GitHubClientAsync, + log_prefix: str = "", ) -> None: self.github_client = github_client self.workflow = workflow self.package_meta = package_meta - super().__init__(name=name) + super().__init__(name=name, log_prefix=log_prefix) def initialise(self) -> None: if self.workflow.run_id is None: @@ -362,20 +286,20 @@ def 
initialise(self) -> None: ) ) - def update(self) -> py_trees.common.Status: + def update(self) -> Status: try: assert self.task is not None if not self.task.done(): - return py_trees.common.Status.RUNNING + return Status.RUNNING result = self.task.result() self.workflow.artifacts = result self.logger.info( - f"[green]Downloaded artifacts list:[/green] {len(result)} artifacts" + f"[green]Downloaded artifacts list:[/green] {len(result)} {result} artifacts" ) self.feedback_message = f"Downloaded {len(result)} artifacts" - return py_trees.common.Status.SUCCESS + return Status.SUCCESS except Exception as e: self.feedback_message = "failed to download artifacts list" return self.log_exception_and_return_failure(e) @@ -389,12 +313,13 @@ def __init__( artifact_name: str, github_client: GitHubClientAsync, package_meta: PackageMeta, + log_prefix: str = "", ) -> None: self.github_client = github_client self.workflow = workflow self.artifact_name = artifact_name self.package_meta = package_meta - super().__init__(name=name) + super().__init__(name=name, log_prefix=log_prefix) def initialise(self) -> None: if not self.workflow.artifacts: @@ -412,12 +337,12 @@ def initialise(self) -> None: ) ) - def update(self) -> py_trees.common.Status: + def update(self) -> Status: try: assert self.task is not None if not self.task.done(): - return py_trees.common.Status.RUNNING + return Status.RUNNING result = self.task.result() if result is None: @@ -425,14 +350,98 @@ def update(self) -> py_trees.common.Status: f"[red]Failed to extract result from {self.artifact_name}[/red]" ) self.feedback_message = "failed to extract result" - return py_trees.common.Status.FAILURE + return Status.FAILURE self.workflow.result = result self.logger.info( f"[green]Extracted result from {self.artifact_name}[/green]" ) self.feedback_message = f"Extracted result from {self.artifact_name}" - return py_trees.common.Status.SUCCESS + return Status.SUCCESS except Exception as e: self.feedback_message = "failed to 
extract result" return self.log_exception_and_return_failure(e) + + +### Conditions ### + + +class IsTargetRefIdentified(LoggingAction): + def __init__( + self, name: str, package_meta: PackageMeta, log_prefix: str = "" + ) -> None: + self.package_meta = package_meta + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + if self.package_meta.ref is not None: + return Status.SUCCESS + return Status.FAILURE + + +class IsWorkflowTriggered(LoggingAction): + def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: + self.workflow = workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + self.logger.debug(f"IsWorkflowTriggered: {self.workflow}") + if self.workflow.triggered_at is not None: + return Status.SUCCESS + return Status.FAILURE + + +class IsWorkflowIdentified(LoggingAction): + def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: + self.workflow = workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + self.logger.debug(f"{self.workflow}") + if self.workflow.run_id is not None: + return Status.SUCCESS + return Status.FAILURE + + +class IsWorkflowCompleted(LoggingAction): + def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: + self.workflow = workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + if self.workflow.status == WorkflowStatus.COMPLETED: + return Status.SUCCESS + return Status.FAILURE + + +class IsWorkflowSuccessful(LoggingAction): + def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: + self.workflow = workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + if self.workflow.conclusion == WorkflowConclusion.SUCCESS: + return Status.SUCCESS + return Status.FAILURE + + +class HasWorkflowArtifacts(LoggingAction): + def __init__(self, name: str, workflow: 
Workflow, log_prefix: str = "") -> None: + self.workflow = workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + if self.workflow.artifacts is not None: + return Status.SUCCESS + return Status.FAILURE + + +class HasWorkflowResult(LoggingAction): + def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: + self.workflow = workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + if self.workflow.result is not None: + return Status.SUCCESS + return Status.FAILURE diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index b7a652d..067ec4d 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -22,7 +22,7 @@ from .state import PackageMeta, ReleaseMeta, Workflow -class FindWorkflowByUUID(Sequence): +class FindWorkflowByUUID(FlagGuard): max_retries: int = 3 poll_interval: int = 5 @@ -34,22 +34,17 @@ def __init__( github_client: GitHubClientAsync, log_prefix: str = "", ) -> None: - if log_prefix != "": - log_prefix = f"{log_prefix}." 
- - is_workflow_triggered = IsWorkflowTriggered( - f"{log_prefix}Is Workflow Triggered?", workflow - ) identify_workflow = IdentifyWorkflowByUUID( - f"{log_prefix}Identify Workflow by UUID", + "Identify Workflow by UUID", workflow, github_client, package_meta, + log_prefix=log_prefix, ) - sleep = Sleep("Sleep", self.poll_interval) + sleep = Sleep("Sleep", self.poll_interval, log_prefix=log_prefix) sleep_then_identify = Sequence( - f"{log_prefix}Sleep then Identify", + "Sleep then Identify", memory=True, children=[sleep, identify_workflow], ) @@ -58,32 +53,16 @@ def __init__( sleep_then_identify, self.max_retries, ) - identify_guard = FlagGuard( - None, + super().__init__( + None if name is "" else name, identify_loop, workflow.ephemeral, "identify_failed", - ) - identify_if_required = Selector( - f"{log_prefix}Identify if required", - False, - children=[ - IsWorkflowIdentified(f"Is Workflow Identified?", workflow), - identify_guard, - ], + log_prefix=log_prefix, ) - super().__init__( - name=name, - memory=False, - children=[ - is_workflow_triggered, - identify_if_required, - ], - ) - -class WaitForWorkflowCompletion(Sequence): +class WaitForWorkflowCompletion(FlagGuard): poll_interval: int timeout_seconds: int @@ -96,29 +75,26 @@ def __init__( log_prefix: str = "", poll_interval: int = 10, ) -> None: - if log_prefix != "": - log_prefix = f"{log_prefix}." 
- self.poll_interval = poll_interval self.timeout_seconds = workflow.timeout_minutes * 60 - is_workflow_identified = IsWorkflowIdentified( - f"Is Workflow Identified?", workflow - ) - is_workflow_completed = IsWorkflowCompleted(f"Is Workflow Completed?", workflow) update_workflow_status = UpdateWorkflowStatus( - f"{log_prefix}Update Workflow Status", workflow, github_client, package_meta + "Update Workflow Status", + workflow, + github_client, + package_meta, + log_prefix=log_prefix, ) update_workflow_status_with_pause = Sequence( - f"{log_prefix}Update Workflow Status with Pause", + "Update Workflow Status with Pause", memory=True, children=[ - Sleep("Sleep", self.poll_interval), + Sleep("Sleep", self.poll_interval, log_prefix=log_prefix), update_workflow_status, ], ) - update_workflow_loop = FlagGuard( + super().__init__( None, Timeout( f"Timeout {workflow.timeout_minutes}m", @@ -127,27 +103,11 @@ def __init__( ), workflow.ephemeral, "timed_out", - ) - - # Sequence: - super().__init__( - name=name, - memory=False, - children=[ - is_workflow_identified, - Selector( - f"Wait for completion", - False, - children=[ - is_workflow_completed, - update_workflow_loop, - ], - ), - ], + log_prefix=log_prefix, ) -class TriggerWorkflowGoal(Sequence): +class TriggerWorkflowGuarded(FlagGuard): def __init__( self, name: str, @@ -157,38 +117,20 @@ def __init__( github_client: GitHubClientAsync, log_prefix: str = "", ) -> None: - if log_prefix != "": - log_prefix = f"{log_prefix}." 
- - is_target_ref_identified = IsTargetRefIdentified( - f"{log_prefix}Is Target Ref Identified?", package_meta - ) - is_workflow_triggered = IsWorkflowTriggered( - f"{log_prefix}Is Workflow Triggered?", workflow - ) trigger_workflow = TriggerWorkflow( - f"{log_prefix}Trigger Workflow", + "Trigger Workflow", workflow, package_meta, release_meta, github_client, + log_prefix=log_prefix, ) - trigger_guard = FlagGuard( - None, + super().__init__( + None if name is "" else name, trigger_workflow, workflow.ephemeral, "trigger_failed", - ) - trigger_workflow_if_req = Selector( - f"{log_prefix}Trigger Workflow if Required", - memory=False, - children=[is_workflow_triggered, trigger_guard], - ) - - super().__init__( - name=name, - memory=False, - children=[is_target_ref_identified, trigger_workflow_if_req], + log_prefix=log_prefix, ) @@ -200,225 +142,64 @@ def __init__( release_meta: ReleaseMeta, log_prefix: str = "", ) -> None: - if log_prefix != "": - log_prefix = f"{log_prefix}." - super().__init__( None, IdentifyTargetRef( - f"{log_prefix}Identify Target Ref", + "Identify Target Ref", package_meta, + log_prefix=log_prefix, ), package_meta.ephemeral, "identify_ref_failed", + log_prefix=log_prefix, ) -class GetResultGoal(Sequence): +class DownloadArtifactsListGuarded(FlagGuard): def __init__( self, name: str, workflow: Workflow, - artifact_name: str, package_meta: PackageMeta, github_client: GitHubClientAsync, log_prefix: str = "", ) -> None: - if log_prefix != "": - log_prefix = f"{log_prefix}." 
- - # Extract Result Goal: Sequence to extract result if artifacts exist - has_artifacts = HasWorkflowArtifacts( - f"{log_prefix}Has Workflow Artifacts?", workflow - ) - - # Extract Result If Required: Selector to check if result exists or extract it - has_result = HasWorkflowResult(f"{log_prefix}Has Workflow Result?", workflow) - extract_result = ExtractArtifactResult( - f"{log_prefix}Extract Artifact Result", - workflow, - artifact_name, - github_client, - package_meta, - ) - extract_result_guard = FlagGuard( - None, - extract_result, - workflow.ephemeral, - "extract_result_failed", - ) - extract_result_if_required = Selector( - f"{log_prefix}Extract Result If Required", - memory=False, - children=[has_result, extract_result_guard], - ) - - extract_result_goal = Sequence( - f"{log_prefix}Extract Result Goal", - memory=False, - children=[has_artifacts, extract_result_if_required], - ) - - # Get Workflow Artifacts (guarded) - get_artifacts = GetWorkflowArtifactsList( - f"{log_prefix}Get Workflow Artifacts List", - workflow, - github_client, - package_meta, - ) - get_artifacts_guard = FlagGuard( - None, - get_artifacts, + super().__init__( + name if name is not "" else None, + GetWorkflowArtifactsList( + "Get Workflow Artifacts List", + workflow, + package_meta, + github_client, + log_prefix=log_prefix, + ), workflow.ephemeral, "artifacts_download_failed", + log_prefix=log_prefix, ) - # Get Result Process: Selector to try extracting or downloading artifacts - get_result_process = Selector( - f"{log_prefix}Get Result Process", - memory=False, - children=[ - extract_result_goal, - get_artifacts_guard, - ], - ) - - # Final check: Has Workflow Result? - has_workflow_result = HasWorkflowResult( - f"{log_prefix}Has Workflow Result?", workflow - ) - - super().__init__( - name=name, - memory=False, - children=[ - get_result_process, - has_workflow_result, - ], - ) - - -class WorkflowSuccessGoal(Sequence): - """Composite that ensures a workflow runs successfully. 
- - This is a Sequence that: - 1. Runs the workflow (via Workflow Run selector) - 2. Checks that the workflow succeeded - """ +class ExtractArtifactResultGuarded(FlagGuard): def __init__( self, name: str, workflow: Workflow, - package_meta: PackageMeta, - release_meta: ReleaseMeta, - github_client: GitHubClientAsync, - log_prefix: str = "", - ) -> None: - if log_prefix != "": - log_prefix = f"{log_prefix}." - - # Workflow Run: Selector to get workflow running - is_workflow_successful = IsWorkflowSuccessful( - f"{log_prefix}Is Workflow Successful?", workflow - ) - identify_workflow = FindWorkflowByUUID( - f"{log_prefix}Identify Workflow Goal", - workflow, - package_meta, - github_client, - log_prefix, - ) - trigger_workflow = TriggerWorkflowGoal( - f"{log_prefix}Trigger Workflow Goal", - workflow, - package_meta, - release_meta, - github_client, - log_prefix, - ) - identify_target_ref = IdentifyTargetRefGoal( - f"{log_prefix}Identify Target Ref Goal", - package_meta, - release_meta, - log_prefix, - ) - wait_for_completion = WaitForWorkflowCompletion( - f"{log_prefix}Workflow Completion Goal", - workflow, - package_meta, - github_client, - log_prefix, - ) - - workflow_run = Selector( - f"{log_prefix}Workflow Run", - memory=False, - children=[ - wait_for_completion, - identify_workflow, - trigger_workflow, - identify_target_ref, - ], - ) - - super().__init__( - name=name, - memory=False, - children=[ - workflow_run, - is_workflow_successful, - ], - ) - - -class ReleasePhaseGoal(Sequence): - """Composite that manages a complete release phase (build or publish). - - This is a Sequence that: - 1. Ensures the workflow runs successfully (WorkflowSuccessGoal) - 2. 
Gets the result from the workflow artifacts (GetResultGoal) - """ - - def __init__( - self, - phase_name: str, - workflow: Workflow, artifact_name: str, package_meta: PackageMeta, - release_meta: ReleaseMeta, github_client: GitHubClientAsync, log_prefix: str = "", ) -> None: - if log_prefix != "": - log_prefix = f"{log_prefix}." - - name = f"{log_prefix}{phase_name.capitalize()} Phase Goal" - phase_log_prefix = f"{log_prefix}{phase_name.upper()}" - - workflow_success = WorkflowSuccessGoal( - f"{phase_log_prefix}.Workflow Success Goal", - workflow, - package_meta, - release_meta, - github_client, - phase_log_prefix, - ) - - get_result = GetResultGoal( - f"{phase_log_prefix}.Get Result Goal", - workflow, - artifact_name, - package_meta, - github_client, - phase_log_prefix, - ) - super().__init__( - name=name, - memory=False, - children=[ - workflow_success, - get_result, - ], + name if name is not "" else None, + ExtractArtifactResult( + "Extract Artifact Result", + workflow, + artifact_name, + github_client, + package_meta, + log_prefix=log_prefix, + ), + workflow.ephemeral, + "extract_result_failed", + log_prefix=log_prefix, ) diff --git a/src/redis_release/bht/decorators.py b/src/redis_release/bht/decorators.py index 838f796..42a7abb 100644 --- a/src/redis_release/bht/decorators.py +++ b/src/redis_release/bht/decorators.py @@ -10,9 +10,15 @@ class DecoratorWithLogging(Decorator): logger: PyTreesLoggerWrapper - def __init__(self, name: str, child: behaviour.Behaviour) -> None: + def __init__( + self, name: str, child: behaviour.Behaviour, log_prefix: str = "" + ) -> None: super().__init__(name=name, child=child) - self.logger = PyTreesLoggerWrapper(logging.getLogger(self.name)) + if log_prefix != "": + log_prefix = f"{log_prefix}." 
+ self.logger = PyTreesLoggerWrapper( + logging.getLogger(f"{log_prefix}{self.name}") + ) class FlagGuard(DecoratorWithLogging): @@ -45,6 +51,7 @@ def __init__( flag_value: bool = True, guard_status: common.Status = common.Status.FAILURE, raise_on: Optional[List[common.Status]] = None, + log_prefix: str = "", ): if not hasattr(container, flag): raise ValueError( @@ -67,7 +74,7 @@ def __init__( name = f"Unless {flag}" else: name = f"If {flag}" - super(FlagGuard, self).__init__(name=name, child=child) + super(FlagGuard, self).__init__(name=name, child=child, log_prefix=log_prefix) def _is_flag_active(self) -> bool: current_flag_value = getattr(self.container, self.flag, None) diff --git a/src/redis_release/bht/ppas.py b/src/redis_release/bht/ppas.py new file mode 100644 index 0000000..62b2c3d --- /dev/null +++ b/src/redis_release/bht/ppas.py @@ -0,0 +1,168 @@ +from typing import Union + +from py_trees.composites import Selector, Sequence + +from ..github_client_async import GitHubClientAsync +from .backchain import create_PPA +from .behaviours import ( + HasWorkflowArtifacts, + HasWorkflowResult, + IsTargetRefIdentified, + IsWorkflowCompleted, + IsWorkflowIdentified, + IsWorkflowSuccessful, + IsWorkflowTriggered, +) +from .composites import ( + DownloadArtifactsListGuarded, + ExtractArtifactResultGuarded, + FindWorkflowByUUID, + IdentifyTargetRefGoal, + TriggerWorkflowGuarded, + WaitForWorkflowCompletion, +) +from .state import PackageMeta, ReleaseMeta, ReleaseState, Workflow + + +def create_workflow_success_ppa( + workflow: Workflow, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Workflow Success", + IsWorkflowSuccessful( + "Is Workflow Successful?", workflow, log_prefix=log_prefix + ), + ) + + +def create_workflow_completion_ppa( + workflow: Workflow, + package_meta: PackageMeta, + github_client: GitHubClientAsync, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Wait for Completion", + 
WaitForWorkflowCompletion( + "", + workflow, + package_meta, + github_client, + log_prefix=log_prefix, + ), + IsWorkflowCompleted(f"Is Workflow Completed?", workflow, log_prefix=log_prefix), + IsWorkflowIdentified( + f"Is Workflow Identified?", workflow, log_prefix=log_prefix + ), + ) + + +def create_find_workflow_by_uuid_ppa( + workflow: Workflow, + package_meta: PackageMeta, + github_client: GitHubClientAsync, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Find Workflow", + FindWorkflowByUUID( + "", + workflow, + package_meta, + github_client, + log_prefix=log_prefix, + ), + IsWorkflowIdentified( + "Is Workflow Identified?", workflow, log_prefix=log_prefix + ), + IsWorkflowTriggered("Is Workflow Triggered?", workflow, log_prefix=log_prefix), + ) + + +def create_trigger_workflow_ppa( + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Trigger Workflow", + TriggerWorkflowGuarded( + "", + workflow, + package_meta, + release_meta, + github_client, + log_prefix=log_prefix, + ), + IsWorkflowTriggered("Is Workflow Triggered?", workflow, log_prefix=log_prefix), + IsTargetRefIdentified( + "Is Target Ref Identified?", package_meta, log_prefix=log_prefix + ), + ) + + +def create_identify_target_ref_ppa( + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Identify Target Ref", + IdentifyTargetRefGoal( + "", + package_meta, + release_meta, + log_prefix=log_prefix, + ), + ) + + +def create_download_artifacts_ppa( + workflow: Workflow, + package_meta: PackageMeta, + github_client: GitHubClientAsync, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Download Artifacts", + DownloadArtifactsListGuarded( + "", + workflow, + package_meta, + github_client, + log_prefix=log_prefix, + ), + HasWorkflowArtifacts( + 
"Has Workflow Artifacts?", workflow, log_prefix=log_prefix + ), + IsWorkflowSuccessful( + "Is Workflow Successful?", workflow, log_prefix=log_prefix + ), + ) + + +def create_extract_artifact_result_ppa( + workflow: Workflow, + artifact_name: str, + package_meta: PackageMeta, + github_client: GitHubClientAsync, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Extract Artifact Result", + ExtractArtifactResultGuarded( + "", + workflow, + artifact_name, + package_meta, + github_client, + log_prefix=log_prefix, + ), + HasWorkflowResult("Has Workflow Result?", workflow, log_prefix=log_prefix), + HasWorkflowArtifacts( + "Has Workflow Artifacts?", workflow, log_prefix=log_prefix + ), + ) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index d942273..3a53fb3 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -38,7 +38,7 @@ class Workflow(BaseModel): timeout_minutes: int = 45 status: Optional[WorkflowStatus] = None conclusion: Optional[WorkflowConclusion] = None - artifacts: Dict[str, Any] = Field(default_factory=dict) + artifacts: Optional[Dict[str, Any]] = None result: Optional[Dict[str, Any]] = None ephemeral: WorkflowEphemeral = Field( default_factory=WorkflowEphemeral, exclude=True diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 3e9fba3..2bd1069 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -3,13 +3,22 @@ import os from typing import Tuple -import py_trees from py_trees.behaviour import Behaviour +from py_trees.display import unicode_tree +from py_trees.trees import BehaviourTree +from rich.text import Text from ..config import Config from ..github_client_async import GitHubClientAsync from .args import ReleaseArgs -from .composites import ReleasePhaseGoal +from .backchain import latch_chains +from .ppas import ( + create_find_workflow_by_uuid_ppa, + create_identify_target_ref_ppa, + create_trigger_workflow_ppa, + 
create_workflow_completion_ppa, + create_workflow_success_ppa, +) from .state import ReleaseState, StateSyncer logger = logging.getLogger(__name__) @@ -17,39 +26,77 @@ def initialize_tree_and_state( config: Config, args: ReleaseArgs -) -> Tuple[Behaviour, StateSyncer]: +) -> Tuple[BehaviourTree, StateSyncer]: github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) state_syncer = StateSyncer(config, args) - return (create_root_node(state_syncer.state, github_client), state_syncer) + root = create_root_node(state_syncer.state, github_client) + tree = BehaviourTree(root) + tree.add_pre_tick_handler(lambda _: state_syncer.sync()) + tree.add_post_tick_handler(log_tree_state_with_markup) + + return (tree, state_syncer) + + +def log_tree_state_with_markup(tree: BehaviourTree) -> None: + rich_markup = Text.from_ansi( + unicode_tree(tree.root, show_status=True, show_only_visited=False) + ).markup + logger.debug(f"\n{rich_markup}") def create_root_node( state: ReleaseState, github_client: GitHubClientAsync ) -> Behaviour: - # Get package and workflow - package = state.packages["docker"] - package_meta = package.meta - release_meta = state.meta - - # Create build phase goal - build_phase = ReleasePhaseGoal( - phase_name="build", - workflow=package.build, - artifact_name="build-result", - package_meta=package_meta, - release_meta=release_meta, - github_client=github_client, - log_prefix="DOCKER", - ) + root = create_workflow_success_tree_branch(state, github_client) + + return root - return build_phase + +def create_workflow_success_tree_branch( + state: ReleaseState, github_client: GitHubClientAsync +) -> Behaviour: + + workflow_success = create_workflow_success_ppa( + state.packages["docker"].build, + "docker", + ) + workflow_complete = create_workflow_completion_ppa( + state.packages["docker"].build, + state.packages["docker"].meta, + github_client, + "docker", + ) + find_workflow_by_uud = create_find_workflow_by_uuid_ppa( + state.packages["docker"].build, + 
state.packages["docker"].meta, + github_client, + "docker", + ) + trigger_workflow = create_trigger_workflow_ppa( + state.packages["docker"].build, + state.packages["docker"].meta, + state.meta, + github_client, + "docker", + ) + identify_target_ref = create_identify_target_ref_ppa( + state.packages["docker"].meta, + state.meta, + "docker", + ) + latch_chains( + workflow_success, + workflow_complete, + find_workflow_by_uud, + trigger_workflow, + identify_target_ref, + ) + return workflow_success -async def async_tick_tock( - tree: py_trees.trees.BehaviourTree, state_syncer: StateSyncer, cutoff: int = 100 -) -> None: +async def async_tick_tock(tree: BehaviourTree, cutoff: int = 100) -> None: """Drive Behaviour tree using async event loop The tree is always ticked once. @@ -60,25 +107,13 @@ async def async_tick_tock( considered finished. """ - print( - py_trees.display.unicode_tree( - tree.root, show_status=True, show_only_visited=False - ) - ) - tree.tick() count_no_tasks_loop = 0 - count = 0 while True: - count += 1 - state_syncer.sync() - if count > cutoff: - logger.info(f"The Tree has not converged, hit cutoff limit {cutoff}") + tree.tick() + if tree.count > cutoff: + logger.error(f"The Tree has not converged, hit cutoff limit {cutoff}") break - print( - py_trees.display.unicode_tree( - tree.root, show_status=True, show_only_visited=False - ) - ) + other_tasks = asyncio.all_tasks() - {asyncio.current_task()} logger.debug(other_tasks) if not other_tasks: @@ -90,6 +125,3 @@ async def async_tick_tock( else: count_no_tasks_loop = 0 await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) - - logger.info("tick") - tree.tick() diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index 1c80dbf..ca96e0a 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -5,15 +5,15 @@ import os from typing import List, Optional -import py_trees import typer +from py_trees.display import render_dot_tree, unicode_tree from rich.console import 
Console from rich.table import Table from redis_release.bht.args import ReleaseArgs from redis_release.bht.state import ReleaseState -from .bht.tree import async_tick_tock, create_root_node, initialize_tree_and_state +from .bht.tree import async_tick_tock, initialize_tree_and_state from .config import load_config from .logging_config import setup_logging from .models import ReleaseType @@ -195,35 +195,29 @@ def status( @app.command() -def release_bht( +def release_print_bht( release_tag: str = typer.Argument(..., help="Release tag (e.g., 8.4-m01-int1)"), config_file: Optional[str] = typer.Option( None, "--config", "-c", help="Path to config file (default: config.yaml)" ), - force_rebuild: Optional[List[str]] = typer.Option( - None, - "--force-rebuild", - help="Force rebuild for specific packages (can be specified multiple times). Use 'all' to force rebuild all packages.", - ), ) -> None: - """Run release using behaviour tree implementation.""" - setup_logging(logging.DEBUG) + """Print and render (using graphviz) the release behaviour tree.""" config_path = config_file or "config.yaml" config = load_config(config_path) # Create release args args = ReleaseArgs( release_tag=release_tag, - force_rebuild=force_rebuild or [], + force_rebuild=[], ) - root, state_syncer = initialize_tree_and_state(config, args) - tree = py_trees.trees.BehaviourTree(root) - asyncio.run(async_tick_tock(tree, state_syncer=state_syncer)) + tree, _ = initialize_tree_and_state(config, args) + render_dot_tree(tree.root) + print(unicode_tree(tree.root)) @app.command() -def release_print_bht( +def release_bht( release_tag: str = typer.Argument(..., help="Release tag (e.g., 8.4-m01-int1)"), config_file: Optional[str] = typer.Option( None, "--config", "-c", help="Path to config file (default: config.yaml)" @@ -234,7 +228,8 @@ def release_print_bht( help="Force rebuild for specific packages (can be specified multiple times). 
Use 'all' to force rebuild all packages.", ), ) -> None: - """Print and render (using graphviz) the release behaviour tree.""" + """Run release using behaviour tree implementation.""" + setup_logging(logging.DEBUG) config_path = config_file or "config.yaml" config = load_config(config_path) @@ -244,9 +239,8 @@ def release_print_bht( force_rebuild=force_rebuild or [], ) - root, _ = initialize_tree_and_state(config, args) - py_trees.display.render_dot_tree(root) - print(py_trees.display.unicode_tree(root)) + tree, _ = initialize_tree_and_state(config, args) + asyncio.run(async_tick_tock(tree)) if __name__ == "__main__": diff --git a/src/redis_release/logging_config.py b/src/redis_release/logging_config.py index e32e2f8..91f2283 100644 --- a/src/redis_release/logging_config.py +++ b/src/redis_release/logging_config.py @@ -31,6 +31,7 @@ def setup_logging(level: int = logging.INFO, show_path: bool = True) -> None: show_path=show_path, markup=True, # Enable Rich markup in log messages tracebacks_show_locals=True, # Show local variables in tracebacks + omit_repeated_times=False, # Force timestamp on every line ) ], ) diff --git a/src/tests/test_tree.py b/src/tests/test_tree.py index b380436..ca3294a 100644 --- a/src/tests/test_tree.py +++ b/src/tests/test_tree.py @@ -1,18 +1,22 @@ """Tests for behavior tree composites.""" import asyncio +import logging from typing import Optional from unittest.mock import AsyncMock, MagicMock -import py_trees +from py_trees.common import Status +from py_trees.trees import BehaviourTree from redis_release.bht.composites import GetResultGoal, TriggerWorkflowGoal from redis_release.bht.state import PackageMeta, ReleaseMeta, Workflow from redis_release.github_client_async import GitHubClientAsync +logger = logging.getLogger(__name__) + async def async_tick_tock( - tree: py_trees.trees.BehaviourTree, + tree: BehaviourTree, cutoff: int = 100, period: float = 0.01, ) -> None: @@ -70,7 +74,7 @@ async def 
test_trigger_workflow_goal_handles_trigger_failure() -> None: ) # Setup tree - tree = py_trees.trees.BehaviourTree(root=trigger_goal) + tree = BehaviourTree(root=trigger_goal) tree.setup(timeout=15) # Run the tree @@ -84,9 +88,7 @@ async def test_trigger_workflow_goal_handles_trigger_failure() -> None: f"GitHub trigger_workflow should be called exactly once, " f"but was called {github_client.trigger_workflow.call_count} times" ) - assert ( - tree.root.status == py_trees.common.Status.FAILURE - ), "Tree should end in FAILURE state" + assert tree.root.status == Status.FAILURE, "Tree should end in FAILURE state" async def test_get_result_goal_with_existing_artifacts() -> None: @@ -121,7 +123,7 @@ async def test_get_result_goal_with_existing_artifacts() -> None: ) # Setup tree - tree = py_trees.trees.BehaviourTree(root=get_result_goal) + tree = BehaviourTree(root=get_result_goal) tree.setup(timeout=15) # Run the tree @@ -163,7 +165,7 @@ async def test_get_result_goal_downloads_artifacts_first() -> None: ) # Setup tree - tree = py_trees.trees.BehaviourTree(root=get_result_goal) + tree = BehaviourTree(root=get_result_goal) tree.setup(timeout=15) # Run the tree @@ -207,14 +209,14 @@ async def test_get_result_goal_handles_download_failure() -> None: ) # Setup tree - tree = py_trees.trees.BehaviourTree(root=get_result_goal) + tree = BehaviourTree(root=get_result_goal) tree.setup(timeout=15) # Run the tree await async_tick_tock(tree, cutoff=10) # Assertions - assert tree.root.status == py_trees.common.Status.FAILURE + assert tree.root.status == Status.FAILURE assert workflow.ephemeral.artifacts_download_failed is True github_client.get_workflow_artifacts.assert_called_once() @@ -252,7 +254,7 @@ async def test_get_result_goal_handles_extract_failure() -> None: ) # Setup tree - tree = py_trees.trees.BehaviourTree(root=get_result_goal) + tree = BehaviourTree(root=get_result_goal) tree.setup(timeout=15) # Run the tree From 60804695f4c68d530262c9ed09950908dce7fcaf Mon Sep 17 
00:00:00 2001 From: Petar Shtuchkin Date: Wed, 8 Oct 2025 18:30:28 +0300 Subject: [PATCH 13/39] extend latching, print parts, full package workflow branch --- src/redis_release/bht/backchain.py | 23 +++ src/redis_release/bht/composites.py | 8 +- src/redis_release/bht/tree.py | 38 +++- src/redis_release/cli.py | 100 ++++++++++- src/tests/test_ppa.py | 120 +++++++++++++ src/tests/test_tree.py | 270 ---------------------------- 6 files changed, 276 insertions(+), 283 deletions(-) create mode 100644 src/tests/test_ppa.py delete mode 100644 src/tests/test_tree.py diff --git a/src/redis_release/bht/backchain.py b/src/redis_release/bht/backchain.py index 091dda9..10ab896 100644 --- a/src/redis_release/bht/backchain.py +++ b/src/redis_release/bht/backchain.py @@ -1,3 +1,10 @@ +"""Tools for creating behavior trees using backchaining. + +See Michele Colledanchise and Petter Ögren +Behavior Trees in Robotics and AI +3.5 Creating Deliberative BTs using Backchaining +""" + import logging from typing import Optional, Union @@ -46,8 +53,24 @@ def latch_chain_to_chain( next_postcondition: Optional[Behaviour] = None anchor_precondition: Optional[Behaviour] = None + # Trying to guess from the structure which node may be a postcondition + # Later we compare it with the anchor point precondition and when they match + # we assume it is the postcondition that could be removed as a part of backchaining if type(next) == Selector and len(next.children) > 0: next_postcondition = next.children[0] + if type(next) == Sequence: + if len(next.children) == 1: + # This is a PPA with only one action which may be interpreted as a postcondition + # Like Sequence --> IsWorkflowSuccessful? 
+ next_postcondition = next.children[0] + elif ( + len(next.children) > 1 + and type(next.children[0]) == Selector + and len(next.children[-1].children) == 0 + ): + # The same as above but when another chain is already latched to this PPA + # and therefore it now has leftmost Selector children and rightmost action + next_postcondition = next.children[-1] assert len(anchor_point.children) == 1 or len(anchor_point.children) == 2 anchor_precondition = anchor_point.children[0] diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index 067ec4d..0e8f9e2 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -54,7 +54,7 @@ def __init__( self.max_retries, ) super().__init__( - None if name is "" else name, + None if name == "" else name, identify_loop, workflow.ephemeral, "identify_failed", @@ -126,7 +126,7 @@ def __init__( log_prefix=log_prefix, ) super().__init__( - None if name is "" else name, + None if name == "" else name, trigger_workflow, workflow.ephemeral, "trigger_failed", @@ -165,7 +165,7 @@ def __init__( log_prefix: str = "", ) -> None: super().__init__( - name if name is not "" else None, + None if name == "" else name, GetWorkflowArtifactsList( "Get Workflow Artifacts List", workflow, @@ -190,7 +190,7 @@ def __init__( log_prefix: str = "", ) -> None: super().__init__( - name if name is not "" else None, + None if name == "" else name, ExtractArtifactResult( "Extract Artifact Result", workflow, diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 2bd1069..403235a 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -1,9 +1,10 @@ import asyncio import logging import os -from typing import Tuple +from typing import Tuple, Union from py_trees.behaviour import Behaviour +from py_trees.composites import Selector, Sequence from py_trees.display import unicode_tree from py_trees.trees import BehaviourTree from rich.text import Text @@ 
-13,6 +14,8 @@ from .args import ReleaseArgs from .backchain import latch_chains from .ppas import ( + create_download_artifacts_ppa, + create_extract_artifact_result_ppa, create_find_workflow_by_uuid_ppa, create_identify_target_ref_ppa, create_trigger_workflow_ppa, @@ -49,14 +52,23 @@ def create_root_node( state: ReleaseState, github_client: GitHubClientAsync ) -> Behaviour: - root = create_workflow_success_tree_branch(state, github_client) + root = create_package_workflow_tree_branch(state, github_client) return root +def create_package_workflow_tree_branch( + state: ReleaseState, github_client: GitHubClientAsync +) -> Union[Selector, Sequence]: + workflow_result = create_workflow_result_tree_branch(state, github_client) + workflow_success = create_workflow_success_tree_branch(state, github_client) + latch_chains(workflow_result, workflow_success) + return workflow_result + + def create_workflow_success_tree_branch( state: ReleaseState, github_client: GitHubClientAsync -) -> Behaviour: +) -> Union[Selector, Sequence]: workflow_success = create_workflow_success_ppa( state.packages["docker"].build, @@ -96,6 +108,26 @@ def create_workflow_success_tree_branch( return workflow_success +def create_workflow_result_tree_branch( + state: ReleaseState, github_client: GitHubClientAsync +) -> Union[Selector, Sequence]: + extract_artifact_result = create_extract_artifact_result_ppa( + state.packages["docker"].build, + "release_handle", + state.packages["docker"].meta, + github_client, + "docker", + ) + download_artifacts = create_download_artifacts_ppa( + state.packages["docker"].build, + state.packages["docker"].meta, + github_client, + "docker", + ) + latch_chains(extract_artifact_result, download_artifacts) + return extract_artifact_result + + async def async_tick_tock(tree: BehaviourTree, cutoff: int = 100) -> None: """Drive Behaviour tree using async event loop diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index ca96e0a..f6124a6 100644 --- 
a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -11,10 +11,31 @@ from rich.table import Table from redis_release.bht.args import ReleaseArgs -from redis_release.bht.state import ReleaseState +from redis_release.bht.state import ( + Package, + PackageMeta, + ReleaseMeta, + ReleaseState, + Workflow, +) -from .bht.tree import async_tick_tock, initialize_tree_and_state +from .bht.ppas import ( + create_download_artifacts_ppa, + create_extract_artifact_result_ppa, + create_find_workflow_by_uuid_ppa, + create_identify_target_ref_ppa, + create_trigger_workflow_ppa, + create_workflow_completion_ppa, + create_workflow_success_ppa, +) +from .bht.tree import ( + async_tick_tock, + create_workflow_result_tree_branch, + create_workflow_success_tree_branch, + initialize_tree_and_state, +) from .config import load_config +from .github_client_async import GitHubClientAsync from .logging_config import setup_logging from .models import ReleaseType from .orchestrator import ReleaseOrchestrator @@ -200,8 +221,14 @@ def release_print_bht( config_file: Optional[str] = typer.Option( None, "--config", "-c", help="Path to config file (default: config.yaml)" ), + name: Optional[str] = typer.Option( + None, + "--name", + "-n", + help="Name of specific PPA or tree branch to print. PPAs: 'workflow_success', 'workflow_completion', 'find_workflow', 'trigger_workflow', 'identify_target_ref', 'download_artifacts', 'extract_artifact_result'. 
Tree branches: 'workflow_success_branch', 'workflow_result_branch'", + ), ) -> None: - """Print and render (using graphviz) the release behaviour tree.""" + """Print and render (using graphviz) the release behaviour tree or a specific PPA.""" config_path = config_file or "config.yaml" config = load_config(config_path) @@ -211,9 +238,70 @@ def release_print_bht( force_rebuild=[], ) - tree, _ = initialize_tree_and_state(config, args) - render_dot_tree(tree.root) - print(unicode_tree(tree.root)) + if name: + # Print specific PPA or tree branch + github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN", "dummy")) + + # Create mock state objects for PPA creation + workflow = Workflow(workflow_file="test.yml", inputs={}) + package_meta = PackageMeta(repo="redis/redis", ref="main") + release_meta = ReleaseMeta(tag=release_tag) + log_prefix = "test" + + # Create mock ReleaseState for tree branch functions + package = Package( + meta=package_meta, + build=workflow, + publish=Workflow(workflow_file="publish.yml", inputs={}), + ) + state = ReleaseState(meta=release_meta, packages={"docker": package}) + + # Map PPA names to creation functions + ppa_creators = { + "workflow_success": lambda: create_workflow_success_ppa( + workflow, log_prefix + ), + "workflow_completion": lambda: create_workflow_completion_ppa( + workflow, package_meta, github_client, log_prefix + ), + "find_workflow": lambda: create_find_workflow_by_uuid_ppa( + workflow, package_meta, github_client, log_prefix + ), + "trigger_workflow": lambda: create_trigger_workflow_ppa( + workflow, package_meta, release_meta, github_client, log_prefix + ), + "identify_target_ref": lambda: create_identify_target_ref_ppa( + package_meta, release_meta, log_prefix + ), + "download_artifacts": lambda: create_download_artifacts_ppa( + workflow, package_meta, github_client, log_prefix + ), + "extract_artifact_result": lambda: create_extract_artifact_result_ppa( + workflow, "test-artifact", package_meta, github_client, 
log_prefix + ), + # Tree branch functions + "workflow_success_branch": lambda: create_workflow_success_tree_branch( + state, github_client + ), + "workflow_result_branch": lambda: create_workflow_result_tree_branch( + state, github_client + ), + } + + if name not in ppa_creators: + console.print( + f"[red]Error: Unknown name '{name}'. Available options: {', '.join(ppa_creators.keys())}[/red]" + ) + raise typer.Exit(1) + + ppa = ppa_creators[name]() + render_dot_tree(ppa) + print(unicode_tree(ppa)) + else: + # Print full release tree + tree, _ = initialize_tree_and_state(config, args) + render_dot_tree(tree.root) + print(unicode_tree(tree.root)) @app.command() diff --git a/src/tests/test_ppa.py b/src/tests/test_ppa.py new file mode 100644 index 0000000..3db4eae --- /dev/null +++ b/src/tests/test_ppa.py @@ -0,0 +1,120 @@ +import asyncio +from typing import Optional +from unittest.mock import AsyncMock, MagicMock + +from py_trees.trees import BehaviourTree + +from redis_release.bht.ppas import create_download_artifacts_ppa +from redis_release.bht.state import PackageMeta, ReleaseMeta, Workflow +from redis_release.bht.tree import async_tick_tock, log_tree_state_with_markup +from redis_release.github_client_async import GitHubClientAsync +from redis_release.models import WorkflowConclusion + + +async def test_download_artifacts_ppa_with_empty_artifacts() -> None: + # Setup state + workflow = Workflow(workflow_file="test.yml", inputs={}) + workflow.conclusion = WorkflowConclusion.SUCCESS # Mock successful workflow + workflow.run_id = 123 + package_meta = PackageMeta(repo="test/repo", ref="main") + release_meta = ReleaseMeta(tag="1.0.0") + assert workflow.artifacts is None + + # Mock GitHub client + github_client = MagicMock(spec=GitHubClientAsync) + github_client.get_workflow_artifacts = AsyncMock(return_value={}) + + # Create PPA + ppa = create_download_artifacts_ppa(workflow, package_meta, github_client, "") + + tree = BehaviourTree(root=ppa) + 
tree.add_post_tick_handler(log_tree_state_with_markup) + + await async_tick_tock(tree, cutoff=10) + + github_client.get_workflow_artifacts.assert_called_once() + assert workflow.artifacts == {} + + +async def test_download_artifacts_ppa_with_artifacts() -> None: + # Setup state + workflow = Workflow(workflow_file="test.yml", inputs={}) + workflow.conclusion = WorkflowConclusion.SUCCESS # Mock successful workflow + workflow.run_id = 123 + package_meta = PackageMeta(repo="test/repo", ref="main") + assert workflow.artifacts is None + + # Mock GitHub client with non-empty artifacts + github_client = MagicMock(spec=GitHubClientAsync) + mock_artifacts = { + "build-artifact": { + "id": 456, + "archive_download_url": "https://api.github.com/repos/test/repo/actions/artifacts/456/zip", + "created_at": "2024-01-01T00:00:00Z", + "expires_at": "2024-01-31T00:00:00Z", + "updated_at": "2024-01-01T00:00:00Z", + "size_in_bytes": 1024, + "digest": "abc123", + } + } + github_client.get_workflow_artifacts = AsyncMock(return_value=mock_artifacts) + + # Create PPA + ppa = create_download_artifacts_ppa(workflow, package_meta, github_client, "") + + tree = BehaviourTree(root=ppa) + tree.add_post_tick_handler(log_tree_state_with_markup) + + await async_tick_tock(tree, cutoff=10) + + github_client.get_workflow_artifacts.assert_called_once() + assert workflow.artifacts == mock_artifacts + + +async def test_download_artifacts_ppa_not_called_when_conclusion_not_success() -> None: + # Setup state + workflow = Workflow(workflow_file="test.yml", inputs={}) + workflow.conclusion = WorkflowConclusion.FAILURE # Mock failed workflow + workflow.run_id = 123 + package_meta = PackageMeta(repo="test/repo", ref="main") + + # Mock GitHub client + github_client = MagicMock(spec=GitHubClientAsync) + github_client.get_workflow_artifacts.return_value = AsyncMock(return_value={}) + + # Create PPA + ppa = create_download_artifacts_ppa(workflow, package_meta, github_client, "") + + tree = BehaviourTree(root=ppa) 
+ tree.add_post_tick_handler(log_tree_state_with_markup) + + await async_tick_tock(tree, cutoff=10) + + # GitHub client should not be called when workflow conclusion is not SUCCESS + github_client.get_workflow_artifacts.assert_not_called() + assert workflow.artifacts is None + + +async def test_download_artifacts_ppa_not_called_when_artifacts_already_empty() -> None: + # Setup state + workflow = Workflow(workflow_file="test.yml", inputs={}) + workflow.conclusion = WorkflowConclusion.SUCCESS # Mock successful workflow + workflow.run_id = 123 + workflow.artifacts = {} # Artifacts already set to empty dict + package_meta = PackageMeta(repo="test/repo", ref="main") + + # Mock GitHub client + github_client = MagicMock(spec=GitHubClientAsync) + github_client.get_workflow_artifacts.return_value = AsyncMock(return_value={}) + + # Create PPA + ppa = create_download_artifacts_ppa(workflow, package_meta, github_client, "") + + tree = BehaviourTree(root=ppa) + tree.add_post_tick_handler(log_tree_state_with_markup) + + await async_tick_tock(tree, cutoff=10) + + # GitHub client should not be called when artifacts are already set (even if empty) + github_client.get_workflow_artifacts.assert_not_called() + assert workflow.artifacts == {} diff --git a/src/tests/test_tree.py b/src/tests/test_tree.py deleted file mode 100644 index ca3294a..0000000 --- a/src/tests/test_tree.py +++ /dev/null @@ -1,270 +0,0 @@ -"""Tests for behavior tree composites.""" - -import asyncio -import logging -from typing import Optional -from unittest.mock import AsyncMock, MagicMock - -from py_trees.common import Status -from py_trees.trees import BehaviourTree - -from redis_release.bht.composites import GetResultGoal, TriggerWorkflowGoal -from redis_release.bht.state import PackageMeta, ReleaseMeta, Workflow -from redis_release.github_client_async import GitHubClientAsync - -logger = logging.getLogger(__name__) - - -async def async_tick_tock( - tree: BehaviourTree, - cutoff: int = 100, - period: float = 
0.01, -) -> None: - """Drive Behaviour tree using async event loop with tick cutoff. - - Args: - tree: The behavior tree to tick - cutoff: Maximum number of ticks before stopping - period: Time to wait between ticks (default: 0.01s) - """ - tree.tick() - tick_count = 1 - count_no_tasks_loop = 0 - - while tick_count < cutoff: - other_tasks = asyncio.all_tasks() - {asyncio.current_task()} - - if not other_tasks: - count_no_tasks_loop += 1 - # tick the tree one more time in case flipped status would lead to new tasks - if count_no_tasks_loop > 1: - break - else: - count_no_tasks_loop = 0 - await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) - - tree.tick() - tick_count += 1 - await asyncio.sleep(period) - - -async def test_trigger_workflow_goal_handles_trigger_failure() -> None: - """Test that TriggerWorkflowGoal sets trigger_failed flag when TriggerWorkflow fails. - - This test verifies: - 1. When TriggerWorkflow returns FAILURE, the trigger_failed flag is set - 2. GitHub client's trigger_workflow is called only once (not repeatedly) - """ - # Setup state - workflow = Workflow(workflow_file="test.yml", inputs={}) - package_meta = PackageMeta(repo="test/repo", ref="main") - release_meta = ReleaseMeta(tag="1.0.0") - - # Mock GitHub client - github_client = MagicMock(spec=GitHubClientAsync) - github_client.trigger_workflow = AsyncMock(side_effect=Exception("Trigger failed")) - - # Create the composite - trigger_goal = TriggerWorkflowGoal( - name="Test Trigger Goal", - workflow=workflow, - package_meta=package_meta, - release_meta=release_meta, - github_client=github_client, - ) - - # Setup tree - tree = BehaviourTree(root=trigger_goal) - tree.setup(timeout=15) - - # Run the tree - await async_tick_tock(tree, cutoff=10) - - # Assertions - assert ( - workflow.ephemeral.trigger_failed is True - ), "trigger_failed flag should be set" - assert github_client.trigger_workflow.call_count == 1, ( - f"GitHub trigger_workflow should be called exactly once, " - 
f"but was called {github_client.trigger_workflow.call_count} times" - ) - assert tree.root.status == Status.FAILURE, "Tree should end in FAILURE state" - - -async def test_get_result_goal_with_existing_artifacts() -> None: - """Test GetResultGoal when artifacts already exist. - - This test verifies: - 1. When artifacts exist, ExtractArtifactResult is called - 2. The result is extracted and stored in workflow.result - 3. GetWorkflowArtifactsList is not called - """ - # Setup state - workflow = Workflow( - workflow_file="test.yml", - run_id=123, - artifacts={"test-artifact": {"id": 456}}, - ) - package_meta = PackageMeta(repo="test/repo") - - # Mock GitHub client - github_client = MagicMock(spec=GitHubClientAsync) - github_client.download_and_extract_json_result = AsyncMock( - return_value={"key": "value"} - ) - - # Create the composite - get_result_goal = GetResultGoal( - name="Get Result Goal", - workflow=workflow, - artifact_name="test-artifact", - package_meta=package_meta, - github_client=github_client, - ) - - # Setup tree - tree = BehaviourTree(root=get_result_goal) - tree.setup(timeout=15) - - # Run the tree - await async_tick_tock(tree, cutoff=10) - - # Assertions - assert workflow.result == {"key": "value"}, "Result should be extracted" - github_client.download_and_extract_json_result.assert_called_once() - - -async def test_get_result_goal_downloads_artifacts_first() -> None: - """Test GetResultGoal downloads artifacts when they don't exist. - - This test verifies: - 1. When artifacts don't exist, GetWorkflowArtifactsList is called - 2. 
The artifacts list is downloaded and stored in workflow.artifacts - """ - # Setup state - workflow = Workflow( - workflow_file="test.yml", - run_id=123, - artifacts={}, # No artifacts initially - ) - package_meta = PackageMeta(repo="test/repo") - - # Mock GitHub client - github_client = MagicMock(spec=GitHubClientAsync) - github_client.get_workflow_artifacts = AsyncMock( - return_value={"test-artifact": {"id": 456}} - ) - - # Create the composite - get_result_goal = GetResultGoal( - name="Get Result Goal", - workflow=workflow, - artifact_name="test-artifact", - package_meta=package_meta, - github_client=github_client, - ) - - # Setup tree - tree = BehaviourTree(root=get_result_goal) - tree.setup(timeout=15) - - # Run the tree - await async_tick_tock(tree, cutoff=10) - - # Assertions - assert workflow.artifacts == { - "test-artifact": {"id": 456} - }, "Artifacts should be downloaded" - github_client.get_workflow_artifacts.assert_called_once_with("test/repo", 123) - - -async def test_get_result_goal_handles_download_failure() -> None: - """Test GetResultGoal handles artifact download failure. - - This test verifies: - 1. When GetWorkflowArtifactsList fails, artifacts_download_failed flag is set - 2. 
The tree ends in FAILURE state - """ - # Setup state - workflow = Workflow( - workflow_file="test.yml", - run_id=123, - artifacts={}, - ) - package_meta = PackageMeta(repo="test/repo") - - # Mock GitHub client - github_client = MagicMock(spec=GitHubClientAsync) - github_client.get_workflow_artifacts = AsyncMock( - side_effect=Exception("Download failed") - ) - - # Create the composite - get_result_goal = GetResultGoal( - name="Get Result Goal", - workflow=workflow, - artifact_name="test-artifact", - package_meta=package_meta, - github_client=github_client, - ) - - # Setup tree - tree = BehaviourTree(root=get_result_goal) - tree.setup(timeout=15) - - # Run the tree - await async_tick_tock(tree, cutoff=10) - - # Assertions - assert tree.root.status == Status.FAILURE - assert workflow.ephemeral.artifacts_download_failed is True - github_client.get_workflow_artifacts.assert_called_once() - - -async def test_get_result_goal_handles_extract_failure() -> None: - """Test GetResultGoal handles result extraction failure and falls back. - - This test verifies: - 1. When ExtractArtifactResult fails, extract_result_failed flag is set - 2. The Selector falls back to GetWorkflowArtifactsList - 3. 
Artifacts are downloaded but goal fails because no result was extracted - """ - # Setup state - workflow = Workflow( - workflow_file="test.yml", - run_id=123, - artifacts={"test-artifact": {"id": 456}}, - ) - package_meta = PackageMeta(repo="test/repo") - - # Mock GitHub client - github_client = MagicMock(spec=GitHubClientAsync) - github_client.download_and_extract_json_result = AsyncMock(return_value=None) - github_client.get_workflow_artifacts = AsyncMock( - return_value={"test-artifact": {"id": 456}} - ) - - # Create the composite - get_result_goal = GetResultGoal( - name="Get Result Goal", - workflow=workflow, - artifact_name="test-artifact", - package_meta=package_meta, - github_client=github_client, - ) - - # Setup tree - tree = BehaviourTree(root=get_result_goal) - tree.setup(timeout=15) - - # Run the tree - await async_tick_tock(tree, cutoff=10) - - # Assertions - Goal fails because no result was extracted (even though artifacts were downloaded) - assert workflow.ephemeral.extract_result_failed is True - assert workflow.artifacts == { - "test-artifact": {"id": 456} - }, "Artifacts should be downloaded" - # Both methods should be called - extract fails, then download succeeds - github_client.download_and_extract_json_result.assert_called_once() - github_client.get_workflow_artifacts.assert_called_once() From 2c50fac6890402c6324b3f9c1244bcc18458ea0b Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Thu, 9 Oct 2025 14:03:12 +0300 Subject: [PATCH 14/39] Separate build and publish chains, full working package process --- src/redis_release/bht/backchain.py | 14 +- src/redis_release/bht/behaviours.py | 64 +++++++ src/redis_release/bht/ppas.py | 19 ++- src/redis_release/bht/state.py | 2 + src/redis_release/bht/tree.py | 249 ++++++++++++++++++++-------- src/redis_release/cli.py | 23 ++- src/redis_release/config.py | 1 + 7 files changed, 294 insertions(+), 78 deletions(-) diff --git a/src/redis_release/bht/backchain.py b/src/redis_release/bht/backchain.py index 
10ab896..2bb441b 100644 --- a/src/redis_release/bht/backchain.py +++ b/src/redis_release/bht/backchain.py @@ -53,6 +53,8 @@ def latch_chain_to_chain( next_postcondition: Optional[Behaviour] = None anchor_precondition: Optional[Behaviour] = None + logger.debug(f"Latching {next.name} to {anchor_point.name}") + # Trying to guess from the structure which node may be a postcondition # Later we compare it with the anchor point precondition and when they match # we assume it is the postcondition that could be removed as a part of backchaining @@ -72,9 +74,13 @@ def latch_chain_to_chain( # and therefore it now has leftmost Selector children and rightmost action next_postcondition = next.children[-1] - assert len(anchor_point.children) == 1 or len(anchor_point.children) == 2 + assert len(anchor_point.children) > 0 anchor_precondition = anchor_point.children[0] + logger.debug( + f"Anchor precondition: {anchor_precondition.name}, Next postcondition: {next_postcondition.name if next_postcondition else 'None'}" + ) + # If anchor point has both precondition and action, remove anchor_precondition if it matches the next_postcondition # very weak check that the anchor_precondition is the same as the next_postcondition: if ( @@ -91,9 +97,15 @@ def latch_chain_to_chain( for child in reversed(next.children): child.parent = anchor_point anchor_point.children.insert(0, child) + logger.debug( + f"Merged child {child.name} to anchor point {anchor_point.name}" + ) else: next.parent = anchor_point anchor_point.children.insert(0, next) + logger.debug( + f"Added chain {next.name} directly to anchor point {anchor_point.name}" + ) def create_PPA( diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index a91ebdc..c53b912 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -9,7 +9,9 @@ """ import asyncio +import json import logging +import re import uuid from datetime import datetime from token import OP @@ -17,10 +19,13 
@@ from py_trees.behaviour import Behaviour from py_trees.common import Status +from py_trees.composites import Selector, Sequence +from py_trees.decorators import Inverter, Repeat, Retry, Timeout from pydantic import BaseModel from ..github_client_async import GitHubClientAsync from ..models import WorkflowConclusion, WorkflowRun, WorkflowStatus +from .decorators import FlagGuard from .logging_wrapper import PyTreesLoggerWrapper from .state import PackageMeta, ReleaseMeta, Workflow @@ -31,6 +36,8 @@ class LoggingAction(Behaviour): logger: PyTreesLoggerWrapper def __init__(self, name: str, log_prefix: str = "") -> None: + if name == "": + name = f"{self.__class__.__name__}" super().__init__(name=name) if log_prefix != "": log_prefix = f"{log_prefix}." @@ -445,3 +452,60 @@ def update(self) -> Status: if self.workflow.result is not None: return Status.SUCCESS return Status.FAILURE + + +class NeedToPublish(LoggingAction): + """Check the release type and package configuration to determine if we need to run publish workflow.""" + + def __init__( + self, + name: str, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str = "", + ) -> None: + self.package_meta = package_meta + self.release_meta = release_meta + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + # Check if this is an internal release by matching the pattern -int\d*$ in the tag + if self.release_meta.tag and re.search(r"-int\d*$", self.release_meta.tag): + self.logger.debug(f"Asssuming internal release: {self.release_meta.tag}") + if self.package_meta.publish_internal_release: + self.logger.debug( + f"Publishing internal release: {self.release_meta.tag}" + ) + return Status.SUCCESS + self.logger.debug( + f"Skip publishing internal release: {self.release_meta.tag}" + ) + return Status.FAILURE + + self.logger.debug(f"Public release: {self.release_meta.tag}") + return Status.SUCCESS + + +class AttachReleaseHandleToPublishWorkflow(LoggingAction): + def 
__init__( + self, + name: str, + build_workflow: Workflow, + publish_workflow: Workflow, + log_prefix: str = "", + ) -> None: + self.build_workflow = build_workflow + self.publish_workflow = publish_workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + if "release_handle" in self.publish_workflow.inputs: + return Status.SUCCESS + + if self.build_workflow.result is None: + return Status.FAILURE + + self.publish_workflow.inputs["release_handle"] = json.dumps( + self.build_workflow.result + ) + return Status.SUCCESS diff --git a/src/redis_release/bht/ppas.py b/src/redis_release/bht/ppas.py index 62b2c3d..60532f0 100644 --- a/src/redis_release/bht/ppas.py +++ b/src/redis_release/bht/ppas.py @@ -5,6 +5,7 @@ from ..github_client_async import GitHubClientAsync from .backchain import create_PPA from .behaviours import ( + AttachReleaseHandleToPublishWorkflow, HasWorkflowArtifacts, HasWorkflowResult, IsTargetRefIdentified, @@ -145,8 +146,8 @@ def create_download_artifacts_ppa( def create_extract_artifact_result_ppa( - workflow: Workflow, artifact_name: str, + workflow: Workflow, package_meta: PackageMeta, github_client: GitHubClientAsync, log_prefix: str, @@ -166,3 +167,19 @@ def create_extract_artifact_result_ppa( "Has Workflow Artifacts?", workflow, log_prefix=log_prefix ), ) + + +def create_attach_release_handle_ppa( + build_workflow: Workflow, + publish_workflow: Workflow, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Attach Release Handle", + AttachReleaseHandleToPublishWorkflow( + "Attach Release Handle", + build_workflow, + publish_workflow, + log_prefix=log_prefix, + ), + ) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 3a53fb3..508546e 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -57,6 +57,7 @@ class PackageMeta(BaseModel): repo: str = "" ref: Optional[str] = None + publish_internal_release: bool = False ephemeral: 
PackageMetaEphemeral = Field( default_factory=PackageMetaEphemeral, exclude=True ) @@ -113,6 +114,7 @@ def from_config(cls, config: Config) -> "ReleaseState": package_meta = PackageMeta( repo=package_config.repo, ref=package_config.ref, + publish_internal_release=package_config.publish_internal_release, ) # Initialize build workflow diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 403235a..5f38091 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -4,7 +4,9 @@ from typing import Tuple, Union from py_trees.behaviour import Behaviour +from py_trees.common import Status from py_trees.composites import Selector, Sequence +from py_trees.decorators import Inverter from py_trees.display import unicode_tree from py_trees.trees import BehaviourTree from rich.text import Text @@ -13,7 +15,9 @@ from ..github_client_async import GitHubClientAsync from .args import ReleaseArgs from .backchain import latch_chains +from .behaviours import NeedToPublish from .ppas import ( + create_attach_release_handle_ppa, create_download_artifacts_ppa, create_extract_artifact_result_ppa, create_find_workflow_by_uuid_ppa, @@ -22,11 +26,44 @@ create_workflow_completion_ppa, create_workflow_success_ppa, ) -from .state import ReleaseState, StateSyncer +from .state import ( + Package, + PackageMeta, + ReleaseMeta, + ReleaseState, + StateSyncer, + Workflow, +) logger = logging.getLogger(__name__) +async def async_tick_tock(tree: BehaviourTree, cutoff: int = 100) -> None: + """Drive Behaviour tree using async event loop + + The tree is always ticked once. + + Next tick happens while there is at least one task completed + or the tree is in RUNNING state. 
+ + """ + while True: + tree.tick() + if tree.count > cutoff: + logger.error(f"The Tree has not converged, hit cutoff limit {cutoff}") + break + + other_tasks = asyncio.all_tasks() - {asyncio.current_task()} + logger.debug(other_tasks) + if not other_tasks: + # Let the tree continue running if it's not converged + if tree.root.status != Status.RUNNING: + logger.info(f"The Tree has converged to {tree.root.status}") + break + else: + await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) + + def initialize_tree_and_state( config: Config, args: ReleaseArgs ) -> Tuple[BehaviourTree, StateSyncer]: @@ -35,7 +72,7 @@ def initialize_tree_and_state( root = create_root_node(state_syncer.state, github_client) tree = BehaviourTree(root) - tree.add_pre_tick_handler(lambda _: state_syncer.sync()) + tree.add_post_tick_handler(lambda _: state_syncer.sync()) tree.add_post_tick_handler(log_tree_state_with_markup) return (tree, state_syncer) @@ -52,108 +89,182 @@ def create_root_node( state: ReleaseState, github_client: GitHubClientAsync ) -> Behaviour: - root = create_package_workflow_tree_branch(state, github_client) + root = create_package_release_tree_branch( + state.packages["docker"], state.meta, github_client, "docker" + ) return root -def create_package_workflow_tree_branch( - state: ReleaseState, github_client: GitHubClientAsync +def create_package_release_tree_branch( + package: Package, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + package_name: str, ) -> Union[Selector, Sequence]: - workflow_result = create_workflow_result_tree_branch(state, github_client) - workflow_success = create_workflow_success_tree_branch(state, github_client) - latch_chains(workflow_result, workflow_success) - return workflow_result + build = create_build_workflow_tree_branch( + package.build, + package.meta, + release_meta, + github_client, + package_name, + ) + publish = create_publish_workflow_tree_branch( + package.build, + package.publish, + package.meta, + 
release_meta, + github_client, + package_name, + ) + package_release = Sequence( + f"Package Release: {package_name}", + memory=False, + children=[build, publish], + ) + return package_release -def create_workflow_success_tree_branch( - state: ReleaseState, github_client: GitHubClientAsync +def create_build_workflow_tree_branch( + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + package_name: str, +) -> Union[Selector, Sequence]: + return create_workflow_with_result_tree_branch( + "release_handle", + workflow, + package_meta, + release_meta, + github_client, + package_name, + ) + + +def create_publish_workflow_tree_branch( + build_workflow: Workflow, + publish_workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + package_name: str, ) -> Union[Selector, Sequence]: + workflow_result = create_workflow_with_result_tree_branch( + "release_info", + publish_workflow, + package_meta, + release_meta, + github_client, + package_name, + ) + attach_release_handle = create_attach_release_handle_ppa( + build_workflow, publish_workflow, package_name + ) + latch_chains(workflow_result, attach_release_handle) - workflow_success = create_workflow_success_ppa( - state.packages["docker"].build, - "docker", + not_need_to_publish = Inverter( + "Not", + NeedToPublish( + "Need To Publish?", package_meta, release_meta, log_prefix=package_name + ), ) + return Selector( + "Publish", memory=False, children=[not_need_to_publish, workflow_result] + ) + + +def create_workflow_with_result_tree_branch( + artifact_name: str, + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + package_name: str, +) -> Union[Selector, Sequence]: + """ + Creates a workflow process that succedes when the workflow + is successful and a result artifact is extracted and json decoded. 
+ """ + workflow_result = create_extract_result_tree_branch( + artifact_name, + workflow, + package_meta, + github_client, + package_name, + ) + workflow_complete = create_workflow_complete_tree_branch( + workflow, + package_meta, + release_meta, + github_client, + package_name, + ) + + latch_chains(workflow_result, workflow_complete) + + return workflow_result + + +def create_workflow_complete_tree_branch( + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + log_prefix: str, +) -> Union[Selector, Sequence]: workflow_complete = create_workflow_completion_ppa( - state.packages["docker"].build, - state.packages["docker"].meta, + workflow, + package_meta, github_client, - "docker", + log_prefix, ) find_workflow_by_uud = create_find_workflow_by_uuid_ppa( - state.packages["docker"].build, - state.packages["docker"].meta, + workflow, + package_meta, github_client, - "docker", + log_prefix, ) trigger_workflow = create_trigger_workflow_ppa( - state.packages["docker"].build, - state.packages["docker"].meta, - state.meta, + workflow, + package_meta, + release_meta, github_client, - "docker", + log_prefix, ) identify_target_ref = create_identify_target_ref_ppa( - state.packages["docker"].meta, - state.meta, - "docker", + package_meta, + release_meta, + log_prefix, ) latch_chains( - workflow_success, workflow_complete, find_workflow_by_uud, trigger_workflow, identify_target_ref, ) - return workflow_success + return workflow_complete -def create_workflow_result_tree_branch( - state: ReleaseState, github_client: GitHubClientAsync +def create_extract_result_tree_branch( + artifact_name: str, + workflow: Workflow, + package_meta: PackageMeta, + github_client: GitHubClientAsync, + log_prefix: str, ) -> Union[Selector, Sequence]: extract_artifact_result = create_extract_artifact_result_ppa( - state.packages["docker"].build, - "release_handle", - state.packages["docker"].meta, + artifact_name, + workflow, + 
package_meta, github_client, - "docker", + log_prefix, ) download_artifacts = create_download_artifacts_ppa( - state.packages["docker"].build, - state.packages["docker"].meta, + workflow, + package_meta, github_client, - "docker", + log_prefix, ) latch_chains(extract_artifact_result, download_artifacts) return extract_artifact_result - - -async def async_tick_tock(tree: BehaviourTree, cutoff: int = 100) -> None: - """Drive Behaviour tree using async event loop - - The tree is always ticked once. - - Next tick happens when there is at least one task completed. - If async tasks list is empty the final tick is made and if - after that the async tasks queue is still empty the tree is - considered finished. - - """ - count_no_tasks_loop = 0 - while True: - tree.tick() - if tree.count > cutoff: - logger.error(f"The Tree has not converged, hit cutoff limit {cutoff}") - break - - other_tasks = asyncio.all_tasks() - {asyncio.current_task()} - logger.debug(other_tasks) - if not other_tasks: - count_no_tasks_loop += 1 - # tick the tree one more time in case flipped status would lead to new tasks - if count_no_tasks_loop > 1: - logger.info(f"The Tree has converged to {tree.root.status}") - break - else: - count_no_tasks_loop = 0 - await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index f6124a6..8b369ef 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -30,8 +30,11 @@ ) from .bht.tree import ( async_tick_tock, - create_workflow_result_tree_branch, - create_workflow_success_tree_branch, + create_build_workflow_tree_branch, + create_extract_result_tree_branch, + create_publish_workflow_tree_branch, + create_workflow_complete_tree_branch, + create_workflow_with_result_tree_branch, initialize_tree_and_state, ) from .config import load_config @@ -277,14 +280,20 @@ def release_print_bht( workflow, package_meta, github_client, log_prefix ), "extract_artifact_result": lambda: 
create_extract_artifact_result_ppa( - workflow, "test-artifact", package_meta, github_client, log_prefix + "test-artifact", workflow, package_meta, github_client, log_prefix ), # Tree branch functions - "workflow_success_branch": lambda: create_workflow_success_tree_branch( - state, github_client + "workflow_complete_branch": lambda: create_workflow_complete_tree_branch( + workflow, package_meta, release_meta, github_client, "" ), - "workflow_result_branch": lambda: create_workflow_result_tree_branch( - state, github_client + "workflow_with_result_branch": lambda: create_workflow_with_result_tree_branch( + "artifact", workflow, package_meta, release_meta, github_client, "" + ), + "publish_worflow_branch": lambda: create_publish_workflow_tree_branch( + workflow, workflow, package_meta, release_meta, github_client, "" + ), + "build_workflow_branch": lambda: create_build_workflow_tree_branch( + workflow, package_meta, release_meta, github_client, "" ), } diff --git a/src/redis_release/config.py b/src/redis_release/config.py index a8dd7cc..062e38e 100644 --- a/src/redis_release/config.py +++ b/src/redis_release/config.py @@ -13,6 +13,7 @@ class PackageConfig(BaseModel): repo: str ref: Optional[str] = None workflow_branch: str = "autodetect" + publish_internal_release: bool = False build_workflow: Union[str, bool] = Field(default=False) build_timeout_minutes: int = Field(default=45) build_inputs: Dict[str, str] = Field(default_factory=dict) From 96bd0cb3000499c398af3e9b2240f2d7d22194f0 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Thu, 9 Oct 2025 18:00:43 +0300 Subject: [PATCH 15/39] Sync state to s3, configure third party logging --- src/redis_release/bht/state.py | 290 ++++++++++++++++++++++++++-- src/redis_release/bht/tree.py | 29 ++- src/redis_release/cli.py | 5 +- src/redis_release/logging_config.py | 17 +- src/redis_release/state_manager.py | 99 ++-------- src/tests/test_state.py | 50 ++--- 6 files changed, 357 insertions(+), 133 deletions(-) diff --git 
a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 508546e..573f897 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -1,13 +1,16 @@ import json import logging +import uuid from datetime import datetime from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, Optional, Protocol, Union +from botocore.exceptions import ClientError from pydantic import BaseModel, Field from rich.pretty import pretty_repr from redis_release.models import WorkflowConclusion, WorkflowStatus +from redis_release.state_manager import S3Backed, logger from ..config import Config @@ -155,19 +158,39 @@ def from_json(cls, data: Union[str, Dict, Path]) -> "ReleaseState": class StateSyncer: - """Syncs ReleaseState to file only when changed.""" + """Syncs ReleaseState to storage backend only when changed. + + Can be used as a context manager to automatically acquire and release locks. + """ def __init__( self, + storage: StateStorage, config: Config, - args: Optional["ReleaseArgs"] = None, - file_path: Union[str, Path] = "state.json", + args: "ReleaseArgs", ): + self.tag = args.release_tag + self.storage = storage self.config = config self.args = args - self.file_path = Path(file_path) self.last_dump: Optional[str] = None self._state: Optional[ReleaseState] = None + self._lock_acquired = False + + def __enter__(self) -> "StateSyncer": + """Acquire lock when entering context.""" + if not self.storage.acquire_lock(self.tag): + raise RuntimeError(f"Failed to acquire lock for tag: {self.tag}") + self._lock_acquired = True + logger.info(f"Lock acquired for tag: {self.tag}") + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + """Release lock when exiting context.""" + if self._lock_acquired: + self.storage.release_lock(self.tag) + self._lock_acquired = False + logger.info(f"Lock released for tag: {self.tag}") @property def state(self) -> 
ReleaseState: @@ -175,9 +198,8 @@ def state(self) -> ReleaseState: loaded = self.load() if loaded is None: self._state = ReleaseState.from_config(self.config) - # Set tag from args when creating from config - if self.args: - self._state.meta.tag = self.args.release_tag + # Set tag when creating from config + self._state.meta.tag = self.tag else: self._state = loaded @@ -200,22 +222,258 @@ def state(self) -> ReleaseState: return self._state def load(self) -> Optional[ReleaseState]: - if not self.file_path.exists(): + """Load state from storage backend.""" + state_data = self.storage.get(self.tag) + if state_data is None: return None - with open(self.file_path, "r") as f: - json_data = json.load(f) - - state = ReleaseState(**json_data) + state = ReleaseState(**state_data) self.last_dump = state.model_dump_json(indent=2) return state def sync(self) -> None: - """Save state to file if changed since last sync.""" + """Save state to storage backend if changed since last sync.""" current_dump = self.state.model_dump_json(indent=2) if current_dump != self.last_dump: self.last_dump = current_dump - with open(self.file_path, "w") as f: - f.write(current_dump) + state_dict = json.loads(current_dump) + self.storage.put(self.tag, state_dict) logger.debug("State saved") + + +class StateStorage(Protocol): + """Protocol for state storage backends.""" + + def get(self, tag: str) -> Optional[dict]: + """Load state data by tag. + + Args: + tag: Release tag + + Returns: + State dict or None if not found + """ + ... + + def put(self, tag: str, state: dict) -> None: + """Save state data by tag. + + Args: + tag: Release tag + state: State dict to save + """ + ... + + def acquire_lock(self, tag: str) -> bool: + """Acquire a lock for the release process. + + Args: + tag: Release tag + + Returns: + True if lock acquired successfully + """ + ... + + def release_lock(self, tag: str) -> bool: + """Release a lock for the release process. 
+ + Args: + tag: Release tag + + Returns: + True if lock released successfully + """ + ... + + +class InMemoryStateStorage: + """In-memory state storage for testing.""" + + def __init__(self) -> None: + self._storage: Dict[str, dict] = {} + self._locks: Dict[str, bool] = {} + + def get(self, tag: str) -> Optional[dict]: + """Load state data by tag.""" + return self._storage.get(tag) + + def put(self, tag: str, state: dict) -> None: + """Save state data by tag.""" + self._storage[tag] = state + + def acquire_lock(self, tag: str) -> bool: + """Acquire a lock for the release process.""" + if self._locks.get(tag, False): + return False + self._locks[tag] = True + return True + + def release_lock(self, tag: str) -> bool: + """Release a lock for the release process.""" + self._locks[tag] = False + return True + + +class S3StateStorage(S3Backed): + def __init__( + self, + bucket_name: Optional[str] = None, + aws_region: str = "us-east-1", + aws_profile: Optional[str] = None, + owner: Optional[str] = None, + ): + super().__init__(bucket_name, False, aws_region, aws_profile) + # Generate UUID for this instance to use as lock owner + self.owner = owner if owner else str(uuid.uuid4()) + + def get(self, tag: str) -> Optional[dict]: + """Load blackboard data from S3. 
+ + Args: + tag: Release tag + + Returns: + ReleaseState object or None if not found + """ + state_key = f"release-state/{tag}-blackboard.json" + logger.info(f"Loading blackboard for tag: {tag}") + + if self.s3_client is None: + raise RuntimeError("S3 client not initialized") + + try: + response = self.s3_client.get_object(Bucket=self.bucket_name, Key=state_key) + state_data: dict = json.loads(response["Body"].read().decode("utf-8")) + + logger.info("Blackboard loaded successfully") + + return state_data + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + logger.info(f"No existing blackboard found for tag: {tag}") + return None + else: + logger.error(f"Failed to load blackboard: {e}") + raise + + def put(self, tag: str, state: dict) -> None: + """Save release state to S3. + + Args: + state: ReleaseState object to save + """ + state_key = f"release-state/{tag}-blackboard.json" + logger.info(f"Saving blackboard for tag: {tag}") + + if self.s3_client is None: + raise RuntimeError("S3 client not initialized") + + state_json = json.dumps(state, indent=2, default=str) + + try: + self.s3_client.put_object( + Bucket=self.bucket_name, + Key=state_key, + Body=state_json, + ContentType="application/json", + Metadata={ + "tag": tag, + }, + ) + + logger.info("Blackboard saved successfully") + + except ClientError as e: + logger.error(f"Failed to save blackboard: {e}") + raise + + def acquire_lock(self, tag: str) -> bool: + """Acquire a lock for the release process. 
+ + Args: + tag: Release tag + + Returns: + True if lock acquired successfully + """ + lock_key = f"release-locks/{tag}.lock" + logger.info(f"Acquiring lock for tag: {tag}") + + if self.s3_client is None: + raise RuntimeError("S3 client not initialized") + + lock_data = { + "tag": tag, + "owner": self.owner, + "acquired_at": datetime.now().isoformat(), + } + + try: + self.s3_client.put_object( + Bucket=self.bucket_name, + Key=lock_key, + Body=json.dumps(lock_data, indent=2), + ContentType="application/json", + # fail if object already exists + IfNoneMatch="*", + ) + + logger.info("Lock acquired successfully") + return True + + except ClientError as e: + if e.response["Error"]["Code"] == "PreconditionFailed": + try: + response = self.s3_client.get_object( + Bucket=self.bucket_name, Key=lock_key + ) + existing_lock = json.loads(response["Body"].read().decode("utf-8")) + logger.warning( + f"Lock already held by: {existing_lock.get('owner', 'unknown')}, " + f"acquired at: {existing_lock.get('acquired_at', 'unknown')}" + ) + except: + logger.warning("Lock exists but couldn't read details") + return False + else: + logger.error(f"Failed to acquire lock: {e}") + raise + + def release_lock(self, tag: str) -> bool: + """Release a lock for the release process. 
+ + Args: + tag: Release tag + + Returns: + True if lock released successfully + """ + lock_key = f"release-locks/{tag}.lock" + logger.info(f"Releasing lock for tag: {tag}") + + if self.s3_client is None: + raise RuntimeError("S3 client not initialized") + + try: + # check if we own the lock + response = self.s3_client.get_object(Bucket=self.bucket_name, Key=lock_key) + lock_data = json.loads(response["Body"].read().decode("utf-8")) + + if lock_data.get("owner") != self.owner: + logger.error(f"Cannot release lock owned by: {lock_data.get('owner')}") + return False + + self.s3_client.delete_object(Bucket=self.bucket_name, Key=lock_key) + logger.info("Lock released successfully") + return True + + except ClientError as e: + if e.response["Error"]["Code"] == "NoSuchKey": + logger.info(f"No lock found for tag: {tag}") + return True + else: + logger.error(f"Failed to release lock: {e}") + raise diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 5f38091..a41ea34 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -1,7 +1,8 @@ import asyncio import logging import os -from typing import Tuple, Union +from contextlib import contextmanager +from typing import Any, Iterator, Tuple, Union from py_trees.behaviour import Behaviour from py_trees.common import Status @@ -31,6 +32,7 @@ PackageMeta, ReleaseMeta, ReleaseState, + S3StateStorage, StateSyncer, Workflow, ) @@ -64,18 +66,27 @@ async def async_tick_tock(tree: BehaviourTree, cutoff: int = 100) -> None: await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) +@contextmanager def initialize_tree_and_state( config: Config, args: ReleaseArgs -) -> Tuple[BehaviourTree, StateSyncer]: +) -> Iterator[Tuple[BehaviourTree, StateSyncer]]: github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) - state_syncer = StateSyncer(config, args) - root = create_root_node(state_syncer.state, github_client) - tree = BehaviourTree(root) - 
tree.add_post_tick_handler(lambda _: state_syncer.sync()) - tree.add_post_tick_handler(log_tree_state_with_markup) - - return (tree, state_syncer) + # Create S3 storage backend + storage = S3StateStorage() + + # Create state syncer with storage backend and acquire lock + with StateSyncer( + storage=storage, + config=config, + args=args, + ) as state_syncer: + root = create_root_node(state_syncer.state, github_client) + tree = BehaviourTree(root) + tree.add_post_tick_handler(lambda _: state_syncer.sync()) + tree.add_post_tick_handler(log_tree_state_with_markup) + + yield (tree, state_syncer) def log_tree_state_with_markup(tree: BehaviourTree) -> None: diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index 8b369ef..144a901 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -336,8 +336,9 @@ def release_bht( force_rebuild=force_rebuild or [], ) - tree, _ = initialize_tree_and_state(config, args) - asyncio.run(async_tick_tock(tree)) + # Use context manager version with automatic lock management + with initialize_tree_and_state(config, args) as (tree, _): + asyncio.run(async_tick_tock(tree)) if __name__ == "__main__": diff --git a/src/redis_release/logging_config.py b/src/redis_release/logging_config.py index 91f2283..1470d79 100644 --- a/src/redis_release/logging_config.py +++ b/src/redis_release/logging_config.py @@ -5,12 +5,17 @@ from rich.logging import RichHandler -def setup_logging(level: int = logging.INFO, show_path: bool = True) -> None: +def setup_logging( + level: int = logging.INFO, + show_path: bool = True, + third_party_level: int = logging.WARNING, +) -> None: """Configure logging with Rich handler. Args: level: Logging level (e.g., logging.INFO, logging.DEBUG) show_path: Whether to show file path and line numbers in logs + third_party_level: Logging level for third-party libraries (botocore, boto3, etc.) 
Example: >>> from redis_release.logging_config import setup_logging @@ -18,6 +23,9 @@ def setup_logging(level: int = logging.INFO, show_path: bool = True) -> None: >>> setup_logging(level=logging.DEBUG) >>> logger = logging.getLogger(__name__) >>> logger.info("[blue]Hello[/blue] [green]World[/green]") + + # To see botocore debug logs: + >>> setup_logging(level=logging.DEBUG, third_party_level=logging.DEBUG) """ logging.basicConfig( level=level, @@ -37,5 +45,8 @@ def setup_logging(level: int = logging.INFO, show_path: bool = True) -> None: ) # Optionally reduce noise from some verbose libraries - logging.getLogger("asyncio").setLevel(logging.WARNING) - logging.getLogger("aiohttp").setLevel(logging.WARNING) + logging.getLogger("asyncio").setLevel(third_party_level) + logging.getLogger("aiohttp").setLevel(third_party_level) + logging.getLogger("botocore").setLevel(third_party_level) + logging.getLogger("boto3").setLevel(third_party_level) + logging.getLogger("urllib3").setLevel(third_party_level) diff --git a/src/redis_release/state_manager.py b/src/redis_release/state_manager.py index f5e87fb..46640b9 100644 --- a/src/redis_release/state_manager.py +++ b/src/redis_release/state_manager.py @@ -1,7 +1,9 @@ """State management for Redis release automation.""" import json +import logging import os +from builtins import NotImplementedError from datetime import datetime from typing import Optional @@ -11,9 +13,8 @@ from .models import ReleaseState -from builtins import NotImplementedError - console = Console() +logger = logging.getLogger(__name__) class S3Backed: @@ -49,7 +50,7 @@ def __init__( self._local_state_cache = {} @property - def s3_client(self): + def s3_client(self) -> Optional[boto3.client]: """Lazy initialization of S3 client.""" if self._s3_client is None and not self.dry_run: try: @@ -101,82 +102,7 @@ def s3_client(self): return self._s3_client -class BlackboardStorage(S3Backed): - def __init__( - self, - bucket_name: Optional[str] = None, - dry_run: bool = 
False, - aws_region: str = "us-east-1", - aws_profile: Optional[str] = None, - ): - super().__init__(bucket_name, dry_run, aws_region, aws_profile) - - def get(self, tag: str) -> Optional[dict]: - """Load blackboard data from S3. - - Args: - tag: Release tag - - Returns: - ReleaseState object or None if not found - """ - state_key = f"release-state/{tag}-blackboard.json" - console.print(f"[blue] Loading state for tag: {tag}[/blue]") - - if self.dry_run: - raise NotImplementedError() - - try: - response = self.s3_client.get_object(Bucket=self.bucket_name, Key=state_key) - state_data = json.loads(response["Body"].read().decode("utf-8")) - - console.print(f"[green]State loaded successfully[/green]") - - return state_data - - except ClientError as e: - if e.response["Error"]["Code"] == "NoSuchKey": - console.print( - f"[yellow] No existing blackboard found for tag: {tag}[/yellow]" - ) - return None - else: - console.print(f"[red] Failed to load blackboard: {e}[/red]") - raise - - def put(self, tag: str, state: dict) -> None: - """Save release state to S3. 
- - Args: - state: ReleaseState object to save - """ - state_key = f"release-state/{tag}-blackboard.json" - console.print(f"[blue] Saving blackboard for tag: {tag}[/blue]") - - state_json = json.dumps(state, indent=2, default=str) - - if self.dry_run: - raise NotImplementedError() - - try: - self.s3_client.put_object( - Bucket=self.bucket_name, - Key=state_key, - Body=state_json, - ContentType="application/json", - Metadata={ - "tag": tag, - }, - ) - - console.print(f"[green] Blackboard saved successfully[/green]") - - except ClientError as e: - console.print(f"[red] Failed to save blackboard: {e}[/red]") - raise - - -class StateManager: +class StateManager(S3Backed): """Manages release state persistence in S3.""" def __init__( @@ -190,6 +116,9 @@ def __init__( def _create_bucket(self) -> None: """Create S3 bucket if it doesn't exist.""" + if self._s3_client is None: + raise RuntimeError("S3 client not initialized") + try: console.print(f"[blue] Creating S3 bucket: {self.bucket_name}[/blue]") @@ -239,6 +168,9 @@ def load_state(self, tag: str) -> Optional[ReleaseState]: console.print("[yellow] (DRY RUN - no state found in cache)[/yellow]") return None + if self.s3_client is None: + raise RuntimeError("S3 client not initialized") + try: response = self.s3_client.get_object(Bucket=self.bucket_name, Key=state_key) state_data = json.loads(response["Body"].read().decode("utf-8")) @@ -274,6 +206,9 @@ def save_state(self, state: ReleaseState) -> None: self._local_state_cache[state_key] = state_data return + if self.s3_client is None: + raise RuntimeError("S3 client not initialized") + try: self.s3_client.put_object( Bucket=self.bucket_name, @@ -309,6 +244,9 @@ def acquire_lock(self, tag: str, owner: str) -> bool: console.print("[yellow] (DRY RUN - lock acquired)[/yellow]") return True + if self.s3_client is None: + raise RuntimeError("S3 client not initialized") + lock_data = { "tag": tag, "owner": owner, @@ -365,6 +303,9 @@ def release_lock(self, tag: str, owner: str) -> 
bool: console.print("[yellow] (DRY RUN - lock released)[/yellow]") return True + if self.s3_client is None: + raise RuntimeError("S3 client not initialized") + try: # check if we own the lock response = self.s3_client.get_object(Bucket=self.bucket_name, Key=lock_key) diff --git a/src/tests/test_state.py b/src/tests/test_state.py index 5b40035..b87daf5 100644 --- a/src/tests/test_state.py +++ b/src/tests/test_state.py @@ -6,7 +6,12 @@ import pytest from redis_release.bht.args import ReleaseArgs -from redis_release.bht.state import ReleaseState, StateSyncer, Workflow +from redis_release.bht.state import ( + InMemoryStateStorage, + ReleaseState, + StateSyncer, + Workflow, +) from redis_release.config import Config, PackageConfig @@ -456,7 +461,7 @@ def test_ephemeral_not_serialized(self) -> None: class TestStateSyncerWithArgs: """Test cases for StateSyncer with ReleaseArgs.""" - def test_state_syncer_sets_tag_from_args(self, tmp_path: Path) -> None: + def test_state_syncer_sets_tag_from_args(self) -> None: """Test that StateSyncer sets tag from ReleaseArgs when creating from config.""" config = Config( version=1, @@ -470,12 +475,12 @@ def test_state_syncer_sets_tag_from_args(self, tmp_path: Path) -> None: ) args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=[]) - state_file = tmp_path / "state.json" - syncer = StateSyncer(config, args, file_path=state_file) + storage = InMemoryStateStorage() + syncer = StateSyncer(storage=storage, config=config, args=args) assert syncer.state.meta.tag == "8.4-m01" - def test_state_syncer_sets_force_rebuild_from_args(self, tmp_path: Path) -> None: + def test_state_syncer_sets_force_rebuild_from_args(self) -> None: """Test that StateSyncer sets force_rebuild flags from ReleaseArgs.""" config = Config( version=1, @@ -494,15 +499,13 @@ def test_state_syncer_sets_force_rebuild_from_args(self, tmp_path: Path) -> None ) args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker"]) - state_file = tmp_path / "state.json" - syncer = 
StateSyncer(config, args, file_path=state_file) + storage = InMemoryStateStorage() + syncer = StateSyncer(storage=storage, config=config, args=args) assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True assert syncer.state.packages["redis"].meta.ephemeral.force_rebuild is False - def test_state_syncer_sets_multiple_force_rebuild_from_args( - self, tmp_path: Path - ) -> None: + def test_state_syncer_sets_multiple_force_rebuild_from_args(self) -> None: """Test that StateSyncer sets multiple force_rebuild flags from ReleaseArgs.""" config = Config( version=1, @@ -526,14 +529,14 @@ def test_state_syncer_sets_multiple_force_rebuild_from_args( ) args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker", "snap"]) - state_file = tmp_path / "state.json" - syncer = StateSyncer(config, args, file_path=state_file) + storage = InMemoryStateStorage() + syncer = StateSyncer(storage=storage, config=config, args=args) assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True assert syncer.state.packages["redis"].meta.ephemeral.force_rebuild is False assert syncer.state.packages["snap"].meta.ephemeral.force_rebuild is True - def test_state_syncer_without_args(self, tmp_path: Path) -> None: + def test_state_syncer_without_args(self) -> None: """Test that StateSyncer works without ReleaseArgs.""" config = Config( version=1, @@ -546,15 +549,16 @@ def test_state_syncer_without_args(self, tmp_path: Path) -> None: }, ) - state_file = tmp_path / "state.json" - syncer = StateSyncer(config, args=None, file_path=state_file) + args = ReleaseArgs(release_tag="test-tag", force_rebuild=[]) + storage = InMemoryStateStorage() + syncer = StateSyncer(storage=storage, config=config, args=args) - assert syncer.state.meta.tag is None + assert syncer.state.meta.tag == "test-tag" assert ( syncer.state.packages["test-package"].meta.ephemeral.force_rebuild is False ) - def test_state_syncer_force_rebuild_all(self, tmp_path: Path) -> None: + def 
test_state_syncer_force_rebuild_all(self) -> None: """Test that StateSyncer sets force_rebuild for all packages when 'all' is specified.""" config = Config( version=1, @@ -578,17 +582,15 @@ def test_state_syncer_force_rebuild_all(self, tmp_path: Path) -> None: ) args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["all"]) - state_file = tmp_path / "state.json" - syncer = StateSyncer(config, args, file_path=state_file) + storage = InMemoryStateStorage() + syncer = StateSyncer(storage=storage, config=config, args=args) # All packages should have force_rebuild set to True assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True assert syncer.state.packages["redis"].meta.ephemeral.force_rebuild is True assert syncer.state.packages["snap"].meta.ephemeral.force_rebuild is True - def test_state_syncer_force_rebuild_all_with_other_values( - self, tmp_path: Path - ) -> None: + def test_state_syncer_force_rebuild_all_with_other_values(self) -> None: """Test that 'all' takes precedence even if other package names are specified.""" config = Config( version=1, @@ -607,8 +609,8 @@ def test_state_syncer_force_rebuild_all_with_other_values( ) args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker", "all"]) - state_file = tmp_path / "state.json" - syncer = StateSyncer(config, args, file_path=state_file) + storage = InMemoryStateStorage() + syncer = StateSyncer(storage=storage, config=config, args=args) # All packages should have force_rebuild set to True assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True From 7bd7eda20cd4183072436f33da01db5c919f1d20 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 10 Oct 2025 13:54:38 +0300 Subject: [PATCH 16/39] ParallelBarrier, support for force_rebuild and convenient restarts --- src/redis_release/bht/backchain.py | 4 +- src/redis_release/bht/behaviours.py | 59 ++++++- src/redis_release/bht/composites.py | 194 ++++++++++++++++++++++- src/redis_release/bht/state.py | 161 
+++++++++++-------- src/redis_release/bht/tree.py | 145 +++++++++++++---- src/redis_release/cli.py | 10 +- src/redis_release/state_manager.py | 92 +++++------ src/tests/test_parallel.py | 149 ++++++++++++++++++ src/tests/test_parallel_integration.py | 205 +++++++++++++++++++++++++ 9 files changed, 865 insertions(+), 154 deletions(-) create mode 100644 src/tests/test_parallel.py create mode 100644 src/tests/test_parallel_integration.py diff --git a/src/redis_release/bht/backchain.py b/src/redis_release/bht/backchain.py index 2bb441b..fa37b74 100644 --- a/src/redis_release/bht/backchain.py +++ b/src/redis_release/bht/backchain.py @@ -40,7 +40,9 @@ def latch_chain_to_chain( ) -> None: """Latch two chains together. Both are expected to be formed using PPAs. - If precondition exists in the anchor point, it is replaced by the next chain. + If both precondition in the anchor point and postcondition of the next + chain exist, and they are the same type then the precondition in the + anchor point is replaced by the next chain. Otherwise the next chain is added as a leftmost child to the anchor point. If the next chain is a sequence, its children are merged into the anchor point. 
diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index c53b912..d6714e8 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -21,13 +21,14 @@ from py_trees.common import Status from py_trees.composites import Selector, Sequence from py_trees.decorators import Inverter, Repeat, Retry, Timeout -from pydantic import BaseModel + +from redis_release.bht.state import reset_model_to_defaults from ..github_client_async import GitHubClientAsync from ..models import WorkflowConclusion, WorkflowRun, WorkflowStatus from .decorators import FlagGuard from .logging_wrapper import PyTreesLoggerWrapper -from .state import PackageMeta, ReleaseMeta, Workflow +from .state import Package, PackageMeta, ReleaseMeta, Workflow logger = logging.getLogger(__name__) @@ -115,6 +116,7 @@ def initialise(self) -> None: return self.workflow.inputs["release_tag"] = self.release_meta.tag ref = self.package_meta.ref if self.package_meta.ref is not None else "main" + self.workflow.ephemeral.trigger_attempted = True self.task = asyncio.create_task( self.github_client.trigger_workflow( self.package_meta.repo, @@ -370,6 +372,46 @@ def update(self) -> Status: return self.log_exception_and_return_failure(e) +class ResetPackageState(ReleaseAction): + def __init__( + self, + name: str, + package: Package, + default_package: Package, + log_prefix: str = "", + ) -> None: + self.package = package + self.default_package = default_package + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + reset_model_to_defaults(self.package, self.default_package) + + self.feedback_message = "Package state reset to default values" + self.logger.info(f"[green]{self.feedback_message}[/green]") + return Status.SUCCESS + + +class ResetWorkflowState(ReleaseAction): + def __init__( + self, + name: str, + workflow: Workflow, + default_workflow: Workflow, + log_prefix: str = "", + ) -> None: + self.workflow = workflow + 
self.default_workflow = default_workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: # type: ignore + reset_model_to_defaults(self.workflow, self.default_workflow) + + self.feedback_message = "Workflow state reset to default values" + self.logger.info(f"[green]{self.feedback_message}[/green]") + return Status.SUCCESS + + ### Conditions ### @@ -509,3 +551,16 @@ def update(self) -> Status: self.build_workflow.result ) return Status.SUCCESS + + +class IsForceRebuild(LoggingAction): + def __init__( + self, name: str, package_meta: PackageMeta, log_prefix: str = "" + ) -> None: + self.package_meta = package_meta + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + if self.package_meta.ephemeral.force_rebuild: + return Status.SUCCESS + return Status.FAILURE diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index 0e8f9e2..f03ba93 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -1,5 +1,10 @@ -from py_trees.composites import Selector, Sequence -from py_trees.decorators import Inverter, Repeat, Retry, Timeout +from typing import Iterator, List, Optional +from typing import Sequence as TypingSequence + +from py_trees.behaviour import Behaviour +from py_trees.common import OneShotPolicy, Status +from py_trees.composites import Composite, Selector, Sequence +from py_trees.decorators import Repeat, Retry, SuccessIsRunning, Timeout from ..github_client_async import GitHubClientAsync from .behaviours import ( @@ -14,12 +19,89 @@ IsWorkflowIdentified, IsWorkflowSuccessful, IsWorkflowTriggered, + ResetPackageState, + ResetWorkflowState, Sleep, ) from .behaviours import TriggerWorkflow as TriggerWorkflow from .behaviours import UpdateWorkflowStatus from .decorators import FlagGuard -from .state import PackageMeta, ReleaseMeta, Workflow +from .state import Package, PackageMeta, ReleaseMeta, Workflow + + +class 
ParallelBarrier(Composite): + """ + A simplified parallel composite that runs all children until convergence. + + This parallel composite: + - Ticks all children on each tick + - Skips children that have already converged (SUCCESS or FAILURE) in synchronized mode + - Returns FAILURE if any child returns FAILURE + - Returns SUCCESS if all children return SUCCESS + - Returns RUNNING if any child is still RUNNING + + Unlike py_trees.Parallel, this composite: + - Has no policy configuration (always waits for all children) + - Always operates in synchronized mode (skips converged children) + - Has simpler logic focused on the all-must-succeed pattern + + Args: + name: the composite behaviour name + children: list of children to add + """ + + def __init__( + self, + name: str, + children: Optional[TypingSequence[Behaviour]] = None, + ): + super().__init__(name, children) + + def tick(self) -> Iterator[Behaviour]: + """ + Tick all children until they converge, then determine status. + """ + # Initialise if first time + if self.status != Status.RUNNING: + # subclass (user) handling + self.initialise() + + # Handle empty children case + if not self.children: + self.current_child = None + self.stop(Status.SUCCESS) + yield self + return + + # Tick all children, skipping those that have already converged + for child in self.children: + # Skip children that have already converged (synchronized mode) + if child.status in [Status.SUCCESS, Status.FAILURE]: + continue + # Tick the child + for node in child.tick(): + yield node + + # Determine new status based on children's statuses + self.current_child = self.children[-1] + + new_status = Status.INVALID + has_running = any(child.status == Status.RUNNING for child in self.children) + if has_running: + new_status = Status.RUNNING + else: + has_failed = any(child.status == Status.FAILURE for child in self.children) + if has_failed: + new_status = Status.FAILURE + else: + new_status = Status.SUCCESS + + # If we've reached a final 
status, stop and terminate running children + if new_status != Status.RUNNING: + self.stop(new_status) + + self.status = new_status + yield self class FindWorkflowByUUID(FlagGuard): @@ -203,3 +285,109 @@ def __init__( "extract_result_failed", log_prefix=log_prefix, ) + + +class ResetPackageStateGuarded(FlagGuard): + """ + Reset package once if force_rebuild is True. + Always returns SUCCESS. + """ + + def __init__( + self, + name: str, + package: Package, + default_package: Package, + log_prefix: str = "", + ) -> None: + super().__init__( + None if name == "" else name, + ResetPackageState( + "Reset Package State", + package, + default_package, + log_prefix=log_prefix, + ), + package.meta.ephemeral, + "force_rebuild", + flag_value=False, + raise_on=[Status.SUCCESS, Status.FAILURE], + guard_status=Status.SUCCESS, + log_prefix=log_prefix, + ) + + +class RestartPackageGuarded(FlagGuard): + """ + Reset package if we didn't trigger the workflow in current run + This is intended to be used for build workflow since if build has failed + we have to reset not only build but also publish which effectively means + we have to reset the entire package and restart from scratch. + + When reset is made we return RUNNING to give the tree opportunity to run the workflow again. 
+ """ + + def __init__( + self, + name: str, + package: Package, + workflow: Workflow, + default_package: Package, + log_prefix: str = "", + ) -> None: + reset_package_state = ResetPackageState( + "Reset Package State", + package, + default_package, + log_prefix=log_prefix, + ) + reset_package_state_wrapped = SuccessIsRunning( + "Success is Running", reset_package_state + ) + super().__init__( + None if name == "" else name, + reset_package_state_wrapped, + workflow.ephemeral, + "trigger_attempted", + flag_value=True, + raise_on=[], + guard_status=Status.FAILURE, + log_prefix=log_prefix, + ) + + +class RestartWorkflowGuarded(FlagGuard): + """ + Reset workflow if we didn't trigger the workflow in current run + + This will only reset the workflow state + + When reset is made we return RUNNING to give the tree opportunity to run the workflow again. + """ + + def __init__( + self, + name: str, + workflow: Workflow, + default_workflow: Workflow, + log_prefix: str = "", + ) -> None: + reset_workflow_state = ResetWorkflowState( + "Reset Workflow State", + workflow, + default_workflow, + log_prefix=log_prefix, + ) + reset_workflow_state_wrapped = SuccessIsRunning( + "Success is Running", reset_workflow_state + ) + super().__init__( + None if name == "" else name, + reset_workflow_state_wrapped, + workflow.ephemeral, + "trigger_attempted", + flag_value=True, + raise_on=[], + guard_status=Status.FAILURE, + log_prefix=log_prefix, + ) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 573f897..221499b 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -24,6 +24,7 @@ class WorkflowEphemeral(BaseModel): """Ephemeral workflow state that is not persisted.""" trigger_failed: bool = False + trigger_attempted: bool = False identify_failed: bool = False timed_out: bool = False artifacts_download_failed: bool = False @@ -157,6 +158,52 @@ def from_json(cls, data: Union[str, Dict, Path]) -> "ReleaseState": return 
cls(**json_data) +class StateStorage(Protocol): + """Protocol for state storage backends.""" + + def get(self, tag: str) -> Optional[dict]: + """Load state data by tag. + + Args: + tag: Release tag + + Returns: + State dict or None if not found + """ + ... + + def put(self, tag: str, state: dict) -> None: + """Save state data by tag. + + Args: + tag: Release tag + state: State dict to save + """ + ... + + def acquire_lock(self, tag: str) -> bool: + """Acquire a lock for the release process. + + Args: + tag: Release tag + + Returns: + True if lock acquired successfully + """ + ... + + def release_lock(self, tag: str) -> bool: + """Release a lock for the release process. + + Args: + tag: Release tag + + Returns: + True if lock released successfully + """ + ... + + class StateSyncer: """Syncs ReleaseState to storage backend only when changed. @@ -197,30 +244,34 @@ def state(self) -> ReleaseState: if self._state is None: loaded = self.load() if loaded is None: - self._state = ReleaseState.from_config(self.config) - # Set tag when creating from config - self._state.meta.tag = self.tag + self._state = self.default_state() else: self._state = loaded - - # Apply force_rebuild flags from args - if self.args: - if "all" in self.args.force_rebuild: - # Set force_rebuild for all packages - for package_name in self._state.packages: - self._state.packages[ - package_name - ].meta.ephemeral.force_rebuild = True - else: - # Set force_rebuild for specific packages - for package_name in self.args.force_rebuild: - if package_name in self._state.packages: - self._state.packages[ - package_name - ].meta.ephemeral.force_rebuild = True + self.apply_args(self._state) logger.debug(pretty_repr(self._state)) return self._state + def default_state(self) -> ReleaseState: + """Create default state from config.""" + state = ReleaseState.from_config(self.config) + self.apply_args(state) + return state + + def apply_args(self, state: ReleaseState) -> None: + """Apply arguments to state.""" + 
state.meta.tag = self.tag + + if self.args: + if "all" in self.args.force_rebuild: + # Set force_rebuild for all packages + for package_name in state.packages: + state.packages[package_name].meta.ephemeral.force_rebuild = True + else: + # Set force_rebuild for specific packages + for package_name in self.args.force_rebuild: + if package_name in state.packages: + state.packages[package_name].meta.ephemeral.force_rebuild = True + def load(self) -> Optional[ReleaseState]: """Load state from storage backend.""" state_data = self.storage.get(self.tag) @@ -242,52 +293,6 @@ def sync(self) -> None: logger.debug("State saved") -class StateStorage(Protocol): - """Protocol for state storage backends.""" - - def get(self, tag: str) -> Optional[dict]: - """Load state data by tag. - - Args: - tag: Release tag - - Returns: - State dict or None if not found - """ - ... - - def put(self, tag: str, state: dict) -> None: - """Save state data by tag. - - Args: - tag: Release tag - state: State dict to save - """ - ... - - def acquire_lock(self, tag: str) -> bool: - """Acquire a lock for the release process. - - Args: - tag: Release tag - - Returns: - True if lock acquired successfully - """ - ... - - def release_lock(self, tag: str) -> bool: - """Release a lock for the release process. - - Args: - tag: Release tag - - Returns: - True if lock released successfully - """ - ... 
- - class InMemoryStateStorage: """In-memory state storage for testing.""" @@ -477,3 +482,29 @@ def release_lock(self, tag: str) -> bool: else: logger.error(f"Failed to release lock: {e}") raise + + +def reset_model_to_defaults(target: BaseModel, default: BaseModel) -> None: + """Recursively reset a BaseModel in-place with values from default model.""" + for field_name, field_info in default.model_fields.items(): + default_value = getattr(default, field_name) + + if isinstance(default_value, BaseModel): + # Recursive case: field is a BaseModel + target_value = getattr(target, field_name) + if isinstance(target_value, BaseModel): + reset_model_to_defaults(target_value, default_value) + else: + raise TypeError( + f"Field '{field_name}' type mismatch: expected {type(default_value)}, got {type(target_value)}" + ) + else: + # Base case: field is not a BaseModel, copy the value + if isinstance(default_value, (list, dict, set)): + # Deep copy collections + import copy + + setattr(target, field_name, copy.deepcopy(default_value)) + else: + # Simple value, copy directly + setattr(target, field_name, default_value) diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index a41ea34..5038f8c 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -2,7 +2,7 @@ import logging import os from contextlib import contextmanager -from typing import Any, Iterator, Tuple, Union +from typing import Any, Iterator, Optional, Set, Tuple, Union from py_trees.behaviour import Behaviour from py_trees.common import Status @@ -10,6 +10,7 @@ from py_trees.decorators import Inverter from py_trees.display import unicode_tree from py_trees.trees import BehaviourTree +from py_trees.visitors import SnapshotVisitor from rich.text import Text from ..config import Config @@ -17,6 +18,12 @@ from .args import ReleaseArgs from .backchain import latch_chains from .behaviours import NeedToPublish +from .composites import ( + ParallelBarrier, + 
ResetPackageStateGuarded, + RestartPackageGuarded, + RestartWorkflowGuarded, +) from .ppas import ( create_attach_release_handle_ppa, create_download_artifacts_ppa, @@ -33,6 +40,7 @@ ReleaseMeta, ReleaseState, S3StateStorage, + StateStorage, StateSyncer, Workflow, ) @@ -56,24 +64,41 @@ async def async_tick_tock(tree: BehaviourTree, cutoff: int = 100) -> None: break other_tasks = asyncio.all_tasks() - {asyncio.current_task()} - logger.debug(other_tasks) + _debug_log_active_tasks(other_tasks) + if not other_tasks: # Let the tree continue running if it's not converged if tree.root.status != Status.RUNNING: - logger.info(f"The Tree has converged to {tree.root.status}") + color = "green" if tree.root.status == Status.SUCCESS else "red" + logger.info( + f"[bold][white]The Tree has converged to [/white][{color}]{tree.root.status}[/{color}][/bold]" + ) break else: await asyncio.wait(other_tasks, return_when=asyncio.FIRST_COMPLETED) +def _debug_log_active_tasks(other_tasks: Set[asyncio.Task[Any]]) -> None: + for task in other_tasks: + task_name = getattr(task, "get_name", lambda: "unnamed")() + coro_name = ( + task.get_coro().__name__ + if hasattr(task.get_coro(), "__name__") + else str(task.get_coro()) + ) + logger.debug(f"Active task: {task_name} - {coro_name}") + + @contextmanager def initialize_tree_and_state( - config: Config, args: ReleaseArgs + config: Config, + args: ReleaseArgs, + storage: Optional[StateStorage] = None, ) -> Iterator[Tuple[BehaviourTree, StateSyncer]]: github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) - # Create S3 storage backend - storage = S3StateStorage() + if storage is None: + storage = S3StateStorage() # Create state syncer with storage backend and acquire lock with StateSyncer( @@ -81,8 +106,15 @@ def initialize_tree_and_state( config=config, args=args, ) as state_syncer: - root = create_root_node(state_syncer.state, github_client) + root = create_root_node( + state_syncer.state, state_syncer.default_state(), github_client 
+ ) tree = BehaviourTree(root) + + # Add snapshot visitor to track visited nodes + snapshot_visitor = SnapshotVisitor() + tree.visitors.append(snapshot_visitor) + tree.add_post_tick_handler(lambda _: state_syncer.sync()) tree.add_post_tick_handler(log_tree_state_with_markup) @@ -90,67 +122,116 @@ def initialize_tree_and_state( def log_tree_state_with_markup(tree: BehaviourTree) -> None: + # Get the snapshot visitor if it exists + snapshot_visitor = None + for visitor in tree.visitors: + if isinstance(visitor, SnapshotVisitor): + snapshot_visitor = visitor + break + + visited = snapshot_visitor.visited if snapshot_visitor else {} + previously_visited = snapshot_visitor.previously_visited if snapshot_visitor else {} + rich_markup = Text.from_ansi( - unicode_tree(tree.root, show_status=True, show_only_visited=False) + unicode_tree( + tree.root, + show_status=True, + show_only_visited=True, + visited=visited, + previously_visited=previously_visited, + ) ).markup logger.debug(f"\n{rich_markup}") def create_root_node( - state: ReleaseState, github_client: GitHubClientAsync + state: ReleaseState, default_state: ReleaseState, github_client: GitHubClientAsync ) -> Behaviour: - root = create_package_release_tree_branch( - state.packages["docker"], state.meta, github_client, "docker" + root = ParallelBarrier( + "Redis Release", + children=[], ) - + for package_name, package in state.packages.items(): + root.add_child( + create_package_release_tree_branch( + package, + state.meta, + default_state.packages[package_name], + github_client, + package_name, + ) + ) return root def create_package_release_tree_branch( package: Package, release_meta: ReleaseMeta, + default_package: Package, github_client: GitHubClientAsync, package_name: str, ) -> Union[Selector, Sequence]: build = create_build_workflow_tree_branch( - package.build, - package.meta, + package, release_meta, + default_package, github_client, package_name, ) + build.name = f"Build {package_name}" publish = 
create_publish_workflow_tree_branch( package.build, package.publish, package.meta, release_meta, + default_package.publish, github_client, package_name, ) + reset_package_state = ResetPackageStateGuarded( + "", + package, + default_package, + log_prefix=package_name, + ) + publish.name = f"Publish {package_name}" package_release = Sequence( - f"Package Release: {package_name}", + f"Package Release {package_name}", memory=False, - children=[build, publish], + children=[reset_package_state, build, publish], ) return package_release def create_build_workflow_tree_branch( - workflow: Workflow, - package_meta: PackageMeta, + package: Package, release_meta: ReleaseMeta, + default_package: Package, github_client: GitHubClientAsync, package_name: str, ) -> Union[Selector, Sequence]: - return create_workflow_with_result_tree_branch( + + build_workflow = create_workflow_with_result_tree_branch( "release_handle", - workflow, - package_meta, + package.build, + package.meta, release_meta, github_client, - package_name, + f"{package_name}.build", ) + assert isinstance(build_workflow, Selector) + + reset_package_state = RestartPackageGuarded( + "", + package, + package.build, + default_package, + log_prefix=f"{package_name}.build", + ) + build_workflow.add_child(reset_package_state) + + return build_workflow def create_publish_workflow_tree_branch( @@ -158,6 +239,7 @@ def create_publish_workflow_tree_branch( publish_workflow: Workflow, package_meta: PackageMeta, release_meta: ReleaseMeta, + default_publish_workflow: Workflow, github_client: GitHubClientAsync, package_name: str, ) -> Union[Selector, Sequence]: @@ -167,21 +249,32 @@ def create_publish_workflow_tree_branch( package_meta, release_meta, github_client, - package_name, + f"{package_name}.publish", ) attach_release_handle = create_attach_release_handle_ppa( - build_workflow, publish_workflow, package_name + build_workflow, publish_workflow, log_prefix=f"{package_name}.publish" ) latch_chains(workflow_result, 
attach_release_handle) not_need_to_publish = Inverter( "Not", NeedToPublish( - "Need To Publish?", package_meta, release_meta, log_prefix=package_name + "Need To Publish?", + package_meta, + release_meta, + log_prefix=f"{package_name}.publish", ), ) + reset_publish_workflow_state = RestartWorkflowGuarded( + "", + publish_workflow, + default_publish_workflow, + log_prefix=f"{package_name}.publish", + ) return Selector( - "Publish", memory=False, children=[not_need_to_publish, workflow_result] + "Publish", + memory=False, + children=[not_need_to_publish, workflow_result, reset_publish_workflow_state], ) diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index 144a901..d7e9a0f 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -12,6 +12,7 @@ from redis_release.bht.args import ReleaseArgs from redis_release.bht.state import ( + InMemoryStateStorage, Package, PackageMeta, ReleaseMeta, @@ -308,9 +309,12 @@ def release_print_bht( print(unicode_tree(ppa)) else: # Print full release tree - tree, _ = initialize_tree_and_state(config, args) - render_dot_tree(tree.root) - print(unicode_tree(tree.root)) + with initialize_tree_and_state(config, args, InMemoryStateStorage()) as ( + tree, + _, + ): + render_dot_tree(tree.root) + print(unicode_tree(tree.root)) @app.command() diff --git a/src/redis_release/state_manager.py b/src/redis_release/state_manager.py index 46640b9..99b1c0b 100644 --- a/src/redis_release/state_manager.py +++ b/src/redis_release/state_manager.py @@ -9,11 +9,9 @@ import boto3 from botocore.exceptions import ClientError, NoCredentialsError -from rich.console import Console from .models import ReleaseState -console = Console() logger = logging.getLogger(__name__) @@ -56,14 +54,12 @@ def s3_client(self) -> Optional[boto3.client]: try: # Try profile-based authentication first if self.aws_profile: - console.print(f"[blue]Using AWS profile: {self.aws_profile}[/blue]") + logger.info(f"Using AWS profile: {self.aws_profile}") session 
= boto3.Session(profile_name=self.aws_profile) self._s3_client = session.client("s3", region_name=self.aws_region) # Fall back to environment variables elif self.aws_access_key_id and self.aws_secret_access_key: - console.print( - "[blue]Using AWS credentials from environment variables[/blue]" - ) + logger.info("Using AWS credentials from environment variables") self._s3_client = boto3.client( "s3", aws_access_key_id=self.aws_access_key_id, @@ -72,31 +68,27 @@ def s3_client(self) -> Optional[boto3.client]: region_name=self.aws_region, ) else: - console.print("[red]AWS credentials not found[/red]") - console.print( - "[yellow]Set AWS_PROFILE or AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY environment variables[/yellow]" + logger.error("AWS credentials not found") + logger.warning( + "Set AWS_PROFILE or AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY environment variables" ) raise NoCredentialsError() # Test connection self._s3_client.head_bucket(Bucket=self.bucket_name) - console.print( - f"[green]Connected to S3 bucket: {self.bucket_name}[/green]" - ) + logger.info(f"Connected to S3 bucket: {self.bucket_name}") except ClientError as e: if e.response["Error"]["Code"] == "404": - console.print( - f"[yellow]S3 bucket not found: {self.bucket_name}[/yellow]" - ) + logger.warning(f"S3 bucket not found: {self.bucket_name}") self._create_bucket() else: - console.print(f"[red]S3 error: {e}[/red]") + logger.error(f"S3 error: {e}") raise except NoCredentialsError: raise except Exception as e: - console.print(f"[red]AWS authentication error: {e}[/red]") + logger.error(f"AWS authentication error: {e}") raise return self._s3_client @@ -120,7 +112,7 @@ def _create_bucket(self) -> None: raise RuntimeError("S3 client not initialized") try: - console.print(f"[blue] Creating S3 bucket: {self.bucket_name}[/blue]") + logger.info(f"Creating S3 bucket: {self.bucket_name}") if self.aws_region == "us-east-1": self._s3_client.create_bucket(Bucket=self.bucket_name) @@ -134,17 +126,13 @@ def 
_create_bucket(self) -> None: Bucket=self.bucket_name, VersioningConfiguration={"Status": "Enabled"} ) - console.print( - f"[green] S3 bucket created successfully: {self.bucket_name}[/green]" - ) + logger.info(f"S3 bucket created successfully: {self.bucket_name}") except ClientError as e: if e.response["Error"]["Code"] == "BucketAlreadyOwnedByYou": - console.print( - f"[yellow] Bucket already exists: {self.bucket_name}[/yellow]" - ) + logger.warning(f"Bucket already exists: {self.bucket_name}") else: - console.print(f"[red] Failed to create bucket: {e}[/red]") + logger.error(f"Failed to create bucket: {e}") raise def load_state(self, tag: str) -> Optional[ReleaseState]: @@ -157,15 +145,15 @@ def load_state(self, tag: str) -> Optional[ReleaseState]: ReleaseState object or None if not found """ state_key = f"release-state/{tag}.json" - console.print(f"[blue] Loading state for tag: {tag}[/blue]") + logger.info(f"Loading state for tag: {tag}") if self.dry_run: state_data = self._local_state_cache.get(state_key) if state_data: - console.print("[yellow] (DRY RUN - loaded from local cache)[/yellow]") + logger.debug("DRY RUN - loaded from local cache") return ReleaseState.model_validate(state_data) else: - console.print("[yellow] (DRY RUN - no state found in cache)[/yellow]") + logger.debug("DRY RUN - no state found in cache") return None if self.s3_client is None: @@ -175,18 +163,16 @@ def load_state(self, tag: str) -> Optional[ReleaseState]: response = self.s3_client.get_object(Bucket=self.bucket_name, Key=state_key) state_data = json.loads(response["Body"].read().decode("utf-8")) - console.print(f"[green]State loaded successfully[/green]") + logger.info("State loaded successfully") return ReleaseState.model_validate(state_data) except ClientError as e: if e.response["Error"]["Code"] == "NoSuchKey": - console.print( - f"[yellow] No existing state found for tag: {tag}[/yellow]" - ) + logger.warning(f"No existing state found for tag: {tag}") return None else: - 
console.print(f"[red] Failed to load state: {e}[/red]") + logger.error(f"Failed to load state: {e}") raise def save_state(self, state: ReleaseState) -> None: @@ -196,13 +182,13 @@ def save_state(self, state: ReleaseState) -> None: state: ReleaseState object to save """ state_key = f"release-state/{state.tag}.json" - console.print(f"[blue] Saving state for tag: {state.tag}[/blue]") + logger.info(f"Saving state for tag: {state.tag}") state_data = state.model_dump(mode="json") state_json = json.dumps(state_data, indent=2, default=str) if self.dry_run: - console.print("[yellow] (DRY RUN - saved to local cache)[/yellow]") + logger.debug("DRY RUN - saved to local cache") self._local_state_cache[state_key] = state_data return @@ -221,10 +207,10 @@ def save_state(self, state: ReleaseState) -> None: }, ) - console.print(f"[green] State saved successfully[/green]") + logger.info("State saved successfully") except ClientError as e: - console.print(f"[red] Failed to save state: {e}[/red]") + logger.error(f"Failed to save state: {e}") raise def acquire_lock(self, tag: str, owner: str) -> bool: @@ -238,10 +224,10 @@ def acquire_lock(self, tag: str, owner: str) -> bool: True if lock acquired successfully """ lock_key = f"release-locks/{tag}.lock" - console.print(f"[blue] Acquiring lock for tag: {tag}[/blue]") + logger.info(f"Acquiring lock for tag: {tag}") if self.dry_run: - console.print("[yellow] (DRY RUN - lock acquired)[/yellow]") + logger.debug("DRY RUN - lock acquired") return True if self.s3_client is None: @@ -263,7 +249,7 @@ def acquire_lock(self, tag: str, owner: str) -> bool: IfNoneMatch="*", ) - console.print(f"[green] Lock acquired successfully[/green]") + logger.info("Lock acquired successfully") return True except ClientError as e: @@ -273,17 +259,17 @@ def acquire_lock(self, tag: str, owner: str) -> bool: Bucket=self.bucket_name, Key=lock_key ) existing_lock = json.loads(response["Body"].read().decode("utf-8")) - console.print( - f"[red] Lock already held by: 
{existing_lock.get('owner', 'unknown')}[/red]" + logger.error( + f"Lock already held by: {existing_lock.get('owner', 'unknown')}" ) - console.print( - f"[dim] Acquired at: {existing_lock.get('acquired_at', 'unknown')}[/dim]" + logger.debug( + f"Acquired at: {existing_lock.get('acquired_at', 'unknown')}" ) except: - console.print(f"[red] Lock exists but couldn't read details[/red]") + logger.error("Lock exists but couldn't read details") return False else: - console.print(f"[red] Failed to acquire lock: {e}[/red]") + logger.error(f"Failed to acquire lock: {e}") raise def release_lock(self, tag: str, owner: str) -> bool: @@ -297,10 +283,10 @@ def release_lock(self, tag: str, owner: str) -> bool: True if lock released successfully """ lock_key = f"release-locks/{tag}.lock" - console.print(f"[blue] Releasing lock for tag: {tag}[/blue]") + logger.info(f"Releasing lock for tag: {tag}") if self.dry_run: - console.print("[yellow] (DRY RUN - lock released)[/yellow]") + logger.debug("DRY RUN - lock released") return True if self.s3_client is None: @@ -312,19 +298,17 @@ def release_lock(self, tag: str, owner: str) -> bool: lock_data = json.loads(response["Body"].read().decode("utf-8")) if lock_data.get("owner") != owner: - console.print( - f"[red] Cannot release lock owned by: {lock_data.get('owner')}[/red]" - ) + logger.error(f"Cannot release lock owned by: {lock_data.get('owner')}") return False self.s3_client.delete_object(Bucket=self.bucket_name, Key=lock_key) - console.print(f"[green] Lock released successfully[/green]") + logger.info("Lock released successfully") return True except ClientError as e: if e.response["Error"]["Code"] == "NoSuchKey": - console.print(f"[yellow] No lock found for tag: {tag}[/yellow]") + logger.warning(f"No lock found for tag: {tag}") return True else: - console.print(f"[red] Failed to release lock: {e}[/red]") + logger.error(f"Failed to release lock: {e}") raise diff --git a/src/tests/test_parallel.py b/src/tests/test_parallel.py new file mode 
100644 index 0000000..84455a8 --- /dev/null +++ b/src/tests/test_parallel.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python3 +"""Test the simplified Parallel composite.""" + +from py_trees.behaviour import Behaviour +from py_trees.common import Status + +from redis_release.bht.composites import ParallelBarrier + + +class MockBehaviour(Behaviour): + """Mock behaviour for testing.""" + + def __init__(self, name: str, return_status: Status, ticks_until_done: int = 1): + super().__init__(name) + self.return_status = return_status + self.ticks_until_done = ticks_until_done + self.tick_count = 0 + + def update(self) -> Status: + self.tick_count += 1 + if self.tick_count >= self.ticks_until_done: + return self.return_status + return Status.RUNNING + + +def test_all_success(): + """Test that parallel returns SUCCESS when all children succeed.""" + print("\n=== Test: All children succeed ===") + parallel = ParallelBarrier( + "Test Parallel", + children=[ + MockBehaviour("Child 1", Status.SUCCESS, ticks_until_done=1), + MockBehaviour("Child 2", Status.SUCCESS, ticks_until_done=2), + MockBehaviour("Child 3", Status.SUCCESS, ticks_until_done=1), + ], + ) + + # First tick - some children still running + list(parallel.tick()) + print(f"After tick 1: {parallel.status}") + print(f" Child 1: {parallel.children[0].status}") + print(f" Child 2: {parallel.children[1].status}") + print(f" Child 3: {parallel.children[2].status}") + + # Second tick - all should succeed + list(parallel.tick()) + print(f"After tick 2: {parallel.status}") + print(f" Child 1: {parallel.children[0].status}") + print(f" Child 2: {parallel.children[1].status}") + print(f" Child 3: {parallel.children[2].status}") + + assert parallel.status == Status.SUCCESS, f"Expected SUCCESS, got {parallel.status}" + print("✓ Test passed!") + + +def test_one_failure(): + """Test that parallel returns FAILURE when one child fails.""" + print("\n=== Test: One child fails ===") + parallel = ParallelBarrier( + "Test Parallel", + 
children=[ + MockBehaviour("Child 1", Status.SUCCESS, ticks_until_done=1), + MockBehaviour("Child 2", Status.FAILURE, ticks_until_done=2), + MockBehaviour("Child 3", Status.SUCCESS, ticks_until_done=1), + ], + ) + + # First tick + list(parallel.tick()) + print(f"After tick 1: {parallel.status}") + print(f" Child 1: {parallel.children[0].status}") + print(f" Child 2: {parallel.children[1].status}") + print(f" Child 3: {parallel.children[2].status}") + + # Second tick - child 2 fails + list(parallel.tick()) + print(f"After tick 2: {parallel.status}") + print(f" Child 1: {parallel.children[0].status}") + print(f" Child 2: {parallel.children[1].status}") + print(f" Child 3: {parallel.children[2].status}") + + assert parallel.status == Status.FAILURE, f"Expected FAILURE, got {parallel.status}" + print("✓ Test passed!") + + +def test_synchronized_mode(): + """Test that converged children are skipped on subsequent ticks.""" + print("\n=== Test: Synchronized mode (skip converged children) ===") + + # Create children that track how many times they're ticked + child1 = MockBehaviour("Child 1", Status.SUCCESS, ticks_until_done=1) + child2 = MockBehaviour("Child 2", Status.SUCCESS, ticks_until_done=3) + child3 = MockBehaviour("Child 3", Status.SUCCESS, ticks_until_done=1) + + parallel = ParallelBarrier("Test Parallel", children=[child1, child2, child3]) + + # First tick - child1 and child3 succeed, child2 still running + list(parallel.tick()) + print(f"After tick 1:") + print(f" Child 1: {child1.status}, tick_count={child1.tick_count}") + print(f" Child 2: {child2.status}, tick_count={child2.tick_count}") + print(f" Child 3: {child3.status}, tick_count={child3.tick_count}") + + # Second tick - only child2 should be ticked + list(parallel.tick()) + print(f"After tick 2:") + print(f" Child 1: {child1.status}, tick_count={child1.tick_count}") + print(f" Child 2: {child2.status}, tick_count={child2.tick_count}") + print(f" Child 3: {child3.status}, tick_count={child3.tick_count}") 
+ + # Third tick - only child2 should be ticked again + list(parallel.tick()) + print(f"After tick 3:") + print(f" Child 1: {child1.status}, tick_count={child1.tick_count}") + print(f" Child 2: {child2.status}, tick_count={child2.tick_count}") + print(f" Child 3: {child3.status}, tick_count={child3.tick_count}") + + # Verify that child1 and child3 were only ticked once (synchronized mode) + assert ( + child1.tick_count == 1 + ), f"Child 1 should be ticked once, got {child1.tick_count}" + assert ( + child3.tick_count == 1 + ), f"Child 3 should be ticked once, got {child3.tick_count}" + assert ( + child2.tick_count == 3 + ), f"Child 2 should be ticked 3 times, got {child2.tick_count}" + assert parallel.status == Status.SUCCESS, f"Expected SUCCESS, got {parallel.status}" + print("✓ Test passed!") + + +def test_empty_children(): + """Test that parallel with no children returns SUCCESS.""" + print("\n=== Test: Empty children ===") + parallel = ParallelBarrier("Test Parallel", children=[]) + + list(parallel.tick()) + print(f"Status: {parallel.status}") + + assert parallel.status == Status.SUCCESS, f"Expected SUCCESS, got {parallel.status}" + print("✓ Test passed!") + + +if __name__ == "__main__": + test_all_success() + test_one_failure() + test_synchronized_mode() + test_empty_children() + print("\n✅ All tests passed!") diff --git a/src/tests/test_parallel_integration.py b/src/tests/test_parallel_integration.py new file mode 100644 index 0000000..3a34217 --- /dev/null +++ b/src/tests/test_parallel_integration.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python3 +"""Integration test for the simplified Parallel composite with the tree structure.""" + +from py_trees.behaviour import Behaviour +from py_trees.common import Status +from py_trees.composites import Sequence + +from redis_release.bht.composites import ParallelBarrier + + +class SimpleAction(Behaviour): + """Simple action that succeeds after N ticks.""" + + def __init__(self, name: str, ticks_to_complete: int = 1): + 
super().__init__(name) + self.ticks_to_complete = ticks_to_complete + self.tick_count = 0 + + def update(self) -> Status: + self.tick_count += 1 + if self.tick_count >= self.ticks_to_complete: + return Status.SUCCESS + return Status.RUNNING + + +class SimpleCondition(Behaviour): + """Simple condition that always returns SUCCESS.""" + + def __init__(self, name: str): + super().__init__(name) + + def update(self) -> Status: + return Status.SUCCESS + + +def test_parallel_with_sequences(): + """Test parallel with sequence children (similar to package release structure).""" + print("\n=== Test: Parallel with Sequence children ===") + + # Create sequences that simulate package releases + package1 = Sequence( + "Package 1", + memory=True, + children=[ + SimpleCondition("Check Package 1"), + SimpleAction("Build Package 1", ticks_to_complete=2), + SimpleAction("Publish Package 1", ticks_to_complete=1), + ], + ) + + package2 = Sequence( + "Package 2", + memory=True, + children=[ + SimpleCondition("Check Package 2"), + SimpleAction("Build Package 2", ticks_to_complete=1), + SimpleAction("Publish Package 2", ticks_to_complete=2), + ], + ) + + package3 = Sequence( + "Package 3", + memory=True, + children=[ + SimpleCondition("Check Package 3"), + SimpleAction("Build Package 3", ticks_to_complete=1), + SimpleAction("Publish Package 3", ticks_to_complete=1), + ], + ) + + # Create parallel to run all packages + parallel = ParallelBarrier( + "Release All Packages", + children=[package1, package2, package3], + ) + + # Tick until completion + tick_count = 0 + while parallel.status == Status.RUNNING or tick_count == 0: + tick_count += 1 + print(f"\n--- Tick {tick_count} ---") + list(parallel.tick()) + print(f"Parallel status: {parallel.status}") + print(f" Package 1: {package1.status}") + print(f" Package 2: {package2.status}") + print(f" Package 3: {package3.status}") + + if tick_count > 10: + print("ERROR: Too many ticks!") + break + + print(f"\nFinal status: {parallel.status}") + 
assert parallel.status == Status.SUCCESS, f"Expected SUCCESS, got {parallel.status}" + assert package1.status == Status.SUCCESS + assert package2.status == Status.SUCCESS + assert package3.status == Status.SUCCESS + print("✓ Test passed!") + + +def test_parallel_with_one_failing_sequence(): + """Test parallel where one sequence fails.""" + print("\n=== Test: Parallel with one failing sequence ===") + + class FailingAction(Behaviour): + def __init__(self, name: str): + super().__init__(name) + + def update(self) -> Status: + return Status.FAILURE + + package1 = Sequence( + "Package 1", + memory=True, + children=[ + SimpleAction("Build Package 1", ticks_to_complete=1), + ], + ) + + package2 = Sequence( + "Package 2 (will fail)", + memory=True, + children=[ + SimpleAction("Build Package 2", ticks_to_complete=1), + FailingAction("Publish Package 2 (fails)"), + ], + ) + + package3 = Sequence( + "Package 3", + memory=True, + children=[ + SimpleAction("Build Package 3", ticks_to_complete=1), + ], + ) + + parallel = ParallelBarrier( + "Release All Packages", + children=[package1, package2, package3], + ) + + # Tick until completion + tick_count = 0 + while parallel.status == Status.RUNNING or tick_count == 0: + tick_count += 1 + print(f"\n--- Tick {tick_count} ---") + list(parallel.tick()) + print(f"Parallel status: {parallel.status}") + print(f" Package 1: {package1.status}") + print(f" Package 2: {package2.status}") + print(f" Package 3: {package3.status}") + + if tick_count > 10: + print("ERROR: Too many ticks!") + break + + print(f"\nFinal status: {parallel.status}") + assert parallel.status == Status.FAILURE, f"Expected FAILURE, got {parallel.status}" + print("✓ Test passed!") + + +def test_synchronized_behavior_with_sequences(): + """Test that completed sequences are not re-ticked.""" + print("\n=== Test: Synchronized behavior with sequences ===") + + # Track how many times each action is ticked + action1 = SimpleAction("Action 1", ticks_to_complete=1) + action2 = 
SimpleAction("Action 2", ticks_to_complete=3) + action3 = SimpleAction("Action 3", ticks_to_complete=1) + + seq1 = Sequence("Seq 1", memory=True, children=[action1]) + seq2 = Sequence("Seq 2", memory=True, children=[action2]) + seq3 = Sequence("Seq 3", memory=True, children=[action3]) + + parallel = ParallelBarrier("Parallel", children=[seq1, seq2, seq3]) + + # Tick until completion + tick_count = 0 + while parallel.status == Status.RUNNING or tick_count == 0: + tick_count += 1 + list(parallel.tick()) + + print(f"Total ticks: {tick_count}") + print(f"Action 1 tick count: {action1.tick_count}") + print(f"Action 2 tick count: {action2.tick_count}") + print(f"Action 3 tick count: {action3.tick_count}") + + # Verify synchronized behavior + assert ( + action1.tick_count == 1 + ), f"Action 1 should be ticked once, got {action1.tick_count}" + assert ( + action2.tick_count == 3 + ), f"Action 2 should be ticked 3 times, got {action2.tick_count}" + assert ( + action3.tick_count == 1 + ), f"Action 3 should be ticked once, got {action3.tick_count}" + assert parallel.status == Status.SUCCESS + print("✓ Test passed!") + + +if __name__ == "__main__": + test_parallel_with_sequences() + test_parallel_with_one_failing_sequence() + test_synchronized_behavior_with_sequences() + print("\n✅ All integration tests passed!") From 24c73d6f5c94b8c766549f74b7ca141283535b59 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 10 Oct 2025 14:00:17 +0300 Subject: [PATCH 17/39] Fix type warning, remove redundant tests --- src/tests/test_parallel.py | 14 +- src/tests/test_parallel_integration.py | 205 ------------------------- 2 files changed, 9 insertions(+), 210 deletions(-) delete mode 100644 src/tests/test_parallel_integration.py diff --git a/src/tests/test_parallel.py b/src/tests/test_parallel.py index 84455a8..2ba6b94 100644 --- a/src/tests/test_parallel.py +++ b/src/tests/test_parallel.py @@ -1,6 +1,8 @@ #!/usr/bin/env python3 """Test the simplified Parallel composite.""" +from typing 
import Optional + from py_trees.behaviour import Behaviour from py_trees.common import Status @@ -10,7 +12,9 @@ class MockBehaviour(Behaviour): """Mock behaviour for testing.""" - def __init__(self, name: str, return_status: Status, ticks_until_done: int = 1): + def __init__( + self, name: str, return_status: Status, ticks_until_done: int = 1 + ) -> None: super().__init__(name) self.return_status = return_status self.ticks_until_done = ticks_until_done @@ -23,7 +27,7 @@ def update(self) -> Status: return Status.RUNNING -def test_all_success(): +def test_all_success() -> None: """Test that parallel returns SUCCESS when all children succeed.""" print("\n=== Test: All children succeed ===") parallel = ParallelBarrier( @@ -53,7 +57,7 @@ def test_all_success(): print("✓ Test passed!") -def test_one_failure(): +def test_one_failure() -> None: """Test that parallel returns FAILURE when one child fails.""" print("\n=== Test: One child fails ===") parallel = ParallelBarrier( @@ -83,7 +87,7 @@ def test_one_failure(): print("✓ Test passed!") -def test_synchronized_mode(): +def test_synchronized_mode() -> None: """Test that converged children are skipped on subsequent ticks.""" print("\n=== Test: Synchronized mode (skip converged children) ===") @@ -129,7 +133,7 @@ def test_synchronized_mode(): print("✓ Test passed!") -def test_empty_children(): +def test_empty_children() -> None: """Test that parallel with no children returns SUCCESS.""" print("\n=== Test: Empty children ===") parallel = ParallelBarrier("Test Parallel", children=[]) diff --git a/src/tests/test_parallel_integration.py b/src/tests/test_parallel_integration.py deleted file mode 100644 index 3a34217..0000000 --- a/src/tests/test_parallel_integration.py +++ /dev/null @@ -1,205 +0,0 @@ -#!/usr/bin/env python3 -"""Integration test for the simplified Parallel composite with the tree structure.""" - -from py_trees.behaviour import Behaviour -from py_trees.common import Status -from py_trees.composites import Sequence 
- -from redis_release.bht.composites import ParallelBarrier - - -class SimpleAction(Behaviour): - """Simple action that succeeds after N ticks.""" - - def __init__(self, name: str, ticks_to_complete: int = 1): - super().__init__(name) - self.ticks_to_complete = ticks_to_complete - self.tick_count = 0 - - def update(self) -> Status: - self.tick_count += 1 - if self.tick_count >= self.ticks_to_complete: - return Status.SUCCESS - return Status.RUNNING - - -class SimpleCondition(Behaviour): - """Simple condition that always returns SUCCESS.""" - - def __init__(self, name: str): - super().__init__(name) - - def update(self) -> Status: - return Status.SUCCESS - - -def test_parallel_with_sequences(): - """Test parallel with sequence children (similar to package release structure).""" - print("\n=== Test: Parallel with Sequence children ===") - - # Create sequences that simulate package releases - package1 = Sequence( - "Package 1", - memory=True, - children=[ - SimpleCondition("Check Package 1"), - SimpleAction("Build Package 1", ticks_to_complete=2), - SimpleAction("Publish Package 1", ticks_to_complete=1), - ], - ) - - package2 = Sequence( - "Package 2", - memory=True, - children=[ - SimpleCondition("Check Package 2"), - SimpleAction("Build Package 2", ticks_to_complete=1), - SimpleAction("Publish Package 2", ticks_to_complete=2), - ], - ) - - package3 = Sequence( - "Package 3", - memory=True, - children=[ - SimpleCondition("Check Package 3"), - SimpleAction("Build Package 3", ticks_to_complete=1), - SimpleAction("Publish Package 3", ticks_to_complete=1), - ], - ) - - # Create parallel to run all packages - parallel = ParallelBarrier( - "Release All Packages", - children=[package1, package2, package3], - ) - - # Tick until completion - tick_count = 0 - while parallel.status == Status.RUNNING or tick_count == 0: - tick_count += 1 - print(f"\n--- Tick {tick_count} ---") - list(parallel.tick()) - print(f"Parallel status: {parallel.status}") - print(f" Package 1: 
{package1.status}") - print(f" Package 2: {package2.status}") - print(f" Package 3: {package3.status}") - - if tick_count > 10: - print("ERROR: Too many ticks!") - break - - print(f"\nFinal status: {parallel.status}") - assert parallel.status == Status.SUCCESS, f"Expected SUCCESS, got {parallel.status}" - assert package1.status == Status.SUCCESS - assert package2.status == Status.SUCCESS - assert package3.status == Status.SUCCESS - print("✓ Test passed!") - - -def test_parallel_with_one_failing_sequence(): - """Test parallel where one sequence fails.""" - print("\n=== Test: Parallel with one failing sequence ===") - - class FailingAction(Behaviour): - def __init__(self, name: str): - super().__init__(name) - - def update(self) -> Status: - return Status.FAILURE - - package1 = Sequence( - "Package 1", - memory=True, - children=[ - SimpleAction("Build Package 1", ticks_to_complete=1), - ], - ) - - package2 = Sequence( - "Package 2 (will fail)", - memory=True, - children=[ - SimpleAction("Build Package 2", ticks_to_complete=1), - FailingAction("Publish Package 2 (fails)"), - ], - ) - - package3 = Sequence( - "Package 3", - memory=True, - children=[ - SimpleAction("Build Package 3", ticks_to_complete=1), - ], - ) - - parallel = ParallelBarrier( - "Release All Packages", - children=[package1, package2, package3], - ) - - # Tick until completion - tick_count = 0 - while parallel.status == Status.RUNNING or tick_count == 0: - tick_count += 1 - print(f"\n--- Tick {tick_count} ---") - list(parallel.tick()) - print(f"Parallel status: {parallel.status}") - print(f" Package 1: {package1.status}") - print(f" Package 2: {package2.status}") - print(f" Package 3: {package3.status}") - - if tick_count > 10: - print("ERROR: Too many ticks!") - break - - print(f"\nFinal status: {parallel.status}") - assert parallel.status == Status.FAILURE, f"Expected FAILURE, got {parallel.status}" - print("✓ Test passed!") - - -def test_synchronized_behavior_with_sequences(): - """Test that 
completed sequences are not re-ticked.""" - print("\n=== Test: Synchronized behavior with sequences ===") - - # Track how many times each action is ticked - action1 = SimpleAction("Action 1", ticks_to_complete=1) - action2 = SimpleAction("Action 2", ticks_to_complete=3) - action3 = SimpleAction("Action 3", ticks_to_complete=1) - - seq1 = Sequence("Seq 1", memory=True, children=[action1]) - seq2 = Sequence("Seq 2", memory=True, children=[action2]) - seq3 = Sequence("Seq 3", memory=True, children=[action3]) - - parallel = ParallelBarrier("Parallel", children=[seq1, seq2, seq3]) - - # Tick until completion - tick_count = 0 - while parallel.status == Status.RUNNING or tick_count == 0: - tick_count += 1 - list(parallel.tick()) - - print(f"Total ticks: {tick_count}") - print(f"Action 1 tick count: {action1.tick_count}") - print(f"Action 2 tick count: {action2.tick_count}") - print(f"Action 3 tick count: {action3.tick_count}") - - # Verify synchronized behavior - assert ( - action1.tick_count == 1 - ), f"Action 1 should be ticked once, got {action1.tick_count}" - assert ( - action2.tick_count == 3 - ), f"Action 2 should be ticked 3 times, got {action2.tick_count}" - assert ( - action3.tick_count == 1 - ), f"Action 3 should be ticked once, got {action3.tick_count}" - assert parallel.status == Status.SUCCESS - print("✓ Test passed!") - - -if __name__ == "__main__": - test_parallel_with_sequences() - test_parallel_with_one_failing_sequence() - test_synchronized_behavior_with_sequences() - print("\n✅ All integration tests passed!") From dff9660503c78f4e22a10036f7a00c07eef336a9 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 10 Oct 2025 16:41:28 +0300 Subject: [PATCH 18/39] Identify target ref by listing branches in a remote repo --- src/redis_release/bht/behaviours.py | 204 +++++++++++--- src/redis_release/bht/composites.py | 51 +++- src/redis_release/bht/ppas.py | 15 +- src/redis_release/bht/tree.py | 2 + src/redis_release/cli.py | 2 +-
src/redis_release/github_client_async.py | 54 ++++ src/redis_release/models.py | 99 +++++++ src/tests/test_identify_target_ref.py | 342 +++++++++++++++++++++++ 8 files changed, 726 insertions(+), 43 deletions(-) create mode 100644 src/tests/test_identify_target_ref.py diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index d6714e8..20b344c 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -1,6 +1,9 @@ """ Actions and Conditions for the Release Tree +Here we define only simple atomic actions and conditions. +Next level composites are defined in `composites.py`. + The guiding principles are: * Actions should be atomic and represent a single task. @@ -15,7 +18,7 @@ import uuid from datetime import datetime from token import OP -from typing import Any, Dict, Optional +from typing import Any, Dict, List, Optional from py_trees.behaviour import Behaviour from py_trees.common import Status @@ -71,21 +74,156 @@ def check_task_exists(self) -> bool: class IdentifyTargetRef(ReleaseAction): def __init__( - self, name: str, package_meta: PackageMeta, log_prefix: str = "" + self, + name: str, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + github_client: GitHubClientAsync, + log_prefix: str = "", ) -> None: self.package_meta = package_meta + self.release_meta = release_meta + self.github_client = github_client + self.release_version: Optional["RedisVersion"] = None + self.branches: List[str] = [] super().__init__(name=name, log_prefix=log_prefix) + def initialise(self) -> None: + """Initialize by parsing release version and listing branches.""" + # If ref is already set, nothing to do + if self.package_meta.ref is not None: + return + + # Parse release version from tag + if not self.release_meta.tag: + self.logger.error("Release tag is not set") + return + + try: + from ..models import RedisVersion + + self.release_version = RedisVersion.parse(self.release_meta.tag) + self.logger.debug( 
+ f"Parsed release version: {self.release_version.major}.{self.release_version.minor}" + ) + except ValueError as e: + self.logger.error(f"Failed to parse release tag: {e}") + return + + # List remote branches matching release pattern with major version + # Pattern: release/MAJOR.\d+$ (e.g., release/8.\d+$ for major version 8) + pattern = f"^release/{self.release_version.major}\\.\\d+$" + self.task = asyncio.create_task( + self.github_client.list_remote_branches( + self.package_meta.repo, pattern=pattern + ) + ) + def update(self) -> Status: + # If ref is already set, we're done if self.package_meta.ref is not None: + self.logger.debug(f"Ref already set: {self.package_meta.ref}") return Status.SUCCESS - # For now, just set a hardcoded ref - self.package_meta.ref = "release/8.2" - self.logger.info( - f"[green]Target ref identified:[/green] {self.package_meta.ref}" + + try: + assert self.task is not None + + # Wait for branch listing to complete + if not self.task.done(): + return Status.RUNNING + + self.branches = self.task.result() + self.logger.debug(f"Found {len(self.branches)} branches") + + # Sort branches and detect appropriate one + sorted_branches = self._sort_branches(self.branches) + detected_branch = self._detect_branch(sorted_branches) + + if detected_branch: + self.package_meta.ref = detected_branch + self.logger.info( + f"[green]Target ref identified:[/green] {self.package_meta.ref}" + ) + self.feedback_message = f"Target ref set to {self.package_meta.ref}" + return Status.SUCCESS + else: + self.logger.error("Failed to detect appropriate branch") + self.feedback_message = "Failed to detect appropriate branch" + return Status.FAILURE + + except Exception as e: + return self.log_exception_and_return_failure(e) + + def _sort_branches(self, branches: List[str]) -> List[str]: + """Sort branches by version in descending order. 
+ + Args: + branches: List of branch names (e.g., ["release/8.0", "release/8.4"]) + + Returns: + Sorted list of branch names in descending order by version + (e.g., ["release/8.4", "release/8.2", "release/8.0"]) + """ + pattern = re.compile(r"^release/(\d+)\.(\d+)$") + branch_versions = [] + + for branch in branches: + match = pattern.match(branch) + if match: + major = int(match.group(1)) + minor = int(match.group(2)) + branch_versions.append((major, minor, branch)) + + # Sort by (major, minor) descending + branch_versions.sort(reverse=True) + + return [branch for _, _, branch in branch_versions] + + def _detect_branch(self, sorted_branches: List[str]) -> Optional[str]: + """Detect the appropriate branch from sorted list of branches. + + Walks over sorted list of branches (descending order) trying to find first + branch equal to release/MAJOR.MINOR or lower version. + + Args: + sorted_branches: Sorted list of branch names in descending order + (e.g., ["release/8.4", "release/8.2", "release/8.0"]) + Can be empty. 
+ + Returns: + Branch name or None if no suitable branch found + """ + if not self.release_version: + return None + + if not sorted_branches: + self.logger.warning("No release branches found matching pattern") + return None + + target_major = self.release_version.major + target_minor = self.release_version.minor + + # Pattern to extract version from branch name + pattern = re.compile(r"^release/(\d+)\.(\d+)$") + + # Walk through sorted branches (descending order) + # Find first branch <= target version + for branch in sorted_branches: + match = pattern.match(branch) + if match: + major = int(match.group(1)) + minor = int(match.group(2)) + + if (major, minor) <= (target_major, target_minor): + self.logger.debug( + f"Found matching branch: {branch} for target {target_major}.{target_minor}" + ) + return branch + + self.logger.warning( + f"No suitable branch found for version {target_major}.{target_minor}" ) - self.feedback_message = f"Target ref set to {self.package_meta.ref}" - return Status.SUCCESS + return None class TriggerWorkflow(ReleaseAction): @@ -372,6 +510,31 @@ def update(self) -> Status: return self.log_exception_and_return_failure(e) +class AttachReleaseHandleToPublishWorkflow(LoggingAction): + def __init__( + self, + name: str, + build_workflow: Workflow, + publish_workflow: Workflow, + log_prefix: str = "", + ) -> None: + self.build_workflow = build_workflow + self.publish_workflow = publish_workflow + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + if "release_handle" in self.publish_workflow.inputs: + return Status.SUCCESS + + if self.build_workflow.result is None: + return Status.FAILURE + + self.publish_workflow.inputs["release_handle"] = json.dumps( + self.build_workflow.result + ) + return Status.SUCCESS + + class ResetPackageState(ReleaseAction): def __init__( self, @@ -528,31 +691,6 @@ def update(self) -> Status: return Status.SUCCESS -class AttachReleaseHandleToPublishWorkflow(LoggingAction): - def __init__( 
- self, - name: str, - build_workflow: Workflow, - publish_workflow: Workflow, - log_prefix: str = "", - ) -> None: - self.build_workflow = build_workflow - self.publish_workflow = publish_workflow - super().__init__(name=name, log_prefix=log_prefix) - - def update(self) -> Status: - if "release_handle" in self.publish_workflow.inputs: - return Status.SUCCESS - - if self.build_workflow.result is None: - return Status.FAILURE - - self.publish_workflow.inputs["release_handle"] = json.dumps( - self.build_workflow.result - ) - return Status.SUCCESS - - class IsForceRebuild(LoggingAction): def __init__( self, name: str, package_meta: PackageMeta, log_prefix: str = "" diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index f03ba93..8703d3f 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -1,3 +1,16 @@ +""" +Higher level composites for the Release Tree + +These composites are built from the atomic actions and conditions defined in `behaviours.py`. +Here we make flag and state aware tree behaviors, implement retry and repeat patterns. + +The guiding principle for the composites defined here is the same as in behaviours.py +in a sense that we aim to make a more or less direct action without complex conditions +(except for the flags) + +More complex behaviors, including pre- and post- conditions are defined in `ppas.py`. 
+""" + from typing import Iterator, List, Optional from typing import Sequence as TypingSequence @@ -216,19 +229,22 @@ def __init__( ) -class IdentifyTargetRefGoal(FlagGuard): +class IdentifyTargetRefGuarded(FlagGuard): def __init__( self, name: str, package_meta: PackageMeta, release_meta: ReleaseMeta, + github_client: GitHubClientAsync, log_prefix: str = "", ) -> None: super().__init__( - None, + None if name == "" else name, IdentifyTargetRef( "Identify Target Ref", package_meta, + release_meta, + github_client, log_prefix=log_prefix, ), package_meta.ephemeral, @@ -341,12 +357,22 @@ def __init__( default_package, log_prefix=log_prefix, ) - reset_package_state_wrapped = SuccessIsRunning( + reset_package_state_running = SuccessIsRunning( "Success is Running", reset_package_state ) + reset_package_state_guarded = FlagGuard( + None if name == "" else name, + reset_package_state_running, + package.meta.ephemeral, + "identify_ref_failed", + flag_value=True, + raise_on=[], + guard_status=Status.FAILURE, + log_prefix=log_prefix, + ) super().__init__( None if name == "" else name, - reset_package_state_wrapped, + reset_package_state_guarded, workflow.ephemeral, "trigger_attempted", flag_value=True, @@ -358,7 +384,7 @@ def __init__( class RestartWorkflowGuarded(FlagGuard): """ - Reset workflow if we didn't trigger the workflow in current run + Reset workflow if we didn't trigger the workflow in current run and if there was no identify target ref error This will only reset the workflow state @@ -369,6 +395,7 @@ def __init__( self, name: str, workflow: Workflow, + package_meta: PackageMeta, default_workflow: Workflow, log_prefix: str = "", ) -> None: @@ -378,12 +405,22 @@ def __init__( default_workflow, log_prefix=log_prefix, ) - reset_workflow_state_wrapped = SuccessIsRunning( + reset_workflow_state_running = SuccessIsRunning( "Success is Running", reset_workflow_state ) + reset_workflow_state_guarded = FlagGuard( + None if name == "" else name, + 
reset_workflow_state_running, + package_meta.ephemeral, + "identify_ref_failed", + flag_value=True, + raise_on=[], + guard_status=Status.FAILURE, + log_prefix=log_prefix, + ) super().__init__( None if name == "" else name, - reset_workflow_state_wrapped, + reset_workflow_state_guarded, workflow.ephemeral, "trigger_attempted", flag_value=True, diff --git a/src/redis_release/bht/ppas.py b/src/redis_release/bht/ppas.py index 60532f0..e658416 100644 --- a/src/redis_release/bht/ppas.py +++ b/src/redis_release/bht/ppas.py @@ -1,3 +1,12 @@ +""" +Here we define PPAs (Postcondition-Precondition-Action) composites to be used in backchaining. + +See backchain.py for more details on backchaining. + +Chains are formed and latched in `tree.py` + +""" + from typing import Union from py_trees.composites import Selector, Sequence @@ -18,7 +27,7 @@ DownloadArtifactsListGuarded, ExtractArtifactResultGuarded, FindWorkflowByUUID, - IdentifyTargetRefGoal, + IdentifyTargetRefGuarded, TriggerWorkflowGuarded, WaitForWorkflowCompletion, ) @@ -108,14 +117,16 @@ def create_trigger_workflow_ppa( def create_identify_target_ref_ppa( package_meta: PackageMeta, release_meta: ReleaseMeta, + github_client: GitHubClientAsync, log_prefix: str, ) -> Union[Selector, Sequence]: return create_PPA( "Identify Target Ref", - IdentifyTargetRefGoal( + IdentifyTargetRefGuarded( "", package_meta, release_meta, + github_client, log_prefix=log_prefix, ), ) diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 5038f8c..da9eb6f 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -268,6 +268,7 @@ def create_publish_workflow_tree_branch( reset_publish_workflow_state = RestartWorkflowGuarded( "", publish_workflow, + package_meta, default_publish_workflow, log_prefix=f"{package_name}.publish", ) @@ -339,6 +340,7 @@ def create_workflow_complete_tree_branch( identify_target_ref = create_identify_target_ref_ppa( package_meta, release_meta, + github_client, 
log_prefix, ) latch_chains( diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index d7e9a0f..8b5436d 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -275,7 +275,7 @@ def release_print_bht( workflow, package_meta, release_meta, github_client, log_prefix ), "identify_target_ref": lambda: create_identify_target_ref_ppa( - package_meta, release_meta, log_prefix + package_meta, release_meta, github_client, log_prefix ), "download_artifacts": lambda: create_download_artifacts_ppa( workflow, package_meta, github_client, log_prefix diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py index cb87959..4db92de 100644 --- a/src/redis_release/github_client_async.py +++ b/src/redis_release/github_client_async.py @@ -600,3 +600,57 @@ def _extract_uuid(self, text: str) -> Optional[str]: uuid_pattern = r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" uuid_match = re.search(uuid_pattern, text, re.IGNORECASE) return uuid_match.group() if uuid_match else None + + async def list_remote_branches( + self, repo: str, pattern: Optional[str] = None + ) -> List[str]: + """List remote branches, optionally filtered by pattern. 
+ + Args: + repo: Repository name (e.g., "redis/redis") + pattern: Optional wildcard pattern (e.g., "release/*", "feature/*") + + Returns: + List of branch names matching the pattern + """ + url = f"https://api.github.com/repos/{repo}/git/refs/heads" + headers = { + "Accept": "application/vnd.github.v3+json", + "X-GitHub-Api-Version": "2022-11-28", + } + headers["Authorization"] = f"Bearer {self.token}" + + try: + data = await self.github_request_paginated( + url=url, + headers=headers, + params={}, + timeout=30, + per_page=100, + max_pages=None, + ) + + branches = [] + # data is a list of ref objects + if isinstance(data, list): + for ref_data in data: + ref_name = ref_data.get("ref", "") + if ref_name.startswith("refs/heads/"): + branch_name = ref_name[11:] # Remove "refs/heads/" prefix + branches.append(branch_name) + + # Filter by pattern if provided + if pattern: + branches = [branch for branch in branches if re.match(pattern, branch)] + + logger.info( + f"[green]Found {len(branches)} branches{' matching pattern' if pattern else ''}[/green]" + ) + if pattern: + logger.debug(f"Pattern: {pattern}") + + return sorted(branches) + + except Exception as e: + logger.error(f"[red]Error listing branches: {e}[/red]") + return [] diff --git a/src/redis_release/models.py b/src/redis_release/models.py index 24013bd..08f0c35 100644 --- a/src/redis_release/models.py +++ b/src/redis_release/models.py @@ -1,5 +1,6 @@ """Data models for Redis release automation.""" +import re from datetime import datetime from enum import Enum from typing import Any, Dict, Optional @@ -137,3 +138,101 @@ def has_publish_failures(self) -> bool: and pkg.publish_workflow.conclusion != WorkflowConclusion.SUCCESS for pkg in self.packages.values() ) + + +class RedisVersion(BaseModel): + """Represents a parsed Redis version. 
+ + TODO: This class duplicates the code from docker-library-redis/redis-release + """ + + major: int = Field(..., ge=1, description="Major version number") + minor: int = Field(..., ge=0, description="Minor version number") + patch: Optional[int] = Field(None, ge=0, description="Patch version number") + suffix: str = Field("", description="Version suffix (e.g., -m01, -rc1, -eol)") + + @classmethod + def parse(cls, version_str: str) -> "RedisVersion": + """Parse a version string into components. + + Args: + version_str: Version string (e.g., "v8.2.1-m01", "8.2", "7.4.0-eol") + + Returns: + RedisVersion instance + + Raises: + ValueError: If version string format is invalid + """ + # Remove 'v' prefix if present + version = version_str.lstrip("v") + + # Extract numeric part and suffix + match = re.match(r"^([1-9]\d*\.\d+(?:\.\d+)?)(.*)", version) + if not match: + raise ValueError(f"Invalid version format: {version_str}") + + numeric_part, suffix = match.groups() + + # Parse numeric components + parts = numeric_part.split(".") + major = int(parts[0]) + minor = int(parts[1]) + patch = int(parts[2]) if len(parts) > 2 else None + + return cls(major=major, minor=minor, patch=patch, suffix=suffix) + + @property + def is_milestone(self) -> bool: + """Check if this is a milestone version (has suffix).""" + return bool(self.suffix) + + @property + def is_eol(self) -> bool: + """Check if this version is end-of-life.""" + return self.suffix.lower().endswith("-eol") + + @property + def mainline_version(self) -> str: + """Get the mainline version string (major.minor).""" + return f"{self.major}.{self.minor}" + + @property + def sort_key(self) -> str: + suffix_weight = 0 + if self.suffix.startswith("rc"): + suffix_weight = 100 + elif self.suffix.startswith("m"): + suffix_weight = 50 + + return ( + f"{self.major}.{self.minor}.{self.patch or 0}.{suffix_weight}.{self.suffix}" + ) + + def __str__(self) -> str: + """String representation of the version.""" + version = 
f"{self.major}.{self.minor}" + if self.patch is not None: + version += f".{self.patch}" + return version + self.suffix + + def __lt__(self, other: "RedisVersion") -> bool: + """Compare versions for sorting.""" + if not isinstance(other, RedisVersion): + return NotImplemented + + # Compare major.minor.patch first + self_tuple = (self.major, self.minor, self.patch or 0) + other_tuple = (other.major, other.minor, other.patch or 0) + + if self_tuple != other_tuple: + return self_tuple < other_tuple + + # If numeric parts are equal, compare suffixes + # Empty suffix (GA) comes after suffixes (milestones) + if not self.suffix and other.suffix: + return False + if self.suffix and not other.suffix: + return True + + return self.suffix < other.suffix diff --git a/src/tests/test_identify_target_ref.py b/src/tests/test_identify_target_ref.py new file mode 100644 index 0000000..3851402 --- /dev/null +++ b/src/tests/test_identify_target_ref.py @@ -0,0 +1,342 @@ +"""Tests for IdentifyTargetRef behaviour.""" + +import asyncio +from typing import List, Optional +from unittest.mock import AsyncMock, MagicMock + +import pytest +from py_trees.common import Status + +from redis_release.bht.behaviours import IdentifyTargetRef +from redis_release.bht.state import PackageMeta, PackageMetaEphemeral, ReleaseMeta +from redis_release.github_client_async import GitHubClientAsync + + +@pytest.fixture +def github_client() -> MagicMock: + """Create a mock GitHub client.""" + client = MagicMock(spec=GitHubClientAsync) + return client + + +@pytest.fixture +def package_meta() -> PackageMeta: + """Create a package meta object.""" + return PackageMeta( + repo="redis/docker-library-redis", + ref=None, + publish_internal_release=False, + ephemeral=PackageMetaEphemeral(), + ) + + +@pytest.fixture +def release_meta() -> ReleaseMeta: + """Create a release meta object.""" + return ReleaseMeta(tag="8.2.1") + + +@pytest.mark.asyncio +async def test_identify_target_ref_already_set( + github_client: MagicMock, 
package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: + """Test that if ref is already set, behaviour returns SUCCESS immediately.""" + package_meta.ref = "release/8.2" + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize should do nothing + behaviour.initialise() + + # Update should return SUCCESS immediately + status = behaviour.update() + assert status == Status.SUCCESS + assert package_meta.ref == "release/8.2" + + # GitHub client should not be called + github_client.list_remote_branches.assert_not_called() + + +@pytest.mark.asyncio +async def test_identify_target_ref_exact_match( + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: + """Test identifying target ref with exact version match.""" + # Mock branch listing + branches = ["release/7.2", "release/8.0", "release/8.2", "release/8.4"] + + async def mock_list_branches(repo: str, pattern: Optional[str] = None) -> List[str]: + return branches + + github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize + behaviour.initialise() + + # Wait for async task to complete + await asyncio.sleep(0.1) + + # Update should detect release/8.2 + status = behaviour.update() + assert status == Status.SUCCESS + assert package_meta.ref == "release/8.2" + assert behaviour.feedback_message == "Target ref set to release/8.2" + + +@pytest.mark.asyncio +async def test_identify_target_ref_lower_version( + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: + """Test identifying target ref when exact match doesn't exist, use lower version.""" + release_meta.tag = "8.3.0" + + # Mock branch listing - no release/8.3 branch + branches = ["release/7.2", "release/8.0", "release/8.2", "release/8.4"] + + async def 
mock_list_branches(repo: str, pattern: Optional[str] = None) -> list[str]: + return branches + + github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize + behaviour.initialise() + + # Wait for async task to complete + await asyncio.sleep(0.1) + + # Update should detect release/8.2 (highest version <= 8.3) + status = behaviour.update() + assert status == Status.SUCCESS + assert package_meta.ref == "release/8.2" + + +@pytest.mark.asyncio +async def test_identify_target_ref_milestone_version( + github_client, package_meta, release_meta +): + """Test identifying target ref for milestone version.""" + release_meta.tag = "8.4-m01" + + # Mock branch listing + branches = ["release/7.2", "release/8.0", "release/8.2", "release/8.4"] + + async def mock_list_branches(repo, pattern=None): + return branches + + github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize + behaviour.initialise() + + # Wait for async task to complete + await asyncio.sleep(0.1) + + # Update should detect release/8.4 + status = behaviour.update() + assert status == Status.SUCCESS + assert package_meta.ref == "release/8.4" + + +@pytest.mark.asyncio +async def test_identify_target_ref_no_suitable_branch( + github_client, package_meta, release_meta +): + """Test when no suitable branch is found (version too old).""" + release_meta.tag = "7.0.0" + + # Mock branch listing - all branches are newer + branches = ["release/7.2", "release/8.0", "release/8.2", "release/8.4"] + + async def mock_list_branches(repo, pattern=None): + return branches + + github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + 
github_client, + ) + + # Initialize + behaviour.initialise() + + # Wait for async task to complete + await asyncio.sleep(0.1) + + # Update should fail + status = behaviour.update() + assert status == Status.FAILURE + assert package_meta.ref is None + assert behaviour.feedback_message == "Failed to detect appropriate branch" + + +@pytest.mark.asyncio +async def test_identify_target_ref_no_release_branches( + github_client, package_meta, release_meta +): + """Test when no release branches match the pattern.""" + # Mock branch listing - no release branches + branches = ["main", "develop", "feature/test"] + + async def mock_list_branches(repo, pattern=None): + return branches + + github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize + behaviour.initialise() + + # Wait for async task to complete + await asyncio.sleep(0.1) + + # Update should fail + status = behaviour.update() + assert status == Status.FAILURE + assert package_meta.ref is None + + +@pytest.mark.asyncio +async def test_identify_target_ref_invalid_tag( + github_client, package_meta, release_meta +): + """Test with invalid release tag.""" + release_meta.tag = "invalid-tag" + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize should handle error gracefully + behaviour.initialise() + + # Update should fail because task is None + status = behaviour.update() + assert status == Status.FAILURE + + +@pytest.mark.asyncio +async def test_identify_target_ref_no_tag(github_client, package_meta, release_meta): + """Test when release tag is not set.""" + release_meta.tag = None + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize should handle missing tag + behaviour.initialise() + + # Update should fail + status = 
behaviour.update() + assert status == Status.FAILURE + + +@pytest.mark.asyncio +async def test_detect_branch_sorting(github_client, package_meta, release_meta): + """Test that branches are sorted correctly and highest suitable version is selected.""" + release_meta.tag = "8.5.0" + + # Mock branch listing - unsorted + branches = ["release/8.0", "release/8.4", "release/7.2", "release/8.2"] + + async def mock_list_branches(repo, pattern=None): + return branches + + github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize + behaviour.initialise() + + # Wait for async task to complete + await asyncio.sleep(0.1) + + # Update should detect release/8.4 (highest version <= 8.5) + status = behaviour.update() + assert status == Status.SUCCESS + assert package_meta.ref == "release/8.4" + + +@pytest.mark.asyncio +async def test_identify_target_ref_running_state( + github_client, package_meta, release_meta +): + """Test that behaviour returns RUNNING while task is not complete.""" + # Create a future that won't complete immediately + future = asyncio.Future() + + async def mock_list_branches(repo, pattern=None): + await future + return ["release/8.2"] + + github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + + behaviour = IdentifyTargetRef( + "Test Identify Ref", + package_meta, + release_meta, + github_client, + ) + + # Initialize + behaviour.initialise() + + # Update should return RUNNING + status = behaviour.update() + assert status == Status.RUNNING + + # Complete the future + future.set_result(None) + await asyncio.sleep(0.1) + + # Now update should succeed + status = behaviour.update() + assert status == Status.SUCCESS + assert package_meta.ref == "release/8.2" From a60fbcfd6853224676940a61bf1b5df9aff646f1 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 10 Oct 2025 16:45:31 +0300 Subject: 
[PATCH 19/39] Fix annotations in tests --- src/tests/test_identify_target_ref.py | 64 +++++++++++---------------- 1 file changed, 25 insertions(+), 39 deletions(-) diff --git a/src/tests/test_identify_target_ref.py b/src/tests/test_identify_target_ref.py index 3851402..c28cd4c 100644 --- a/src/tests/test_identify_target_ref.py +++ b/src/tests/test_identify_target_ref.py @@ -1,7 +1,7 @@ """Tests for IdentifyTargetRef behaviour.""" import asyncio -from typing import List, Optional +from typing import Any, List from unittest.mock import AsyncMock, MagicMock import pytest @@ -70,10 +70,7 @@ async def test_identify_target_ref_exact_match( # Mock branch listing branches = ["release/7.2", "release/8.0", "release/8.2", "release/8.4"] - async def mock_list_branches(repo: str, pattern: Optional[str] = None) -> List[str]: - return branches - - github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + github_client.list_remote_branches = AsyncMock(return_value=branches) behaviour = IdentifyTargetRef( "Test Identify Ref", @@ -105,10 +102,7 @@ async def test_identify_target_ref_lower_version( # Mock branch listing - no release/8.3 branch branches = ["release/7.2", "release/8.0", "release/8.2", "release/8.4"] - async def mock_list_branches(repo: str, pattern: Optional[str] = None) -> list[str]: - return branches - - github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + github_client.list_remote_branches = AsyncMock(return_value=branches) behaviour = IdentifyTargetRef( "Test Identify Ref", @@ -131,18 +125,15 @@ async def mock_list_branches(repo: str, pattern: Optional[str] = None) -> list[s @pytest.mark.asyncio async def test_identify_target_ref_milestone_version( - github_client, package_meta, release_meta -): + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: """Test identifying target ref for milestone version.""" release_meta.tag = "8.4-m01" # Mock branch listing branches = 
["release/7.2", "release/8.0", "release/8.2", "release/8.4"] - async def mock_list_branches(repo, pattern=None): - return branches - - github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + github_client.list_remote_branches = AsyncMock(return_value=branches) behaviour = IdentifyTargetRef( "Test Identify Ref", @@ -165,18 +156,15 @@ async def mock_list_branches(repo, pattern=None): @pytest.mark.asyncio async def test_identify_target_ref_no_suitable_branch( - github_client, package_meta, release_meta -): + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: """Test when no suitable branch is found (version too old).""" release_meta.tag = "7.0.0" # Mock branch listing - all branches are newer branches = ["release/7.2", "release/8.0", "release/8.2", "release/8.4"] - async def mock_list_branches(repo, pattern=None): - return branches - - github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + github_client.list_remote_branches = AsyncMock(return_value=branches) behaviour = IdentifyTargetRef( "Test Identify Ref", @@ -200,16 +188,13 @@ async def mock_list_branches(repo, pattern=None): @pytest.mark.asyncio async def test_identify_target_ref_no_release_branches( - github_client, package_meta, release_meta -): + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: """Test when no release branches match the pattern.""" # Mock branch listing - no release branches branches = ["main", "develop", "feature/test"] - async def mock_list_branches(repo, pattern=None): - return branches - - github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + github_client.list_remote_branches = AsyncMock(return_value=branches) behaviour = IdentifyTargetRef( "Test Identify Ref", @@ -232,8 +217,8 @@ async def mock_list_branches(repo, pattern=None): @pytest.mark.asyncio async def test_identify_target_ref_invalid_tag( - github_client, package_meta, 
release_meta -): + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: """Test with invalid release tag.""" release_meta.tag = "invalid-tag" @@ -253,7 +238,9 @@ async def test_identify_target_ref_invalid_tag( @pytest.mark.asyncio -async def test_identify_target_ref_no_tag(github_client, package_meta, release_meta): +async def test_identify_target_ref_no_tag( + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: """Test when release tag is not set.""" release_meta.tag = None @@ -273,17 +260,16 @@ async def test_identify_target_ref_no_tag(github_client, package_meta, release_m @pytest.mark.asyncio -async def test_detect_branch_sorting(github_client, package_meta, release_meta): +async def test_detect_branch_sorting( + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: """Test that branches are sorted correctly and highest suitable version is selected.""" release_meta.tag = "8.5.0" # Mock branch listing - unsorted branches = ["release/8.0", "release/8.4", "release/7.2", "release/8.2"] - async def mock_list_branches(repo, pattern=None): - return branches - - github_client.list_remote_branches = AsyncMock(side_effect=mock_list_branches) + github_client.list_remote_branches = AsyncMock(return_value=branches) behaviour = IdentifyTargetRef( "Test Identify Ref", @@ -306,13 +292,13 @@ async def mock_list_branches(repo, pattern=None): @pytest.mark.asyncio async def test_identify_target_ref_running_state( - github_client, package_meta, release_meta -): + github_client: MagicMock, package_meta: PackageMeta, release_meta: ReleaseMeta +) -> None: """Test that behaviour returns RUNNING while task is not complete.""" # Create a future that won't complete immediately - future = asyncio.Future() + future: asyncio.Future[None] = asyncio.Future() - async def mock_list_branches(repo, pattern=None): + async def mock_list_branches(*args: Any, **kwargs: Any) -> List[str]: 
await future return ["release/8.2"] From 404f4ce5f0d9808d931c1b4839fdca48a2df733d Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 10 Oct 2025 18:27:52 +0300 Subject: [PATCH 20/39] Added print state tables --- src/redis_release/bht/state.py | 184 ++++++++++++++++++++++++++++++++- src/redis_release/cli.py | 28 +++++ 2 files changed, 211 insertions(+), 1 deletion(-) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 221499b..442567b 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -2,12 +2,15 @@ import logging import uuid from datetime import datetime +from importlib.metadata import packages_distributions from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Optional, Protocol, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Protocol, Union from botocore.exceptions import ClientError from pydantic import BaseModel, Field +from rich.console import Console from rich.pretty import pretty_repr +from rich.table import Table from redis_release.models import WorkflowConclusion, WorkflowStatus from redis_release.state_manager import S3Backed, logger @@ -238,6 +241,7 @@ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: self.storage.release_lock(self.tag) self._lock_acquired = False logger.info(f"Lock released for tag: {self.tag}") + print_state_table(self.state) @property def state(self) -> ReleaseState: @@ -508,3 +512,181 @@ def reset_model_to_defaults(target: BaseModel, default: BaseModel) -> None: else: # Simple value, copy directly setattr(target, field_name, default_value) + + +def print_state_table(state: ReleaseState, console: Optional[Console] = None) -> None: + """Print table showing the release state. 
+ + Args: + state: The ReleaseState to display + console: Optional Rich Console instance (creates new one if not provided) + """ + if console is None: + console = Console() + + # Create table with title + table = Table( + title=f"[bold cyan]Release State: {state.meta.tag or 'N/A'}[/bold cyan]", + show_header=True, + header_style="bold magenta", + border_style="bright_blue", + title_style="bold cyan", + ) + + # Add columns + table.add_column("Package", style="cyan", no_wrap=True, width=20) + table.add_column("Build", justify="center", width=15) + table.add_column("Publish", justify="center", width=15) + table.add_column("Details", style="yellow", width=40) + + # Process each package + for package_name, package in sorted(state.packages.items()): + # Determine build status + build_status = _get_workflow_status_display(package.build) + + # Determine publish status + publish_status = _get_workflow_status_display(package.publish) + + # Collect details from workflows + details = _collect_details(package) + + # Add row to table + table.add_row( + package_name, + build_status, + publish_status, + details, + ) + + # Print the table + console.print() + console.print(table) + console.print() + + +def _get_workflow_status_display(workflow: Workflow) -> str: + """Get a rich-formatted status display for a workflow. + + Args: + workflow: The workflow to check + + Returns: + Rich-formatted status string + """ + # Check result field - if we have result, we succeeded + if workflow.result is not None: + return "[bold green]✓ Success[/bold green]" + + # Check if workflow was triggered + if workflow.triggered_at is None: + return "[dim]− Not Started[/dim]" + + # Workflow was triggered but no result - it failed + return "[bold red]✗ Failed[/bold red]" + + +def _collect_workflow_details(workflow: Workflow, prefix: str) -> List[str]: + """Collect details from a workflow using bottom-up approach. + + Shows successes until the first failure, then stops. 
+ Bottom-up means: trigger → identify → timeout → conclusion → artifacts → result + + Args: + workflow: The workflow to check + prefix: Prefix for detail messages (e.g., "Build" or "Publish") + + Returns: + List of detail strings + """ + details: List[str] = [] + + # Stage 1: Trigger (earliest/bottom) + if workflow.ephemeral.trigger_failed or workflow.triggered_at is None: + details.append(f"[red]✗ Trigger {prefix} workflow failed[/red]") + return details + else: + details.append(f"[green]✓ {prefix} workflow triggered[/green]") + + # Stage 2: Identify + if workflow.ephemeral.identify_failed or workflow.run_id is None: + details.append(f"[red]✗ {prefix} workflow not found[/red]") + return details + else: + details.append(f"[green]✓ {prefix} workflow found[/green]") + + # Stage 3: Timeout (only ephemeral) + if workflow.ephemeral.timed_out: + details.append(f"[yellow]⏱ {prefix} timed out[/yellow]") + return details + + # Stage 4: Workflow conclusion + if workflow.conclusion == WorkflowConclusion.FAILURE: + details.append(f"[red]✗ {prefix} workflow failed[/red]") + return details + + # Stage 5: Artifacts download + if workflow.ephemeral.artifacts_download_failed or workflow.artifacts is None: + details.append(f"[red]✗ {prefix} artifacts download failed[/red]") + return details + else: + details.append(f"[green]✓ {prefix} artifacts downloaded[/green]") + + # Stage 6: Result extraction (latest/top) + if workflow.result is None or workflow.ephemeral.extract_result_failed: + details.append(f"[red]✗ {prefix} failed to extract result[/red]") + return details + else: + details.append(f"[green]✓ {prefix} result extracted[/green]") + + # Check for other workflow states + if workflow.status == WorkflowStatus.IN_PROGRESS: + details.append(f"[blue]⟳ {prefix} in progress[/blue]") + elif workflow.status == WorkflowStatus.QUEUED: + details.append(f"[cyan]⋯ {prefix} queued[/cyan]") + elif workflow.status == WorkflowStatus.PENDING: + details.append(f"[dim]○ {prefix} pending[/dim]") + + 
return details + + +def _collect_package_details(package: Package) -> List[str]: + """Collect details from package metadata. + + Args: + package: The package to check + + Returns: + List of detail strings (may be empty) + """ + details: List[str] = [] + + if package.meta.ephemeral.identify_ref_failed: + details.append("[red]✗ Identify target ref to run workflow failed[/red]") + elif package.meta.ref is not None: + details.append(f"[green]✓ Target Ref identified: {package.meta.ref}[/green]") + + return details + + +def _collect_details(package: Package) -> str: + """Collect and format all details from package and workflows. + + Args: + package: The package to check + + Returns: + Formatted string of details + """ + details: List[str] = [] + + # Collect package-level details + details.extend(_collect_package_details(package)) + + # Collect build workflow details + details.extend(_collect_workflow_details(package.build, "Build")) + + # Only collect publish details if build succeeded (has result) + if package.build.result is not None: + details.extend(_collect_workflow_details(package.publish, "Publish")) + + return "\n".join(details) diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index 8b5436d..62d4813 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -17,6 +17,8 @@ PackageMeta, ReleaseMeta, ReleaseState, + S3StateStorage, + StateSyncer, Workflow, ) @@ -345,5 +347,31 @@ def release_bht( asyncio.run(async_tick_tock(tree)) +@app.command() +def release_state( + release_tag: str = typer.Argument(..., help="Release tag (e.g., 8.4-m01-int1)"), + config_file: Optional[str] = typer.Option( + None, "--config", "-c", help="Path to config file (default: config.yaml)" + ), +) -> None: + """Run release using behaviour tree implementation.""" + setup_logging(logging.INFO) + config_path = config_file or "config.yaml" + config = load_config(config_path) + + # Create release args + args = ReleaseArgs( + release_tag=release_tag, + 
force_rebuild=[], + ) + + with StateSyncer( + storage=S3StateStorage(), + config=config, + args=args, + ): + pass + + if __name__ == "__main__": app() From cd519e9d894664a0721d2a57f172487cf604c80f Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Wed, 15 Oct 2025 14:59:36 +0300 Subject: [PATCH 21/39] New prints and demo behaviours --- src/redis_release/bht/behaviours.py | 8 +++ src/redis_release/bht/state.py | 1 - src/redis_release/bht/tree.py | 33 +++++++++ src/redis_release/cli.py | 33 +++++++-- src/redis_release/logging_config.py | 102 +++++++++++++++++++++------- 5 files changed, 147 insertions(+), 30 deletions(-) diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index 20b344c..c72298e 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -374,7 +374,15 @@ def update(self) -> Status: return Status.RUNNING result = self.task.result() + if self.workflow.status != result.status: + self.logger.info( + f"Workflow {self.workflow.workflow_file}({self.workflow.run_id}) status changed: {self.workflow.status} -> {result.status}" + ) self.workflow.status = result.status + if self.workflow.conclusion != result.conclusion: + self.logger.info( + f"Workflow {self.workflow.workflow_file}({self.workflow.run_id}) conclusion changed: {self.workflow.conclusion} -> {result.conclusion}" + ) self.workflow.conclusion = result.conclusion self.feedback_message = ( f" {self.workflow.status}, {self.workflow.conclusion}" diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 442567b..e0c5c57 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -2,7 +2,6 @@ import logging import uuid from datetime import datetime -from importlib.metadata import packages_distributions from pathlib import Path from typing import TYPE_CHECKING, Any, Dict, List, Optional, Protocol, Union diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 
da9eb6f..24f56e3 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -11,6 +11,7 @@ from py_trees.display import unicode_tree from py_trees.trees import BehaviourTree from py_trees.visitors import SnapshotVisitor +from rich.pretty import pretty_repr from rich.text import Text from ..config import Config @@ -374,3 +375,35 @@ def create_extract_result_tree_branch( ) latch_chains(extract_artifact_result, download_artifacts) return extract_artifact_result + + +class DemoBehaviour(Behaviour): + def __init__(self, name: str): + super().__init__(name=name) + + def update(self) -> Status: + return Status.SUCCESS + + +def create_sequence_branch() -> Sequence: + s = Sequence( + name="Sequence: A && B", + memory=False, + children=[ + DemoBehaviour("A"), + DemoBehaviour("B"), + ], + ) + return s + + +def create_selector_branch() -> Selector: + s = Selector( + name="Selector: A || B", + memory=False, + children=[ + DemoBehaviour("A"), + DemoBehaviour("B"), + ], + ) + return s diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index 62d4813..451612d 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -36,6 +36,8 @@ create_build_workflow_tree_branch, create_extract_result_tree_branch, create_publish_workflow_tree_branch, + create_selector_branch, + create_sequence_branch, create_workflow_complete_tree_branch, create_workflow_with_result_tree_branch, initialize_tree_and_state, @@ -231,7 +233,20 @@ def release_print_bht( None, "--name", "-n", - help="Name of specific PPA or tree branch to print. PPAs: 'workflow_success', 'workflow_completion', 'find_workflow', 'trigger_workflow', 'identify_target_ref', 'download_artifacts', 'extract_artifact_result'. Tree branches: 'workflow_success_branch', 'workflow_result_branch'", + help="""Name of specific PPA or tree branch to print. 
+ PPAs: + 'workflow_success', + 'workflow_completion', + 'find_workflow', + 'trigger_workflow', + 'identify_target_ref', + 'download_artifacts', + 'extract_artifact_result'. + Tree branches: + 'workflow_complete_branch', + 'workflow_with_result_branch', + 'publish_workflow_branch', + 'build_workflow_branch'""", ), ) -> None: """Print and render (using graphviz) the release behaviour tree or a specific PPA.""" @@ -292,12 +307,20 @@ def release_print_bht( "workflow_with_result_branch": lambda: create_workflow_with_result_tree_branch( "artifact", workflow, package_meta, release_meta, github_client, "" ), - "publish_worflow_branch": lambda: create_publish_workflow_tree_branch( - workflow, workflow, package_meta, release_meta, github_client, "" + "publish_workflow_branch": lambda: create_publish_workflow_tree_branch( + workflow, + workflow, + package_meta, + release_meta, + workflow, + github_client, + "", ), "build_workflow_branch": lambda: create_build_workflow_tree_branch( - workflow, package_meta, release_meta, github_client, "" + package, release_meta, package, github_client, "" ), + "demo_sequence": lambda: create_sequence_branch(), + "demo_selector": lambda: create_selector_branch(), } if name not in ppa_creators: @@ -332,7 +355,7 @@ def release_bht( ), ) -> None: """Run release using behaviour tree implementation.""" - setup_logging(logging.DEBUG) + setup_logging() config_path = config_file or "config.yaml" config = load_config(config_path) diff --git a/src/redis_release/logging_config.py b/src/redis_release/logging_config.py index 1470d79..1f439d3 100644 --- a/src/redis_release/logging_config.py +++ b/src/redis_release/logging_config.py @@ -1,49 +1,103 @@ """Logging configuration with Rich handler for beautiful colored output.""" import logging +import os +from typing import Optional from rich.logging import RichHandler +def _get_log_level_from_env() -> Optional[int]: + """Get log level from environment variables. 
+ + Checks for LOG_LEVEL and LOGGING_LEVEL environment variables. + Supports both numeric values (10, 20, 30, 40, 50) and string values + (DEBUG, INFO, WARNING, ERROR, CRITICAL). + + Returns: + Log level as integer, or None if not found/invalid + """ + for env_var in ["LOG_LEVEL", "LOGGING_LEVEL"]: + level_str = os.getenv(env_var) + if level_str: + # Try to parse as integer first + try: + return int(level_str) + except ValueError: + pass + + # Try to parse as string level name + level_str = level_str.upper() + level_map = { + "DEBUG": logging.DEBUG, + "INFO": logging.INFO, + "WARNING": logging.WARNING, + "WARN": logging.WARNING, + "ERROR": logging.ERROR, + "CRITICAL": logging.CRITICAL, + "FATAL": logging.CRITICAL, + } + if level_str in level_map: + return level_map[level_str] + + return None + + def setup_logging( - level: int = logging.INFO, + level: Optional[int] = None, show_path: bool = True, third_party_level: int = logging.WARNING, + log_file: Optional[str] = None, ) -> None: - """Configure logging with Rich handler. + """Configure logging with Rich handler for beautiful colored output. Args: - level: Logging level (e.g., logging.INFO, logging.DEBUG) + level: Logging level (e.g., logging.INFO, logging.DEBUG). + If None, will check LOG_LEVEL or LOGGING_LEVEL environment variables. + Defaults to logging.INFO if no environment variable is set. show_path: Whether to show file path and line numbers in logs third_party_level: Logging level for third-party libraries (botocore, boto3, etc.) 
+ log_file: Optional file path to also log to a file + """ + # Determine the actual log level to use + if level is None: + level = _get_log_level_from_env() + if level is None: + level = logging.INFO - Example: - >>> from redis_release.logging_config import setup_logging - >>> import logging - >>> setup_logging(level=logging.DEBUG) - >>> logger = logging.getLogger(__name__) - >>> logger.info("[blue]Hello[/blue] [green]World[/green]") + handler = RichHandler( + rich_tracebacks=True, + show_time=True, + show_level=True, + show_path=show_path, + markup=True, + tracebacks_show_locals=True, + omit_repeated_times=False, + ) + + handlers = [handler] + + # Add file handler if log_file is specified + if log_file: + file_handler = logging.FileHandler(log_file) + file_handler.setLevel(level) + formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + file_handler.setFormatter(formatter) + handlers.append(file_handler) - # To see botocore debug logs: - >>> setup_logging(level=logging.DEBUG, third_party_level=logging.DEBUG) - """ logging.basicConfig( level=level, - format="[cyan1]%(name)s:[/cyan1] %(message)s", + format="%(name)s: %(message)s", datefmt="[%X]", - handlers=[ - RichHandler( - rich_tracebacks=True, - show_time=True, - show_level=True, - show_path=show_path, - markup=True, # Enable Rich markup in log messages - tracebacks_show_locals=True, # Show local variables in tracebacks - omit_repeated_times=False, # Force timestamp on every line - ) - ], + handlers=handlers, + force=True, ) + # Set root logger to the desired level + logging.getLogger().setLevel(level) + # Optionally reduce noise from some verbose libraries logging.getLogger("asyncio").setLevel(third_party_level) logging.getLogger("aiohttp").setLevel(third_party_level) From 9b4cd80b2076eeb9926bb28021b93f877a9b4f45 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Wed, 15 Oct 2025 15:01:36 +0300 Subject: [PATCH 22/39] Test aws access --- .github/workflows/test.yml | 24 
++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..58686a4 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,24 @@ +on: + push: + branches: + - bht + + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + + - name: Configure aws credentials + uses: aws-actions/configure-aws-credentials@v1.7.0 + with: + role-to-assume: ${{ vars.AWS_IAM_ROLE_ARN }} + aws-region: us-east-1 + + - name: Test S3 bucket access + shell: bash + run: | + aws s3 ls \ No newline at end of file From 2c14b6dd128deec00e7de2220c60bf8732fab216 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 17 Oct 2025 18:38:29 +0300 Subject: [PATCH 23/39] Add switch back to original branch in ensure branch action --- .github/actions/ensure-release-branch/ensure-release-branch.sh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/actions/ensure-release-branch/ensure-release-branch.sh b/.github/actions/ensure-release-branch/ensure-release-branch.sh index a9aa125..e5597f4 100755 --- a/.github/actions/ensure-release-branch/ensure-release-branch.sh +++ b/.github/actions/ensure-release-branch/ensure-release-branch.sh @@ -120,6 +120,8 @@ if execute_command git ls-remote --heads origin "$RELEASE_VERSION_BRANCH" | grep execute_command --ignore-exit-code 1 --no-std -- git diff --quiet --cached "origin/$RELEASE_VERSION_BRANCH" diff_result=$last_cmd_result execute_command --ignore-errors --no-std -- git merge --abort + # Switch back to original branch + execute_command --no-std -- git switch - if [ "$diff_result" -eq 1 ]; then echo "Found file differences between $RELEASE_BRANCH and $RELEASE_VERSION_BRANCH" execute_command --no-std -- git diff --name-only "origin/$RELEASE_VERSION_BRANCH" "origin/$RELEASE_BRANCH" From 3373b3be12eecba0916ca87608c19981ab3af97c Mon Sep 17 00:00:00 2001 From: Petar 
Shtuchkin Date: Fri, 17 Oct 2025 18:39:05 +0300 Subject: [PATCH 24/39] Run tests --- .github/workflows/run-tests.yml | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 .github/workflows/run-tests.yml diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml new file mode 100644 index 0000000..bfba114 --- /dev/null +++ b/.github/workflows/run-tests.yml @@ -0,0 +1,22 @@ +on: + push: + pull_request: + + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Install packages + run: | + python -m venv venv + . venv/bin/activate + pip install -e .[dev] + + - name: Run tests + run: | + . venv/bin/activate + pytest \ No newline at end of file From ac78108eadaab41d5eb07823a7cff2204161d773 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 17 Oct 2025 21:57:40 +0300 Subject: [PATCH 25/39] Fix backchaining Search anchor point only in Selector, Sequence nodes to avoid Decorators with children When latching multiple chains always latch next chain to first one to avoid problems that may arise when last chain were simplified to one action and don't have a sequence anymore --- src/redis_release/bht/backchain.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/redis_release/bht/backchain.py b/src/redis_release/bht/backchain.py index fa37b74..16ffa1c 100644 --- a/src/redis_release/bht/backchain.py +++ b/src/redis_release/bht/backchain.py @@ -17,8 +17,14 @@ def find_chain_anchor_point( root: Behaviour, ) -> Sequence: + """Find the anchor point (Sequence) to which we can latch the next chain. + + We assume that the anchor point is the leftmost non empty Sequence in the tree. 
+ """ for child in root.children: - if len(child.children) > 1: + if (isinstance(child, Sequence) or isinstance(child, Selector)) and len( + child.children + ) > 0: return find_chain_anchor_point(child) if isinstance(root, Sequence): return root @@ -31,7 +37,6 @@ def latch_chains(*chains: Union[Selector, Sequence]) -> None: first = chains[0] for chain in chains[1:]: latch_chain_to_chain(first, chain) - first = chain def latch_chain_to_chain( @@ -55,7 +60,7 @@ def latch_chain_to_chain( next_postcondition: Optional[Behaviour] = None anchor_precondition: Optional[Behaviour] = None - logger.debug(f"Latching {next.name} to {anchor_point.name}") + logger.debug(f'Latching "{next.name}" to "{anchor_point.name}"') # Trying to guess from the structure which node may be a postcondition # Later we compare it with the anchor point precondition and when they match From a7cf701948f433de29fdff17a1772df7944ee0cf Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 17 Oct 2025 22:00:22 +0300 Subject: [PATCH 26/39] Remove test workflow --- .github/workflows/test.yml | 24 ------------------------ 1 file changed, 24 deletions(-) delete mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index 58686a4..0000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,24 +0,0 @@ -on: - push: - branches: - - bht - - -jobs: - test: - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - - - - name: Configure aws credentials - uses: aws-actions/configure-aws-credentials@v1.7.0 - with: - role-to-assume: ${{ vars.AWS_IAM_ROLE_ARN }} - aws-region: us-east-1 - - - name: Test S3 bucket access - shell: bash - run: | - aws s3 ls \ No newline at end of file From cfc490a12ed231a74f4d778e06fbc4b9e34009d9 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 17 Oct 2025 22:05:50 +0300 Subject: [PATCH 27/39] Separate trigger inputs, logging improvements --- 
src/redis_release/bht/behaviours.py | 229 +++++++++++++++++++++++++--- src/redis_release/bht/ppas.py | 52 +++++++ src/redis_release/bht/state.py | 50 ++++-- src/redis_release/bht/tree.py | 49 +++++- src/redis_release/cli.py | 8 +- src/redis_release/config.py | 3 + src/redis_release/models.py | 23 ++- src/redis_release/orchestrator.py | 8 +- 8 files changed, 368 insertions(+), 54 deletions(-) diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index c72298e..767f0aa 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -15,6 +15,7 @@ import json import logging import re +import stat import uuid from datetime import datetime from token import OP @@ -28,7 +29,14 @@ from redis_release.bht.state import reset_model_to_defaults from ..github_client_async import GitHubClientAsync -from ..models import WorkflowConclusion, WorkflowRun, WorkflowStatus +from ..models import ( + PackageType, + RedisVersion, + ReleaseType, + WorkflowConclusion, + WorkflowRun, + WorkflowStatus, +) from .decorators import FlagGuard from .logging_wrapper import PyTreesLoggerWrapper from .state import Package, PackageMeta, ReleaseMeta, Workflow @@ -55,6 +63,12 @@ def log_exception_and_return_failure(self, e: Exception) -> Status: self.logger._logger.error(f"[red]Full traceback:[/red]", exc_info=True) return Status.FAILURE + def log_once(self, key: str, container: Dict[str, bool]) -> bool: + if key not in container: + container[key] = True + return True + return False + class ReleaseAction(LoggingAction): task: Optional[asyncio.Task[Any]] = None @@ -100,7 +114,6 @@ def initialise(self) -> None: return try: - from ..models import RedisVersion self.release_version = RedisVersion.parse(self.release_meta.tag) self.logger.debug( @@ -141,9 +154,12 @@ def update(self) -> Status: if detected_branch: self.package_meta.ref = detected_branch - self.logger.info( - f"[green]Target ref identified:[/green] {self.package_meta.ref}" - ) + if 
self.log_once( + "target_ref_identified", self.package_meta.ephemeral.log_once_flags + ): + self.logger.info( + f"[green]Target ref identified:[/green] {self.package_meta.ref}" + ) self.feedback_message = f"Target ref set to {self.package_meta.ref}" return Status.SUCCESS else: @@ -255,6 +271,12 @@ def initialise(self) -> None: self.workflow.inputs["release_tag"] = self.release_meta.tag ref = self.package_meta.ref if self.package_meta.ref is not None else "main" self.workflow.ephemeral.trigger_attempted = True + if self.log_once( + "workflow_trigger_start", self.workflow.ephemeral.log_once_flags + ): + self.logger.info( + f"Triggering workflow {self.workflow.workflow_file}, ref: {ref}, uuid: {self.workflow.uuid}" + ) self.task = asyncio.create_task( self.github_client.trigger_workflow( self.package_meta.repo, @@ -273,9 +295,12 @@ def update(self) -> Status: self.task.result() self.workflow.triggered_at = datetime.now() - logger.info( - f"[green]Workflow triggered successfully:[/green] {self.workflow.uuid}" - ) + if self.log_once( + "workflow_triggered", self.workflow.ephemeral.log_once_flags + ): + logger.info( + f"[green]Workflow triggered successfully:[/green] {self.workflow.uuid}" + ) self.feedback_message = "workflow triggered" return Status.SUCCESS except Exception as e: @@ -308,7 +333,12 @@ def initialise(self) -> None: "[red]Workflow UUID is None - cannot identify workflow[/red]" ) return - + if self.log_once( + "workflow_identify_start", self.workflow.ephemeral.log_once_flags + ): + self.logger.info( + f"Start identifying workflow {self.workflow.workflow_file}, uuid: {self.workflow.uuid}" + ) self.task = asyncio.create_task( self.github_client.identify_workflow( self.package_meta.repo, self.workflow.workflow_file, self.workflow.uuid @@ -328,9 +358,12 @@ def update(self) -> Status: return Status.FAILURE self.workflow.run_id = result.run_id - self.logger.info( - f"[green]Workflow found successfully:[/green] uuid: {self.workflow.uuid}, run_id: 
{self.workflow.run_id}" - ) + if self.log_once( + "workflow_identified", self.workflow.ephemeral.log_once_flags + ): + self.logger.info( + f"[green]Workflow found successfully:[/green] uuid: {self.workflow.uuid}, run_id: {self.workflow.run_id}" + ) self.feedback_message = ( f"Workflow identified, run_id: {self.workflow.run_id}" ) @@ -360,6 +393,12 @@ def initialise(self) -> None: ) return + if self.log_once( + "workflow_status_update", self.workflow.ephemeral.log_once_flags + ): + self.logger.info( + f"Start checking workflow {self.workflow.workflow_file}, run_id: {self.workflow.run_id} status" + ) self.task = asyncio.create_task( self.github_client.get_workflow_run( self.package_meta.repo, self.workflow.run_id @@ -374,6 +413,12 @@ def update(self) -> Status: return Status.RUNNING result = self.task.result() + if self.log_once( + "workflow_status_current", self.workflow.ephemeral.log_once_flags + ): + self.logger.info( + f"Workflow {self.workflow.workflow_file}, run_id: {self.workflow.run_id} current status: {result.status}, {result.conclusion}" + ) if self.workflow.status != result.status: self.logger.info( f"Workflow {self.workflow.workflow_file}({self.workflow.run_id}) status changed: {self.workflow.status} -> {result.status}" @@ -583,6 +628,94 @@ def update(self) -> Status: # type: ignore return Status.SUCCESS +class GenericWorkflowInputs(ReleaseAction): + def __init__( + self, + name: str, + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str = "", + ) -> None: + self.workflow = workflow + self.package_meta = package_meta + self.release_meta = release_meta + super().__init__(name=name, log_prefix=log_prefix) + + def update(self) -> Status: + return Status.SUCCESS + + +def create_prepare_build_workflow_inputs( + name: str, + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str, +) -> Behaviour: + cls_map = { + PackageType.DEBIAN: DebianWorkflowInputs, + } + + selected_class = 
( + cls_map.get(package_meta.package_type, GenericWorkflowInputs) + if package_meta.package_type + else GenericWorkflowInputs + ) + return selected_class( + name, + workflow, + package_meta, + release_meta, + log_prefix=log_prefix, + ) + + +def create_prepare_publish_workflow_inputs( + name: str, + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str, +) -> Behaviour: + cls_map = { + PackageType.DEBIAN: DebianWorkflowInputs, + } + + selected_class = ( + cls_map.get(package_meta.package_type, GenericWorkflowInputs) + if package_meta.package_type + else GenericWorkflowInputs + ) + return selected_class( + name, + workflow, + package_meta, + release_meta, + log_prefix=log_prefix, + ) + + +class DebianWorkflowInputs(ReleaseAction): + def __init__( + self, + name: str, + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str = "", + ) -> None: + self.workflow = workflow + self.package_meta = package_meta + self.release_meta = release_meta + super().__init__(name=f"{name} - debian", log_prefix=log_prefix) + + def update(self) -> Status: + if self.release_meta.release_type is not None: + self.workflow.inputs["release_type"] = self.release_meta.release_type.value + return Status.SUCCESS + + ### Conditions ### @@ -595,6 +728,10 @@ def __init__( def update(self) -> Status: if self.package_meta.ref is not None: + if self.log_once( + "target_ref_identified", self.package_meta.ephemeral.log_once_flags + ): + self.logger.info(f"Target ref identified: {self.package_meta.ref}") return Status.SUCCESS return Status.FAILURE @@ -607,6 +744,12 @@ def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: def update(self) -> Status: self.logger.debug(f"IsWorkflowTriggered: {self.workflow}") if self.workflow.triggered_at is not None: + if self.log_once( + "workflow_triggered", self.workflow.ephemeral.log_once_flags + ): + self.logger.info( + f"Workflow is triggered at: 
{self.workflow.triggered_at}" + ) return Status.SUCCESS return Status.FAILURE @@ -619,6 +762,12 @@ def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: def update(self) -> Status: self.logger.debug(f"{self.workflow}") if self.workflow.run_id is not None: + if self.log_once( + "workflow_identified", self.workflow.ephemeral.log_once_flags + ): + self.logger.info( + f"Workflow is identified, run_id: {self.workflow.run_id}" + ) return Status.SUCCESS return Status.FAILURE @@ -630,6 +779,10 @@ def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: def update(self) -> Status: if self.workflow.status == WorkflowStatus.COMPLETED: + if self.log_once( + "workflow_completed", self.workflow.ephemeral.log_once_flags + ): + self.logger.info(f"Workflow is completed") return Status.SUCCESS return Status.FAILURE @@ -641,6 +794,10 @@ def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: def update(self) -> Status: if self.workflow.conclusion == WorkflowConclusion.SUCCESS: + if self.log_once( + "workflow_successful", self.workflow.ephemeral.log_once_flags + ): + self.logger.info(f"Workflow completed with success status") return Status.SUCCESS return Status.FAILURE @@ -652,6 +809,10 @@ def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: def update(self) -> Status: if self.workflow.artifacts is not None: + if self.log_once( + "workflow_artifacts", self.workflow.ephemeral.log_once_flags + ): + self.logger.info(f"Workflow has artifacts") return Status.SUCCESS return Status.FAILURE @@ -663,11 +824,13 @@ def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: def update(self) -> Status: if self.workflow.result is not None: + if self.log_once("workflow_result", self.workflow.ephemeral.log_once_flags): + self.logger.info(f"Workflow is successful and has result") return Status.SUCCESS return Status.FAILURE -class NeedToPublish(LoggingAction): +class 
NeedToPublishRelease(LoggingAction): """Check the release type and package configuration to determine if we need to run publish workflow.""" def __init__( @@ -682,20 +845,44 @@ def __init__( super().__init__(name=name, log_prefix=log_prefix) def update(self) -> Status: - # Check if this is an internal release by matching the pattern -int\d*$ in the tag - if self.release_meta.tag and re.search(r"-int\d*$", self.release_meta.tag): - self.logger.debug(f"Asssuming internal release: {self.release_meta.tag}") + if self.release_meta.release_type == ReleaseType.INTERNAL: if self.package_meta.publish_internal_release: self.logger.debug( - f"Publishing internal release: {self.release_meta.tag}" + f"Internal release requires publishing: {self.release_meta.tag}" ) return Status.SUCCESS - self.logger.debug( - f"Skip publishing internal release: {self.release_meta.tag}" - ) + else: + self.logger.debug( + f"Skip publishing internal release: {self.release_meta.tag}" + ) return Status.FAILURE + return Status.FAILURE + + +class DetectReleaseType(LoggingAction): + def __init__( + self, name: str, release_meta: ReleaseMeta, log_prefix: str = "" + ) -> None: + self.release_meta = release_meta + super().__init__(name=name, log_prefix=log_prefix) - self.logger.debug(f"Public release: {self.release_meta.tag}") + def update(self) -> Status: + if self.release_meta.release_type is not None: + if self.log_once( + "release_type_detected", self.release_meta.ephemeral.log_once_flags + ): + self.logger.info( + f"Detected release type: {self.release_meta.release_type}" + ) + return Status.SUCCESS + if self.release_meta.tag and re.search(r"-int\d*$", self.release_meta.tag): + self.release_meta.release_type = ReleaseType.INTERNAL + else: + self.release_meta.release_type = ReleaseType.PUBLIC + self.log_once( + "release_type_detected", self.release_meta.ephemeral.log_once_flags + ) + self.logger.info(f"Detected release type: {self.release_meta.release_type}") return Status.SUCCESS diff --git 
a/src/redis_release/bht/ppas.py b/src/redis_release/bht/ppas.py index e658416..ccf5298 100644 --- a/src/redis_release/bht/ppas.py +++ b/src/redis_release/bht/ppas.py @@ -15,6 +15,7 @@ from .backchain import create_PPA from .behaviours import ( AttachReleaseHandleToPublishWorkflow, + DetectReleaseType, HasWorkflowArtifacts, HasWorkflowResult, IsTargetRefIdentified, @@ -22,6 +23,8 @@ IsWorkflowIdentified, IsWorkflowSuccessful, IsWorkflowTriggered, + create_prepare_build_workflow_inputs, + create_prepare_publish_workflow_inputs, ) from .composites import ( DownloadArtifactsListGuarded, @@ -129,6 +132,19 @@ def create_identify_target_ref_ppa( github_client, log_prefix=log_prefix, ), + IsTargetRefIdentified( + "Is Target Ref Identified?", package_meta, log_prefix=log_prefix + ), + ) + + +def create_detect_release_type_ppa( + release_meta: ReleaseMeta, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Detect Release Type", + DetectReleaseType("Detect Release Type", release_meta, log_prefix=log_prefix), ) @@ -194,3 +210,39 @@ def create_attach_release_handle_ppa( log_prefix=log_prefix, ), ) + + +def create_build_workflow_inputs_ppa( + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Set Build Workflow Inputs", + create_prepare_build_workflow_inputs( + "Set Build Workflow Inputs", + workflow, + package_meta, + release_meta, + log_prefix=log_prefix, + ), + ) + + +def create_publish_workflow_inputs_ppa( + workflow: Workflow, + package_meta: PackageMeta, + release_meta: ReleaseMeta, + log_prefix: str, +) -> Union[Selector, Sequence]: + return create_PPA( + "Set Publish Workflow Inputs", + create_prepare_publish_workflow_inputs( + "Set Publish Workflow Inputs", + workflow, + package_meta, + release_meta, + log_prefix=log_prefix, + ), + ) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index e0c5c57..2203add 100644 --- 
a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -11,7 +11,13 @@ from rich.pretty import pretty_repr from rich.table import Table -from redis_release.models import WorkflowConclusion, WorkflowStatus +from redis_release.models import ( + PackageType, + ReleaseType, + WorkflowConclusion, + WorkflowStatus, + WorkflowType, +) from redis_release.state_manager import S3Backed, logger from ..config import Config @@ -31,9 +37,11 @@ class WorkflowEphemeral(BaseModel): timed_out: bool = False artifacts_download_failed: bool = False extract_result_failed: bool = False + log_once_flags: Dict[str, bool] = Field(default_factory=dict) class Workflow(BaseModel): + workflow_type: Optional[WorkflowType] = None workflow_file: str = "" inputs: Dict[str, str] = Field(default_factory=dict) uuid: Optional[str] = None @@ -56,11 +64,13 @@ class PackageMetaEphemeral(BaseModel): force_rebuild: bool = False identify_ref_failed: bool = False + log_once_flags: Dict[str, bool] = Field(default_factory=dict) class PackageMeta(BaseModel): """Metadata for a package.""" + package_type: Optional[PackageType] = None repo: str = "" ref: Optional[str] = None publish_internal_release: bool = False @@ -77,10 +87,20 @@ class Package(BaseModel): publish: Workflow = Field(default_factory=Workflow) +class ReleaseMetaEphemeral(BaseModel): + """Ephemeral release metadata that is not persisted.""" + + log_once_flags: Dict[str, bool] = Field(default_factory=dict) + + class ReleaseMeta(BaseModel): """Metadata for the release.""" tag: Optional[str] = None + release_type: Optional[ReleaseType] = None + ephemeral: ReleaseMetaEphemeral = Field( + default_factory=ReleaseMetaEphemeral, exclude=True + ) class ReleaseState(BaseModel): @@ -94,6 +114,11 @@ def from_config(cls, config: Config) -> "ReleaseState": """Build ReleaseState from config with default values.""" packages = {} for package_name, package_config in config.packages.items(): + if not isinstance(package_config.package_type, 
PackageType): + raise ValueError( + f"Package '{package_name}': package_type must be a PackageType, " + f"got {type(package_config.package_type).__name__}" + ) # Validate and get build workflow file if not isinstance(package_config.build_workflow, str): raise ValueError( @@ -120,11 +145,13 @@ def from_config(cls, config: Config) -> "ReleaseState": package_meta = PackageMeta( repo=package_config.repo, ref=package_config.ref, + package_type=package_config.package_type, publish_internal_release=package_config.publish_internal_release, ) # Initialize build workflow build_workflow = Workflow( + workflow_type=WorkflowType.BUILD, workflow_file=package_config.build_workflow, inputs=package_config.build_inputs.copy(), timeout_minutes=package_config.build_timeout_minutes, @@ -132,6 +159,7 @@ def from_config(cls, config: Config) -> "ReleaseState": # Initialize publish workflow publish_workflow = Workflow( + workflow_type=WorkflowType.PUBLISH, workflow_file=package_config.publish_workflow, inputs=package_config.publish_inputs.copy(), timeout_minutes=package_config.publish_timeout_minutes, @@ -346,7 +374,7 @@ def get(self, tag: str) -> Optional[dict]: ReleaseState object or None if not found """ state_key = f"release-state/{tag}-blackboard.json" - logger.info(f"Loading blackboard for tag: {tag}") + logger.debug(f"Loading blackboard for tag: {tag}") if self.s3_client is None: raise RuntimeError("S3 client not initialized") @@ -355,13 +383,13 @@ def get(self, tag: str) -> Optional[dict]: response = self.s3_client.get_object(Bucket=self.bucket_name, Key=state_key) state_data: dict = json.loads(response["Body"].read().decode("utf-8")) - logger.info("Blackboard loaded successfully") + logger.debug("Blackboard loaded successfully") return state_data except ClientError as e: if e.response["Error"]["Code"] == "NoSuchKey": - logger.info(f"No existing blackboard found for tag: {tag}") + logger.debug(f"No existing blackboard found for tag: {tag}") return None else: logger.error(f"Failed 
to load blackboard: {e}") @@ -374,7 +402,7 @@ def put(self, tag: str, state: dict) -> None: state: ReleaseState object to save """ state_key = f"release-state/{tag}-blackboard.json" - logger.info(f"Saving blackboard for tag: {tag}") + logger.debug(f"Saving blackboard for tag: {tag}") if self.s3_client is None: raise RuntimeError("S3 client not initialized") @@ -392,7 +420,7 @@ def put(self, tag: str, state: dict) -> None: }, ) - logger.info("Blackboard saved successfully") + logger.debug("Blackboard saved successfully") except ClientError as e: logger.error(f"Failed to save blackboard: {e}") @@ -408,7 +436,7 @@ def acquire_lock(self, tag: str) -> bool: True if lock acquired successfully """ lock_key = f"release-locks/{tag}.lock" - logger.info(f"Acquiring lock for tag: {tag}") + logger.debug(f"Acquiring lock for tag: {tag}") if self.s3_client is None: raise RuntimeError("S3 client not initialized") @@ -429,7 +457,7 @@ def acquire_lock(self, tag: str) -> bool: IfNoneMatch="*", ) - logger.info("Lock acquired successfully") + logger.debug("Lock acquired successfully") return True except ClientError as e: @@ -460,7 +488,7 @@ def release_lock(self, tag: str) -> bool: True if lock released successfully """ lock_key = f"release-locks/{tag}.lock" - logger.info(f"Releasing lock for tag: {tag}") + logger.debug(f"Releasing lock for tag: {tag}") if self.s3_client is None: raise RuntimeError("S3 client not initialized") @@ -475,12 +503,12 @@ def release_lock(self, tag: str) -> bool: return False self.s3_client.delete_object(Bucket=self.bucket_name, Key=lock_key) - logger.info("Lock released successfully") + logger.debug("Lock released successfully") return True except ClientError as e: if e.response["Error"]["Code"] == "NoSuchKey": - logger.info(f"No lock found for tag: {tag}") + logger.debug(f"No lock found for tag: {tag}") return True else: logger.error(f"Failed to release lock: {e}") diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 
24f56e3..a62bd80 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -2,7 +2,7 @@ import logging import os from contextlib import contextmanager -from typing import Any, Iterator, Optional, Set, Tuple, Union +from typing import Any, Iterator, List, Optional, Set, Tuple, Union from py_trees.behaviour import Behaviour from py_trees.common import Status @@ -18,7 +18,7 @@ from ..github_client_async import GitHubClientAsync from .args import ReleaseArgs from .backchain import latch_chains -from .behaviours import NeedToPublish +from .behaviours import NeedToPublishRelease from .composites import ( ParallelBarrier, ResetPackageStateGuarded, @@ -27,10 +27,13 @@ ) from .ppas import ( create_attach_release_handle_ppa, + create_build_workflow_inputs_ppa, + create_detect_release_type_ppa, create_download_artifacts_ppa, create_extract_artifact_result_ppa, create_find_workflow_by_uuid_ppa, create_identify_target_ref_ppa, + create_publish_workflow_inputs_ppa, create_trigger_workflow_ppa, create_workflow_completion_ppa, create_workflow_success_ppa, @@ -213,6 +216,9 @@ def create_build_workflow_tree_branch( package_name: str, ) -> Union[Selector, Sequence]: + build_workflow_args = create_build_workflow_inputs_ppa( + package.build, package.meta, release_meta, log_prefix=f"{package_name}.build" + ) build_workflow = create_workflow_with_result_tree_branch( "release_handle", package.build, @@ -220,6 +226,7 @@ def create_build_workflow_tree_branch( release_meta, github_client, f"{package_name}.build", + trigger_preconditions=[build_workflow_args], ) assert isinstance(build_workflow, Selector) @@ -244,6 +251,15 @@ def create_publish_workflow_tree_branch( github_client: GitHubClientAsync, package_name: str, ) -> Union[Selector, Sequence]: + attach_release_handle = create_attach_release_handle_ppa( + build_workflow, publish_workflow, log_prefix=f"{package_name}.publish" + ) + publish_workflow_args = create_publish_workflow_inputs_ppa( + publish_workflow, + 
package_meta, + release_meta, + log_prefix=f"{package_name}.publish", + ) workflow_result = create_workflow_with_result_tree_branch( "release_info", publish_workflow, @@ -251,15 +267,11 @@ def create_publish_workflow_tree_branch( release_meta, github_client, f"{package_name}.publish", + trigger_preconditions=[publish_workflow_args, attach_release_handle], ) - attach_release_handle = create_attach_release_handle_ppa( - build_workflow, publish_workflow, log_prefix=f"{package_name}.publish" - ) - latch_chains(workflow_result, attach_release_handle) - not_need_to_publish = Inverter( "Not", - NeedToPublish( + NeedToPublishRelease( "Need To Publish?", package_meta, release_meta, @@ -287,10 +299,14 @@ def create_workflow_with_result_tree_branch( release_meta: ReleaseMeta, github_client: GitHubClientAsync, package_name: str, + trigger_preconditions: Optional[List[Union[Sequence, Selector]]] = None, ) -> Union[Selector, Sequence]: """ Creates a workflow process that succedes when the workflow is successful and a result artifact is extracted and json decoded. 
+ + Args: + trigger_preconditions: List of preconditions to add to the workflow trigger """ workflow_result = create_extract_result_tree_branch( artifact_name, @@ -305,6 +321,7 @@ def create_workflow_with_result_tree_branch( release_meta, github_client, package_name, + trigger_preconditions, ) latch_chains(workflow_result, workflow_complete) @@ -318,7 +335,13 @@ def create_workflow_complete_tree_branch( release_meta: ReleaseMeta, github_client: GitHubClientAsync, log_prefix: str, + trigger_preconditions: Optional[List[Union[Sequence, Selector]]] = None, ) -> Union[Selector, Sequence]: + """ + + Args: + trigger_preconditions: List of preconditions to add to the workflow trigger + """ workflow_complete = create_workflow_completion_ppa( workflow, package_meta, @@ -338,17 +361,24 @@ def create_workflow_complete_tree_branch( github_client, log_prefix, ) + if trigger_preconditions: + latch_chains(trigger_workflow, *trigger_preconditions) identify_target_ref = create_identify_target_ref_ppa( package_meta, release_meta, github_client, log_prefix, ) + detect_release_type = create_detect_release_type_ppa( + release_meta, + log_prefix, + ) latch_chains( workflow_complete, find_workflow_by_uud, trigger_workflow, identify_target_ref, + detect_release_type, ) return workflow_complete @@ -377,6 +407,9 @@ def create_extract_result_tree_branch( return extract_artifact_result +### Demo ### + + class DemoBehaviour(Behaviour): def __init__(self, name: str): super().__init__(name=name) diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index 451612d..fd4561a 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -88,7 +88,7 @@ def release( help="Force rebuild Docker image, ignoring existing state", ), release_type: ReleaseType = typer.Option( - ReleaseType.AUTO, "--release-type", help="Override release type detection" + None, "--release-type", help="Override release type detection" ), dry_run: bool = typer.Option( False, "--dry-run", help="Show what would 
be done without executing" @@ -258,6 +258,7 @@ def release_print_bht( release_tag=release_tag, force_rebuild=[], ) + setup_logging() if name: # Print specific PPA or tree branch @@ -353,6 +354,9 @@ def release_bht( "--force-rebuild", help="Force rebuild for specific packages (can be specified multiple times). Use 'all' to force rebuild all packages.", ), + tree_cutoff: int = typer.Option( + 2000, "--tree-cutoff", "-m", help="Max number of ticks to run the tree for" + ), ) -> None: """Run release using behaviour tree implementation.""" setup_logging() @@ -367,7 +371,7 @@ def release_bht( # Use context manager version with automatic lock management with initialize_tree_and_state(config, args) as (tree, _): - asyncio.run(async_tick_tock(tree)) + asyncio.run(async_tick_tock(tree, cutoff=tree_cutoff)) @app.command() diff --git a/src/redis_release/config.py b/src/redis_release/config.py index 062e38e..acb03ff 100644 --- a/src/redis_release/config.py +++ b/src/redis_release/config.py @@ -6,12 +6,15 @@ import yaml from pydantic import BaseModel, Field +from .models import PackageType + class PackageConfig(BaseModel): """Configuration for a package type.""" repo: str ref: Optional[str] = None + package_type: PackageType workflow_branch: str = "autodetect" publish_internal_release: bool = False build_workflow: Union[str, bool] = Field(default=False) diff --git a/src/redis_release/models.py b/src/redis_release/models.py index 08f0c35..ec4a6d2 100644 --- a/src/redis_release/models.py +++ b/src/redis_release/models.py @@ -8,12 +8,25 @@ from pydantic import BaseModel, Field +class WorkflowType(str, Enum): + """Workflow type enumeration.""" + + BUILD = "build" + PUBLISH = "publish" + + +class PackageType(str, Enum): + """Package type enumeration.""" + + DOCKER = "docker" + DEBIAN = "debian" + + class ReleaseType(str, Enum): """Release type enumeration.""" - AUTO = "auto" PUBLIC = "public" - PRIVATE = "private" + INTERNAL = "internal" class WorkflowStatus(str, Enum): @@ -32,12 
+45,6 @@ class WorkflowConclusion(str, Enum): FAILURE = "failure" -class PackageType(str, Enum): - """Package type enumeration.""" - - DOCKER = "docker" - - class WorkflowRun(BaseModel): """Represents a GitHub workflow run.""" diff --git a/src/redis_release/orchestrator.py b/src/redis_release/orchestrator.py index ff11a6d..7f44558 100644 --- a/src/redis_release/orchestrator.py +++ b/src/redis_release/orchestrator.py @@ -70,14 +70,14 @@ def _get_state_manager(self, dry_run: bool = False) -> StateManager: return self._state_manager def _determine_release_type( - self, tag: str, override: ReleaseType = ReleaseType.AUTO + self, tag: str, override: ReleaseType = None ) -> ReleaseType: """Determine release type from tag name.""" - if override != ReleaseType.AUTO: + if override is not None: return override if tag.endswith(tuple(f"-int{i}" for i in range(1, 100))): - return ReleaseType.PRIVATE + return ReleaseType.INTERNAL return ReleaseType.PUBLIC @@ -155,7 +155,7 @@ def execute_release( self, tag: str, force_rebuild: bool = False, - release_type: ReleaseType = ReleaseType.AUTO, + release_type: ReleaseType = None, dry_run: bool = False, ) -> ReleaseResult: """Execute the main release workflow. 
From 7fa15c1a7a8b294b232bfa55e7f077b56aa3b4f7 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 17 Oct 2025 22:08:47 +0300 Subject: [PATCH 28/39] Add prod config --- config.yaml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 config.yaml diff --git a/config.yaml b/config.yaml new file mode 100644 index 0000000..f8aac76 --- /dev/null +++ b/config.yaml @@ -0,0 +1,21 @@ +version: 1 +packages: + docker: + package_type: docker + repo: redis/docker-library-redis + build_workflow: release_build_and_test.yml + build_timeout_minutes: 45 + build_inputs: {} + publish_internal_release: no + publish_workflow: release_publish.yml # may be boolean false + publish_timeout_minutes: 10 + publish_inputs: {} + debian: + package_type: debian + repo: redis/redis-debian + build_workflow: release_build_and_test.yml + build_inputs: {} + publish_internal_release: yes + publish_workflow: release_publish.yml + publish_timeout_minutes: 10 + publish_inputs: {} From 66f8041082c9c24aa21569b85d8609b36c1a2451 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Fri, 17 Oct 2025 22:29:57 +0300 Subject: [PATCH 29/39] Logging improvement, fix tests --- src/redis_release/bht/behaviours.py | 20 ++++++++++----- src/redis_release/github_client_async.py | 18 ++++++++------ src/tests/test_state.py | 31 ++++++++++++++++++++++++ 3 files changed, 55 insertions(+), 14 deletions(-) diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index 767f0aa..a1fb410 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -298,7 +298,7 @@ def update(self) -> Status: if self.log_once( "workflow_triggered", self.workflow.ephemeral.log_once_flags ): - logger.info( + self.logger.info( f"[green]Workflow triggered successfully:[/green] {self.workflow.uuid}" ) self.feedback_message = "workflow triggered" @@ -480,6 +480,9 @@ def initialise(self) -> None: ) return + self.logger.info( + f"Start getting artifacts for 
workflow {self.workflow.workflow_file}, run_id: {self.workflow.run_id}" + ) self.task = asyncio.create_task( self.github_client.get_workflow_artifacts( self.package_meta.repo, self.workflow.run_id @@ -495,9 +498,12 @@ def update(self) -> Status: result = self.task.result() self.workflow.artifacts = result - self.logger.info( - f"[green]Downloaded artifacts list:[/green] {len(result)} {result} artifacts" - ) + if self.log_once( + "workflow_artifacts_list", self.workflow.ephemeral.log_once_flags + ): + self.logger.info( + f"[green]Downloaded artifacts list:[/green] {len(result)} artifacts" + ) self.feedback_message = f"Downloaded {len(result)} artifacts" return Status.SUCCESS except Exception as e: @@ -810,7 +816,7 @@ def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: def update(self) -> Status: if self.workflow.artifacts is not None: if self.log_once( - "workflow_artifacts", self.workflow.ephemeral.log_once_flags + "workflow_artifacts_list", self.workflow.ephemeral.log_once_flags ): self.logger.info(f"Workflow has artifacts") return Status.SUCCESS @@ -825,7 +831,9 @@ def __init__(self, name: str, workflow: Workflow, log_prefix: str = "") -> None: def update(self) -> Status: if self.workflow.result is not None: if self.log_once("workflow_result", self.workflow.ephemeral.log_once_flags): - self.logger.info(f"Workflow is successful and has result") + self.logger.info( + f"Workflow {self.workflow.workflow_file}, run_id: {self.workflow.run_id} is successful and has result" + ) return Status.SUCCESS return Status.FAILURE diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py index 4db92de..846ad10 100644 --- a/src/redis_release/github_client_async.py +++ b/src/redis_release/github_client_async.py @@ -238,7 +238,7 @@ async def trigger_workflow( Returns: WorkflowRun object with basic information (workflow identification will be done separately) """ - logger.info(f"[blue]Triggering workflow[/blue] 
{workflow_file} in {repo}") + logger.debug(f"[blue]Triggering workflow[/blue] {workflow_file} in {repo}") logger.debug(f"Inputs: {inputs}") logger.debug(f"Ref: {ref}") logger.debug(f"Workflow UUID: [cyan]{inputs['workflow_uuid']}[/cyan]") @@ -264,7 +264,7 @@ async def trigger_workflow( timeout=30, error_context="trigger workflow", ) - logger.info(f"[green]Workflow triggered successfully[/green]") + logger.debug(f"[green]Workflow triggered successfully[/green]") return True except aiohttp.ClientError as e: logger.error(f"[red]Failed to trigger workflow:[/red] {e}") @@ -283,7 +283,9 @@ async def identify_workflow( for run in runs: extracted_uuid = self._extract_uuid(run.workflow_id) if extracted_uuid and extracted_uuid.lower() == workflow_uuid.lower(): - logger.info(f"[green]Found matching workflow run:[/green] {run.run_id}") + logger.debug( + f"[green]Found matching workflow run:[/green] {run.run_id}" + ) logger.debug(f"Workflow name: {run.workflow_id}") logger.debug(f"Extracted UUID: {extracted_uuid}") run.workflow_uuid = workflow_uuid @@ -436,7 +438,7 @@ async def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict Each artifact dictionary contains: id, archive_download_url, created_at, expires_at, updated_at, size_in_bytes, digest """ - logger.info(f"[blue]Getting artifacts for workflow {run_id} in {repo}[/blue]") + logger.debug(f"[blue]Getting artifacts for workflow {run_id} in {repo}[/blue]") url = f"https://api.github.com/repos/{repo}/actions/runs/{run_id}/artifacts" headers = { @@ -482,7 +484,7 @@ async def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict artifacts[artifact_name] = artifact_info if artifacts: - logger.info(f"[green]Found {len(artifacts)} artifacts[/green]") + logger.debug(f"[green]Found {len(artifacts)} artifacts[/green]") for artifact_name, artifact_info in artifacts.items(): size_mb = round( artifact_info.get("size_in_bytes", 0) / (1024 * 1024), 2 @@ -533,7 +535,7 @@ async def 
download_and_extract_json_result( logger.error(f"[red]{artifact_name} artifact has no ID[/red]") return None - logger.info( + logger.debug( f"[blue]Extracting {json_file_name} from artifact {artifact_id}[/blue]" ) @@ -570,7 +572,7 @@ async def download_and_extract_json_result( if json_file_name in zip_file.namelist(): with zip_file.open(json_file_name) as json_file_obj: result_data = json.load(json_file_obj) - logger.info( + logger.debug( f"[green]Successfully extracted {json_file_name}[/green]" ) return result_data @@ -643,7 +645,7 @@ async def list_remote_branches( if pattern: branches = [branch for branch in branches if re.match(pattern, branch)] - logger.info( + logger.debug( f"[green]Found {len(branches)} branches{' matching pattern' if pattern else ''}[/green]" ) if pattern: diff --git a/src/tests/test_state.py b/src/tests/test_state.py index b87daf5..53ed440 100644 --- a/src/tests/test_state.py +++ b/src/tests/test_state.py @@ -13,6 +13,7 @@ Workflow, ) from redis_release.config import Config, PackageConfig +from redis_release.models import PackageType class TestReleaseStateFromConfig: @@ -26,6 +27,7 @@ def test_from_config_with_valid_workflows(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ) @@ -50,6 +52,7 @@ def test_from_config_with_custom_timeout_values(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", build_timeout_minutes=60, publish_workflow="publish.yml", @@ -70,6 +73,7 @@ def test_from_config_with_ref(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, ref="release/8.0", build_workflow="build.yml", publish_workflow="publish.yml", @@ -88,6 +92,7 @@ def test_from_config_with_workflow_inputs(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + 
package_type=PackageType.DEBIAN, build_workflow="build.yml", build_inputs={"key1": "value1", "key2": "value2"}, publish_workflow="publish.yml", @@ -113,6 +118,7 @@ def test_from_config_with_all_optional_fields(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, ref="main", build_workflow="build.yml", build_timeout_minutes=60, @@ -140,6 +146,7 @@ def test_from_config_with_empty_build_workflow(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="", publish_workflow="publish.yml", ) @@ -156,6 +163,7 @@ def test_from_config_with_empty_publish_workflow(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="", ) @@ -172,6 +180,7 @@ def test_from_config_with_whitespace_only_build_workflow(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow=" ", publish_workflow="publish.yml", ) @@ -188,6 +197,7 @@ def test_from_config_with_whitespace_only_publish_workflow(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow=" ", ) @@ -204,11 +214,13 @@ def test_from_config_with_multiple_packages(self) -> None: packages={ "package1": PackageConfig( repo="test/repo1", + package_type=PackageType.DEBIAN, build_workflow="build1.yml", publish_workflow="publish1.yml", ), "package2": PackageConfig( repo="test/repo2", + package_type=PackageType.DOCKER, build_workflow="build2.yml", publish_workflow="publish2.yml", ), @@ -230,6 +242,7 @@ def test_from_config_error_message_includes_package_name(self) -> None: packages={ "my-special-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="", publish_workflow="publish.yml", ) @@ -246,6 +259,7 @@ def 
test_from_config_with_boolean_build_workflow(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow=False, publish_workflow="publish.yml", ) @@ -262,6 +276,7 @@ def test_from_config_with_boolean_publish_workflow(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow=False, ) @@ -343,6 +358,7 @@ def test_release_state_ephemeral_not_serialized(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ) @@ -409,6 +425,7 @@ def test_ephemeral_field_exists(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ) @@ -425,6 +442,7 @@ def test_force_rebuild_field_can_be_modified(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ) @@ -442,6 +460,7 @@ def test_ephemeral_not_serialized(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ) @@ -468,6 +487,7 @@ def test_state_syncer_sets_tag_from_args(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ) @@ -487,11 +507,13 @@ def test_state_syncer_sets_force_rebuild_from_args(self) -> None: packages={ "docker": PackageConfig( repo="test/docker", + package_type=PackageType.DOCKER, build_workflow="build.yml", publish_workflow="publish.yml", ), "redis": PackageConfig( repo="test/redis", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ), @@ -512,16 
+534,19 @@ def test_state_syncer_sets_multiple_force_rebuild_from_args(self) -> None: packages={ "docker": PackageConfig( repo="test/docker", + package_type=PackageType.DOCKER, build_workflow="build.yml", publish_workflow="publish.yml", ), "redis": PackageConfig( repo="test/redis", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ), "snap": PackageConfig( repo="test/snap", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ), @@ -543,6 +568,7 @@ def test_state_syncer_without_args(self) -> None: packages={ "test-package": PackageConfig( repo="test/repo", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ) @@ -565,16 +591,19 @@ def test_state_syncer_force_rebuild_all(self) -> None: packages={ "docker": PackageConfig( repo="test/docker", + package_type=PackageType.DOCKER, build_workflow="build.yml", publish_workflow="publish.yml", ), "redis": PackageConfig( repo="test/redis", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ), "snap": PackageConfig( repo="test/snap", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ), @@ -597,11 +626,13 @@ def test_state_syncer_force_rebuild_all_with_other_values(self) -> None: packages={ "docker": PackageConfig( repo="test/docker", + package_type=PackageType.DOCKER, build_workflow="build.yml", publish_workflow="publish.yml", ), "redis": PackageConfig( repo="test/redis", + package_type=PackageType.DEBIAN, build_workflow="build.yml", publish_workflow="publish.yml", ), From 3b4db87db764e6698f4f69f08a0879cbbc7599af Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Sat, 18 Oct 2025 13:13:22 +0300 Subject: [PATCH 30/39] only packages, read only mode, full reset --- src/redis_release/bht/args.py | 2 +- src/redis_release/bht/state.py | 18 ++++- src/redis_release/bht/tree.py | 135 ++++++++++++++++++++++++++++++- 
src/redis_release/cli.py | 144 +++++++-------------------------- 4 files changed, 177 insertions(+), 122 deletions(-) diff --git a/src/redis_release/bht/args.py b/src/redis_release/bht/args.py index 6178378..abf9809 100644 --- a/src/redis_release/bht/args.py +++ b/src/redis_release/bht/args.py @@ -10,4 +10,4 @@ class ReleaseArgs(BaseModel): release_tag: str force_rebuild: List[str] = Field(default_factory=list) - + only_packages: List[str] = Field(default_factory=list) diff --git a/src/redis_release/bht/state.py b/src/redis_release/bht/state.py index 2203add..c0e5c67 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -245,6 +245,7 @@ def __init__( storage: StateStorage, config: Config, args: "ReleaseArgs", + read_only: bool = False, ): self.tag = args.release_tag self.storage = storage @@ -253,8 +254,11 @@ def __init__( self.last_dump: Optional[str] = None self._state: Optional[ReleaseState] = None self._lock_acquired = False + self.read_only = read_only def __enter__(self) -> "StateSyncer": + if self.read_only: + return self """Acquire lock when entering context.""" if not self.storage.acquire_lock(self.tag): raise RuntimeError(f"Failed to acquire lock for tag: {self.tag}") @@ -263,6 +267,8 @@ def __enter__(self) -> "StateSyncer": return self def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + if self.read_only: + return """Release lock when exiting context.""" if self._lock_acquired: self.storage.release_lock(self.tag) @@ -273,7 +279,15 @@ def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: @property def state(self) -> ReleaseState: if self._state is None: - loaded = self.load() + loaded = None + if self.args.force_rebuild and "all" in self.args.force_rebuild: + logger.info( + "Force rebuild 'all' enabled, using default state based on config" + ) + loaded = self.default_state() + else: + loaded = self.load() + if loaded is None: self._state = self.default_state() else: @@ -315,6 +329,8 @@ def 
load(self) -> Optional[ReleaseState]: def sync(self) -> None: """Save state to storage backend if changed since last sync.""" + if self.read_only: + raise RuntimeError("Cannot sync read-only state") current_dump = self.state.model_dump_json(indent=2) if current_dump != self.last_dump: diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index a62bd80..d25af33 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -11,7 +11,6 @@ from py_trees.display import unicode_tree from py_trees.trees import BehaviourTree from py_trees.visitors import SnapshotVisitor -from rich.pretty import pretty_repr from rich.text import Text from ..config import Config @@ -52,6 +51,125 @@ logger = logging.getLogger(__name__) +class TreeInspector: + """Inspector for creating and inspecting behavior tree branches and PPAs.""" + + # List of available branch/PPA names + AVAILABLE_NAMES = [ + "workflow_success", + "workflow_completion", + "find_workflow", + "trigger_workflow", + "identify_target_ref", + "download_artifacts", + "extract_artifact_result", + "workflow_complete_branch", + "workflow_with_result_branch", + "publish_workflow_branch", + "build_workflow_branch", + "demo_sequence", + "demo_selector", + ] + + def __init__(self, release_tag: str): + """Initialize TreeInspector. + + Args: + release_tag: Release tag for creating mock ReleaseMeta + """ + self.release_tag = release_tag + + def get_names(self) -> List[str]: + """Get list of available branch/PPA names. + + Returns: + List of available names that can be passed to create_by_name() + """ + return self.AVAILABLE_NAMES.copy() + + def create_by_name(self, name: str) -> Union[Selector, Sequence, Behaviour]: + """Create a branch or PPA by name. 
+ + Args: + name: Name of the branch or PPA to create + + Returns: + The created behavior tree branch or PPA + + Raises: + ValueError: If the name is not found in the available branches + """ + if name not in self.AVAILABLE_NAMES: + available = ", ".join(self.get_names()) + raise ValueError(f"Unknown name '{name}'. Available options: {available}") + + # Create mock objects for PPA/branch creation + workflow = Workflow(workflow_file="test.yml", inputs={}) + package_meta = PackageMeta(repo="redis/redis", ref="main") + release_meta = ReleaseMeta(tag=self.release_tag) + github_client = GitHubClientAsync(token="dummy") + package = Package( + meta=package_meta, + build=workflow, + publish=Workflow(workflow_file="publish.yml", inputs={}), + ) + log_prefix = "test" + + # Create and return the requested branch/PPA + if name == "workflow_success": + return create_workflow_success_ppa(workflow, log_prefix) + elif name == "workflow_completion": + return create_workflow_completion_ppa( + workflow, package_meta, github_client, log_prefix + ) + elif name == "find_workflow": + return create_find_workflow_by_uuid_ppa( + workflow, package_meta, github_client, log_prefix + ) + elif name == "trigger_workflow": + return create_trigger_workflow_ppa( + workflow, package_meta, release_meta, github_client, log_prefix + ) + elif name == "identify_target_ref": + return create_identify_target_ref_ppa( + package_meta, release_meta, github_client, log_prefix + ) + elif name == "download_artifacts": + return create_download_artifacts_ppa( + workflow, package_meta, github_client, log_prefix + ) + elif name == "extract_artifact_result": + return create_extract_artifact_result_ppa( + "test-artifact", workflow, package_meta, github_client, log_prefix + ) + elif name == "workflow_complete_branch": + return create_workflow_complete_tree_branch( + workflow, package_meta, release_meta, github_client, "" + ) + elif name == "workflow_with_result_branch": + return create_workflow_with_result_tree_branch( + 
"artifact", workflow, package_meta, release_meta, github_client, "" + ) + elif name == "publish_workflow_branch": + return create_publish_workflow_tree_branch( + workflow, + workflow, + package_meta, + release_meta, + workflow, + github_client, + "", + ) + elif name == "build_workflow_branch": + return create_build_workflow_tree_branch( + package, release_meta, package, github_client, "" + ) + elif name == "demo_sequence": + return create_sequence_branch() + else: # name == "demo_selector" + return create_selector_branch() + + async def async_tick_tock(tree: BehaviourTree, cutoff: int = 100) -> None: """Drive Behaviour tree using async event loop @@ -98,6 +216,7 @@ def initialize_tree_and_state( config: Config, args: ReleaseArgs, storage: Optional[StateStorage] = None, + read_only: bool = False, ) -> Iterator[Tuple[BehaviourTree, StateSyncer]]: github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) @@ -109,9 +228,13 @@ def initialize_tree_and_state( storage=storage, config=config, args=args, + read_only=read_only, ) as state_syncer: root = create_root_node( - state_syncer.state, state_syncer.default_state(), github_client + state_syncer.state, + state_syncer.default_state(), + github_client, + args.only_packages, ) tree = BehaviourTree(root) @@ -149,7 +272,10 @@ def log_tree_state_with_markup(tree: BehaviourTree) -> None: def create_root_node( - state: ReleaseState, default_state: ReleaseState, github_client: GitHubClientAsync + state: ReleaseState, + default_state: ReleaseState, + github_client: GitHubClientAsync, + only_packages: Optional[List[str]] = None, ) -> Behaviour: root = ParallelBarrier( @@ -157,6 +283,9 @@ def create_root_node( children=[], ) for package_name, package in state.packages.items(): + if only_packages and package_name not in only_packages: + logger.info(f"Skipping package {package_name} as it's not in only_packages") + continue root.add_child( create_package_release_tree_branch( package, diff --git a/src/redis_release/cli.py 
b/src/redis_release/cli.py index fd4561a..2c93ca7 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -11,39 +11,10 @@ from rich.table import Table from redis_release.bht.args import ReleaseArgs -from redis_release.bht.state import ( - InMemoryStateStorage, - Package, - PackageMeta, - ReleaseMeta, - ReleaseState, - S3StateStorage, - StateSyncer, - Workflow, -) +from redis_release.bht.state import InMemoryStateStorage, S3StateStorage, StateSyncer -from .bht.ppas import ( - create_download_artifacts_ppa, - create_extract_artifact_result_ppa, - create_find_workflow_by_uuid_ppa, - create_identify_target_ref_ppa, - create_trigger_workflow_ppa, - create_workflow_completion_ppa, - create_workflow_success_ppa, -) -from .bht.tree import ( - async_tick_tock, - create_build_workflow_tree_branch, - create_extract_result_tree_branch, - create_publish_workflow_tree_branch, - create_selector_branch, - create_sequence_branch, - create_workflow_complete_tree_branch, - create_workflow_with_result_tree_branch, - initialize_tree_and_state, -) +from .bht.tree import TreeInspector, async_tick_tock, initialize_tree_and_state from .config import load_config -from .github_client_async import GitHubClientAsync from .logging_config import setup_logging from .models import ReleaseType from .orchestrator import ReleaseOrchestrator @@ -233,20 +204,12 @@ def release_print_bht( None, "--name", "-n", - help="""Name of specific PPA or tree branch to print. - PPAs: - 'workflow_success', - 'workflow_completion', - 'find_workflow', - 'trigger_workflow', - 'identify_target_ref', - 'download_artifacts', - 'extract_artifact_result'. - Tree branches: - 'workflow_complete_branch', - 'workflow_with_result_branch', - 'publish_workflow_branch', - 'build_workflow_branch'""", + help=f"Name of specific PPA or tree branch to print. 
Available: {', '.join(TreeInspector.AVAILABLE_NAMES)}", + ), + only_packages: Optional[List[str]] = typer.Option( + None, + "--only-packages", + help="Only process specific packages (can be specified multiple times)", ), ) -> None: """Print and render (using graphviz) the release behaviour tree or a specific PPA.""" @@ -257,85 +220,26 @@ def release_print_bht( args = ReleaseArgs( release_tag=release_tag, force_rebuild=[], + only_packages=only_packages or [], ) setup_logging() if name: - # Print specific PPA or tree branch - github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN", "dummy")) - - # Create mock state objects for PPA creation - workflow = Workflow(workflow_file="test.yml", inputs={}) - package_meta = PackageMeta(repo="redis/redis", ref="main") - release_meta = ReleaseMeta(tag=release_tag) - log_prefix = "test" - - # Create mock ReleaseState for tree branch functions - package = Package( - meta=package_meta, - build=workflow, - publish=Workflow(workflow_file="publish.yml", inputs={}), - ) - state = ReleaseState(meta=release_meta, packages={"docker": package}) - - # Map PPA names to creation functions - ppa_creators = { - "workflow_success": lambda: create_workflow_success_ppa( - workflow, log_prefix - ), - "workflow_completion": lambda: create_workflow_completion_ppa( - workflow, package_meta, github_client, log_prefix - ), - "find_workflow": lambda: create_find_workflow_by_uuid_ppa( - workflow, package_meta, github_client, log_prefix - ), - "trigger_workflow": lambda: create_trigger_workflow_ppa( - workflow, package_meta, release_meta, github_client, log_prefix - ), - "identify_target_ref": lambda: create_identify_target_ref_ppa( - package_meta, release_meta, github_client, log_prefix - ), - "download_artifacts": lambda: create_download_artifacts_ppa( - workflow, package_meta, github_client, log_prefix - ), - "extract_artifact_result": lambda: create_extract_artifact_result_ppa( - "test-artifact", workflow, package_meta, github_client, 
log_prefix - ), - # Tree branch functions - "workflow_complete_branch": lambda: create_workflow_complete_tree_branch( - workflow, package_meta, release_meta, github_client, "" - ), - "workflow_with_result_branch": lambda: create_workflow_with_result_tree_branch( - "artifact", workflow, package_meta, release_meta, github_client, "" - ), - "publish_workflow_branch": lambda: create_publish_workflow_tree_branch( - workflow, - workflow, - package_meta, - release_meta, - workflow, - github_client, - "", - ), - "build_workflow_branch": lambda: create_build_workflow_tree_branch( - package, release_meta, package, github_client, "" - ), - "demo_sequence": lambda: create_sequence_branch(), - "demo_selector": lambda: create_selector_branch(), - } - - if name not in ppa_creators: - console.print( - f"[red]Error: Unknown name '{name}'. Available options: {', '.join(ppa_creators.keys())}[/red]" - ) + # Create TreeInspector and render the requested branch + inspector = TreeInspector(release_tag=release_tag) + + try: + branch = inspector.create_by_name(name) + render_dot_tree(branch) + print(unicode_tree(branch)) + except ValueError as e: + console.print(f"[red]Error: {e}[/red]") raise typer.Exit(1) - - ppa = ppa_creators[name]() - render_dot_tree(ppa) - print(unicode_tree(ppa)) else: # Print full release tree - with initialize_tree_and_state(config, args, InMemoryStateStorage()) as ( + with initialize_tree_and_state( + config, args, InMemoryStateStorage(), read_only=True + ) as ( tree, _, ): @@ -354,6 +258,11 @@ def release_bht( "--force-rebuild", help="Force rebuild for specific packages (can be specified multiple times). 
Use 'all' to force rebuild all packages.", ), + only_packages: Optional[List[str]] = typer.Option( + None, + "--only-packages", + help="Only process specific packages (can be specified multiple times)", + ), tree_cutoff: int = typer.Option( 2000, "--tree-cutoff", "-m", help="Max number of ticks to run the tree for" ), @@ -367,6 +276,7 @@ def release_bht( args = ReleaseArgs( release_tag=release_tag, force_rebuild=force_rebuild or [], + only_packages=only_packages or [], ) # Use context manager version with automatic lock management From 739cdf92b5622a24c72afb32c9822073404f2fce Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Sat, 18 Oct 2025 13:54:24 +0300 Subject: [PATCH 31/39] Cleanup and code moving around --- src/redis_release/bht/behaviours.py | 6 - src/redis_release/bht/composites.py | 13 +- src/redis_release/bht/ppas.py | 2 +- src/redis_release/bht/state.py | 352 +----------- src/redis_release/bht/tree.py | 8 +- src/redis_release/cli.py | 192 +------ src/redis_release/github_client.py | 709 ------------------------- src/redis_release/models.py | 94 +--- src/redis_release/orchestrator.py | 361 ------------- src/redis_release/state_manager.py | 313 +++++++---- src/redis_release/workflow_executor.py | 372 ------------- src/tests/test_state.py | 8 +- 12 files changed, 246 insertions(+), 2184 deletions(-) delete mode 100644 src/redis_release/github_client.py delete mode 100644 src/redis_release/orchestrator.py delete mode 100644 src/redis_release/workflow_executor.py diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index a1fb410..7cc7f5c 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -15,16 +15,12 @@ import json import logging import re -import stat import uuid from datetime import datetime -from token import OP from typing import Any, Dict, List, Optional from py_trees.behaviour import Behaviour from py_trees.common import Status -from py_trees.composites import Selector, 
Sequence -from py_trees.decorators import Inverter, Repeat, Retry, Timeout from redis_release.bht.state import reset_model_to_defaults @@ -34,10 +30,8 @@ RedisVersion, ReleaseType, WorkflowConclusion, - WorkflowRun, WorkflowStatus, ) -from .decorators import FlagGuard from .logging_wrapper import PyTreesLoggerWrapper from .state import Package, PackageMeta, ReleaseMeta, Workflow diff --git a/src/redis_release/bht/composites.py b/src/redis_release/bht/composites.py index 8703d3f..b715a70 100644 --- a/src/redis_release/bht/composites.py +++ b/src/redis_release/bht/composites.py @@ -11,27 +11,20 @@ More complex behaviors, including pre- and post- conditions are defined in `ppas.py`. """ -from typing import Iterator, List, Optional +from typing import Iterator, Optional from typing import Sequence as TypingSequence from py_trees.behaviour import Behaviour -from py_trees.common import OneShotPolicy, Status -from py_trees.composites import Composite, Selector, Sequence +from py_trees.common import Status +from py_trees.composites import Composite, Sequence from py_trees.decorators import Repeat, Retry, SuccessIsRunning, Timeout from ..github_client_async import GitHubClientAsync from .behaviours import ( ExtractArtifactResult, GetWorkflowArtifactsList, - HasWorkflowArtifacts, - HasWorkflowResult, IdentifyTargetRef, IdentifyWorkflowByUUID, - IsTargetRefIdentified, - IsWorkflowCompleted, - IsWorkflowIdentified, - IsWorkflowSuccessful, - IsWorkflowTriggered, ResetPackageState, ResetWorkflowState, Sleep, diff --git a/src/redis_release/bht/ppas.py b/src/redis_release/bht/ppas.py index ccf5298..a1b5a39 100644 --- a/src/redis_release/bht/ppas.py +++ b/src/redis_release/bht/ppas.py @@ -34,7 +34,7 @@ TriggerWorkflowGuarded, WaitForWorkflowCompletion, ) -from .state import PackageMeta, ReleaseMeta, ReleaseState, Workflow +from .state import PackageMeta, ReleaseMeta, Workflow def create_workflow_success_ppa( diff --git a/src/redis_release/bht/state.py 
b/src/redis_release/bht/state.py index c0e5c67..380302b 100644 --- a/src/redis_release/bht/state.py +++ b/src/redis_release/bht/state.py @@ -1,14 +1,11 @@ import json import logging -import uuid from datetime import datetime from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Protocol, Union +from typing import Any, Dict, List, Optional, Union -from botocore.exceptions import ClientError from pydantic import BaseModel, Field from rich.console import Console -from rich.pretty import pretty_repr from rich.table import Table from redis_release.models import ( @@ -18,13 +15,9 @@ WorkflowStatus, WorkflowType, ) -from redis_release.state_manager import S3Backed, logger from ..config import Config -if TYPE_CHECKING: - from .args import ReleaseArgs - logger = logging.getLogger(__name__) @@ -188,349 +181,6 @@ def from_json(cls, data: Union[str, Dict, Path]) -> "ReleaseState": return cls(**json_data) -class StateStorage(Protocol): - """Protocol for state storage backends.""" - - def get(self, tag: str) -> Optional[dict]: - """Load state data by tag. - - Args: - tag: Release tag - - Returns: - State dict or None if not found - """ - ... - - def put(self, tag: str, state: dict) -> None: - """Save state data by tag. - - Args: - tag: Release tag - state: State dict to save - """ - ... - - def acquire_lock(self, tag: str) -> bool: - """Acquire a lock for the release process. - - Args: - tag: Release tag - - Returns: - True if lock acquired successfully - """ - ... - - def release_lock(self, tag: str) -> bool: - """Release a lock for the release process. - - Args: - tag: Release tag - - Returns: - True if lock released successfully - """ - ... - - -class StateSyncer: - """Syncs ReleaseState to storage backend only when changed. - - Can be used as a context manager to automatically acquire and release locks. 
- """ - - def __init__( - self, - storage: StateStorage, - config: Config, - args: "ReleaseArgs", - read_only: bool = False, - ): - self.tag = args.release_tag - self.storage = storage - self.config = config - self.args = args - self.last_dump: Optional[str] = None - self._state: Optional[ReleaseState] = None - self._lock_acquired = False - self.read_only = read_only - - def __enter__(self) -> "StateSyncer": - if self.read_only: - return self - """Acquire lock when entering context.""" - if not self.storage.acquire_lock(self.tag): - raise RuntimeError(f"Failed to acquire lock for tag: {self.tag}") - self._lock_acquired = True - logger.info(f"Lock acquired for tag: {self.tag}") - return self - - def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: - if self.read_only: - return - """Release lock when exiting context.""" - if self._lock_acquired: - self.storage.release_lock(self.tag) - self._lock_acquired = False - logger.info(f"Lock released for tag: {self.tag}") - print_state_table(self.state) - - @property - def state(self) -> ReleaseState: - if self._state is None: - loaded = None - if self.args.force_rebuild and "all" in self.args.force_rebuild: - logger.info( - "Force rebuild 'all' enabled, using default state based on config" - ) - loaded = self.default_state() - else: - loaded = self.load() - - if loaded is None: - self._state = self.default_state() - else: - self._state = loaded - self.apply_args(self._state) - logger.debug(pretty_repr(self._state)) - return self._state - - def default_state(self) -> ReleaseState: - """Create default state from config.""" - state = ReleaseState.from_config(self.config) - self.apply_args(state) - return state - - def apply_args(self, state: ReleaseState) -> None: - """Apply arguments to state.""" - state.meta.tag = self.tag - - if self.args: - if "all" in self.args.force_rebuild: - # Set force_rebuild for all packages - for package_name in state.packages: - 
state.packages[package_name].meta.ephemeral.force_rebuild = True - else: - # Set force_rebuild for specific packages - for package_name in self.args.force_rebuild: - if package_name in state.packages: - state.packages[package_name].meta.ephemeral.force_rebuild = True - - def load(self) -> Optional[ReleaseState]: - """Load state from storage backend.""" - state_data = self.storage.get(self.tag) - if state_data is None: - return None - - state = ReleaseState(**state_data) - self.last_dump = state.model_dump_json(indent=2) - return state - - def sync(self) -> None: - """Save state to storage backend if changed since last sync.""" - if self.read_only: - raise RuntimeError("Cannot sync read-only state") - current_dump = self.state.model_dump_json(indent=2) - - if current_dump != self.last_dump: - self.last_dump = current_dump - state_dict = json.loads(current_dump) - self.storage.put(self.tag, state_dict) - logger.debug("State saved") - - -class InMemoryStateStorage: - """In-memory state storage for testing.""" - - def __init__(self) -> None: - self._storage: Dict[str, dict] = {} - self._locks: Dict[str, bool] = {} - - def get(self, tag: str) -> Optional[dict]: - """Load state data by tag.""" - return self._storage.get(tag) - - def put(self, tag: str, state: dict) -> None: - """Save state data by tag.""" - self._storage[tag] = state - - def acquire_lock(self, tag: str) -> bool: - """Acquire a lock for the release process.""" - if self._locks.get(tag, False): - return False - self._locks[tag] = True - return True - - def release_lock(self, tag: str) -> bool: - """Release a lock for the release process.""" - self._locks[tag] = False - return True - - -class S3StateStorage(S3Backed): - def __init__( - self, - bucket_name: Optional[str] = None, - aws_region: str = "us-east-1", - aws_profile: Optional[str] = None, - owner: Optional[str] = None, - ): - super().__init__(bucket_name, False, aws_region, aws_profile) - # Generate UUID for this instance to use as lock owner - 
self.owner = owner if owner else str(uuid.uuid4()) - - def get(self, tag: str) -> Optional[dict]: - """Load blackboard data from S3. - - Args: - tag: Release tag - - Returns: - ReleaseState object or None if not found - """ - state_key = f"release-state/{tag}-blackboard.json" - logger.debug(f"Loading blackboard for tag: {tag}") - - if self.s3_client is None: - raise RuntimeError("S3 client not initialized") - - try: - response = self.s3_client.get_object(Bucket=self.bucket_name, Key=state_key) - state_data: dict = json.loads(response["Body"].read().decode("utf-8")) - - logger.debug("Blackboard loaded successfully") - - return state_data - - except ClientError as e: - if e.response["Error"]["Code"] == "NoSuchKey": - logger.debug(f"No existing blackboard found for tag: {tag}") - return None - else: - logger.error(f"Failed to load blackboard: {e}") - raise - - def put(self, tag: str, state: dict) -> None: - """Save release state to S3. - - Args: - state: ReleaseState object to save - """ - state_key = f"release-state/{tag}-blackboard.json" - logger.debug(f"Saving blackboard for tag: {tag}") - - if self.s3_client is None: - raise RuntimeError("S3 client not initialized") - - state_json = json.dumps(state, indent=2, default=str) - - try: - self.s3_client.put_object( - Bucket=self.bucket_name, - Key=state_key, - Body=state_json, - ContentType="application/json", - Metadata={ - "tag": tag, - }, - ) - - logger.debug("Blackboard saved successfully") - - except ClientError as e: - logger.error(f"Failed to save blackboard: {e}") - raise - - def acquire_lock(self, tag: str) -> bool: - """Acquire a lock for the release process. 
- - Args: - tag: Release tag - - Returns: - True if lock acquired successfully - """ - lock_key = f"release-locks/{tag}.lock" - logger.debug(f"Acquiring lock for tag: {tag}") - - if self.s3_client is None: - raise RuntimeError("S3 client not initialized") - - lock_data = { - "tag": tag, - "owner": self.owner, - "acquired_at": datetime.now().isoformat(), - } - - try: - self.s3_client.put_object( - Bucket=self.bucket_name, - Key=lock_key, - Body=json.dumps(lock_data, indent=2), - ContentType="application/json", - # fail if object already exists - IfNoneMatch="*", - ) - - logger.debug("Lock acquired successfully") - return True - - except ClientError as e: - if e.response["Error"]["Code"] == "PreconditionFailed": - try: - response = self.s3_client.get_object( - Bucket=self.bucket_name, Key=lock_key - ) - existing_lock = json.loads(response["Body"].read().decode("utf-8")) - logger.warning( - f"Lock already held by: {existing_lock.get('owner', 'unknown')}, " - f"acquired at: {existing_lock.get('acquired_at', 'unknown')}" - ) - except: - logger.warning("Lock exists but couldn't read details") - return False - else: - logger.error(f"Failed to acquire lock: {e}") - raise - - def release_lock(self, tag: str) -> bool: - """Release a lock for the release process. 
- - Args: - tag: Release tag - - Returns: - True if lock released successfully - """ - lock_key = f"release-locks/{tag}.lock" - logger.debug(f"Releasing lock for tag: {tag}") - - if self.s3_client is None: - raise RuntimeError("S3 client not initialized") - - try: - # check if we own the lock - response = self.s3_client.get_object(Bucket=self.bucket_name, Key=lock_key) - lock_data = json.loads(response["Body"].read().decode("utf-8")) - - if lock_data.get("owner") != self.owner: - logger.error(f"Cannot release lock owned by: {lock_data.get('owner')}") - return False - - self.s3_client.delete_object(Bucket=self.bucket_name, Key=lock_key) - logger.debug("Lock released successfully") - return True - - except ClientError as e: - if e.response["Error"]["Code"] == "NoSuchKey": - logger.debug(f"No lock found for tag: {tag}") - return True - else: - logger.error(f"Failed to release lock: {e}") - raise - - def reset_model_to_defaults(target: BaseModel, default: BaseModel) -> None: """Recursively reset a BaseModel in-place with values from default model.""" for field_name, field_info in default.model_fields.items(): diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index d25af33..15d58d8 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -15,6 +15,7 @@ from ..config import Config from ..github_client_async import GitHubClientAsync +from ..state_manager import S3StateStorage, StateStorage, StateSyncer from .args import ReleaseArgs from .backchain import latch_chains from .behaviours import NeedToPublishRelease @@ -42,10 +43,8 @@ PackageMeta, ReleaseMeta, ReleaseState, - S3StateStorage, - StateStorage, - StateSyncer, Workflow, + print_state_table, ) logger = logging.getLogger(__name__) @@ -218,7 +217,7 @@ def initialize_tree_and_state( storage: Optional[StateStorage] = None, read_only: bool = False, ) -> Iterator[Tuple[BehaviourTree, StateSyncer]]: - github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN")) + 
github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN") or "") if storage is None: storage = S3StateStorage() @@ -246,6 +245,7 @@ def initialize_tree_and_state( tree.add_post_tick_handler(log_tree_state_with_markup) yield (tree, state_syncer) + print_state_table(state_syncer.state) def log_tree_state_with_markup(tree: BehaviourTree) -> None: diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index 2c93ca7..b909596 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -7,17 +7,18 @@ import typer from py_trees.display import render_dot_tree, unicode_tree -from rich.console import Console -from rich.table import Table from redis_release.bht.args import ReleaseArgs -from redis_release.bht.state import InMemoryStateStorage, S3StateStorage, StateSyncer +from redis_release.bht.state import print_state_table +from redis_release.state_manager import ( + InMemoryStateStorage, + S3StateStorage, + StateSyncer, +) from .bht.tree import TreeInspector, async_tick_tock, initialize_tree_and_state from .config import load_config from .logging_config import setup_logging -from .models import ReleaseType -from .orchestrator import ReleaseOrchestrator app = typer.Typer( name="redis-release", @@ -25,177 +26,11 @@ add_completion=False, ) -console = Console() - - -def get_orchestrator( - github_token: Optional[str] = None, - require_github_token: bool = True, -) -> ReleaseOrchestrator: - """Create and return a ReleaseOrchestrator instance.""" - if require_github_token: - if not github_token: - github_token = os.getenv("GITHUB_TOKEN") - if not github_token: - console.print( - "[red]Error: GITHUB_TOKEN environment variable is required[/red]" - ) - raise typer.Exit(1) - else: - # for commands that don't need GitHub API access - github_token = github_token or os.getenv("GITHUB_TOKEN", "not-required") - - return ReleaseOrchestrator( - github_token=github_token, - ) - - -@app.command() -def release( - tag: str = typer.Argument(..., help="Release tag 
(e.g., 8.4-m01-int1)"), - force_rebuild: bool = typer.Option( - False, - "--force-rebuild", - help="Force rebuild Docker image, ignoring existing state", - ), - release_type: ReleaseType = typer.Option( - None, "--release-type", help="Override release type detection" - ), - dry_run: bool = typer.Option( - False, "--dry-run", help="Show what would be done without executing" - ), - github_token: Optional[str] = typer.Option( - None, "--github-token", envvar="GITHUB_TOKEN", help="GitHub API token" - ), -) -> None: - """Execute release workflow for the given tag.""" - console.print(f"[bold blue]Starting release process for tag: {tag}[/bold blue]") - - if dry_run: - console.print("[yellow]DRY RUN MODE - No actual changes will be made[/yellow]") - - orchestrator = get_orchestrator(github_token) - - try: - result = orchestrator.execute_release( - tag=tag, - force_rebuild=force_rebuild, - release_type=release_type, - dry_run=dry_run, - ) - - if result.success: - console.print(f"[green] Release {tag} completed successfully![/green]") - else: - console.print( - f"[yellow] Release {tag} requires manual intervention[/yellow]" - ) - if result.message: - console.print(f"[yellow]{result.message}[/yellow]") - raise typer.Exit(1) - - except Exception as e: - console.print(f"[red] Release {tag} failed: {e}[/red]") - raise typer.Exit(1) +logger = logging.getLogger(__name__) @app.command() -def status( - tag: str = typer.Argument(..., help="Release tag to check status for"), - dry_run: bool = typer.Option( - False, "--dry-run", help="Use local cache instead of S3" - ), -) -> None: - """Show release status for the given tag.""" - console.print(f"[bold blue]Release status for tag: {tag}[/bold blue]") - - orchestrator = get_orchestrator(github_token=None, require_github_token=False) - - try: - state = orchestrator.get_release_status(tag, dry_run=dry_run) - - if not state: - console.print(f"[yellow]No release found for tag: {tag}[/yellow]") - return - - table = Table(title=f"Release 
Status: {tag}") - table.add_column("Package", style="cyan") - table.add_column("Build Status", style="magenta") - table.add_column("Publish Status", style="green") - table.add_column("Build Artifacts", style="blue") - table.add_column("Publish Artifacts", style="yellow") - - for pkg_type, pkg_state in state.packages.items(): - # Build status - if not pkg_state.build_completed: - build_status = "[blue]In Progress[/blue]" - elif pkg_state.build_workflow and pkg_state.build_workflow.conclusion: - if pkg_state.build_workflow.conclusion.value == "success": - build_status = "[green]Success[/green]" - elif pkg_state.build_workflow.conclusion.value == "failure": - build_status = "[red]Failed[/red]" - else: - build_status = "[yellow]Cancelled[/yellow]" - else: - build_status = "[yellow]Cancelled[/yellow]" - - # Publish status - if not pkg_state.publish_completed: - publish_status = ( - "[blue]In Progress[/blue]" - if pkg_state.publish_workflow - else "[dim]Not Started[/dim]" - ) - elif pkg_state.publish_workflow and pkg_state.publish_workflow.conclusion: - if pkg_state.publish_workflow.conclusion.value == "success": - publish_status = "[green]Success[/green]" - elif pkg_state.publish_workflow.conclusion.value == "failure": - publish_status = "[red]Failed[/red]" - else: - publish_status = "[yellow]Cancelled[/yellow]" - else: - publish_status = "[yellow]Cancelled[/yellow]" - - # Build artifacts - if pkg_state.build_artifacts: - build_artifacts = f"[green]{len(pkg_state.build_artifacts)}[/green]" - else: - build_artifacts = "[dim]None[/dim]" - - # Publish artifacts - if pkg_state.publish_artifacts: - publish_artifacts = f"[green]{len(pkg_state.publish_artifacts)}[/green]" - else: - publish_artifacts = "[dim]None[/dim]" - - table.add_row( - pkg_type.value, - build_status, - publish_status, - build_artifacts, - publish_artifacts, - ) - - console.print(table) - - if state.redis_tag_commit or state.docker_repo_commit: - console.print("\n[bold]Commit Information:[/bold]") - if 
state.redis_tag_commit: - console.print( - f" Redis tag {tag}: [cyan]{state.redis_tag_commit[:8]}[/cyan]" - ) - if state.docker_repo_commit: - console.print( - f" Docker repo: [cyan]{state.docker_repo_commit[:8]}[/cyan]" - ) - - except Exception as e: - console.print(f"[red] Failed to get status: {e}[/red]") - raise typer.Exit(1) - - -@app.command() -def release_print_bht( +def release_print( release_tag: str = typer.Argument(..., help="Release tag (e.g., 8.4-m01-int1)"), config_file: Optional[str] = typer.Option( None, "--config", "-c", help="Path to config file (default: config.yaml)" @@ -233,7 +68,7 @@ def release_print_bht( render_dot_tree(branch) print(unicode_tree(branch)) except ValueError as e: - console.print(f"[red]Error: {e}[/red]") + logger.error(f"[red]Error: {e}[/red]") raise typer.Exit(1) else: # Print full release tree @@ -248,7 +83,7 @@ def release_print_bht( @app.command() -def release_bht( +def release( release_tag: str = typer.Argument(..., help="Release tag (e.g., 8.4-m01-int1)"), config_file: Optional[str] = typer.Option( None, "--config", "-c", help="Path to config file (default: config.yaml)" @@ -285,7 +120,7 @@ def release_bht( @app.command() -def release_state( +def status( release_tag: str = typer.Argument(..., help="Release tag (e.g., 8.4-m01-int1)"), config_file: Optional[str] = typer.Option( None, "--config", "-c", help="Path to config file (default: config.yaml)" @@ -306,8 +141,9 @@ def release_state( storage=S3StateStorage(), config=config, args=args, - ): - pass + read_only=True, + ) as state_syncer: + print_state_table(state_syncer.state) if __name__ == "__main__": diff --git a/src/redis_release/github_client.py b/src/redis_release/github_client.py deleted file mode 100644 index 23d4a81..0000000 --- a/src/redis_release/github_client.py +++ /dev/null @@ -1,709 +0,0 @@ -"""GitHub API client for workflow operations.""" - -import json -import re -import time -import uuid -from typing import Any, Dict, List, Optional -import requests - 
-from rich.console import Console - -from .models import WorkflowConclusion, WorkflowRun, WorkflowStatus - -console = Console() - - -class GitHubClient: - """GitHub API client for workflow operations.""" - - def __init__(self, token: str, dry_run: bool = False): - """Initialize GitHub client. - - Args: - token: GitHub API token - dry_run: If True, only simulate operations without making real API calls - """ - self.token = token - self.dry_run = dry_run - self._mock_run_counter = 1000 - - def trigger_workflow( - self, repo: str, workflow_file: str, inputs: Dict[str, str], ref: str = "main" - ) -> WorkflowRun: - """Trigger a workflow in a repository. - - Args: - repo: Repository name (e.g., "redis/docker-library-redis") - workflow_file: Workflow file name (e.g., "build.yml") - inputs: Workflow inputs - ref: Git reference to run workflow on - - Returns: - WorkflowRun object with run information - """ - # Generate a unique UUID for this workflow run - workflow_uuid = str(uuid.uuid4()) - - console.print(f"[blue] Triggering workflow {workflow_file} in {repo}[/blue]") - console.print(f"[dim] Inputs: {inputs}[/dim]") - console.print(f"[dim] Ref: {ref}[/dim]") - console.print(f"[dim] Workflow UUID: {workflow_uuid}[/dim]") - - if self.dry_run: - console.print("[yellow] (DRY RUN - not actually triggered)[/yellow]") - # generate mock run_id even in dry-run for consistency - run_id = self._mock_run_counter - self._mock_run_counter += 1 - return WorkflowRun( - repo=repo, - workflow_id=workflow_file, - workflow_uuid=workflow_uuid, - run_id=run_id, - status=WorkflowStatus.PENDING, - ) - - url = f"https://api.github.com/repos/{repo}/actions/workflows/{workflow_file}/dispatches" - headers = { - "Authorization": f"Bearer {self.token}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - # Add the workflow UUID to inputs so it appears in the workflow run name - enhanced_inputs = inputs.copy() - enhanced_inputs["workflow_uuid"] = workflow_uuid 
- - payload = {"ref": ref, "inputs": enhanced_inputs} - - try: - response = requests.post(url, headers=headers, json=payload, timeout=30) - response.raise_for_status() - - console.print(f"[green]Workflow triggered successfully[/green]") - - workflow_run = self.identify_workflow(repo, workflow_file, workflow_uuid) - console.print(f"[dim] Run ID: {workflow_run.run_id}[/dim]") - console.print( - f"[dim] URL: https://github.com/{repo}/actions/runs/{workflow_run.run_id}[/dim]" - ) - return workflow_run - - except requests.exceptions.RequestException as e: - console.print(f"[red]Failed to trigger workflow: {e}[/red]") - raise - - def get_workflow_run(self, repo: str, run_id: int) -> WorkflowRun: - """Get workflow run status. - - Args: - repo: Repository name - run_id: Workflow run ID - - Returns: - Updated WorkflowRun object - """ - if self.dry_run: - return WorkflowRun( - repo=repo, - workflow_id="mock.yml", - workflow_uuid=None, # No UUID for mock runs - run_id=run_id, - status=WorkflowStatus.COMPLETED, - conclusion=WorkflowConclusion.SUCCESS, - ) - - url = f"https://api.github.com/repos/{repo}/actions/runs/{run_id}" - headers = { - "Authorization": f"Bearer {self.token}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - try: - response = requests.get(url, headers=headers, timeout=30) - response.raise_for_status() - - data = response.json() - - github_status = data.get("status", "unknown") - if github_status == "queued": - status = WorkflowStatus.QUEUED - elif github_status == "in_progress": - status = WorkflowStatus.IN_PROGRESS - elif github_status == "completed": - status = WorkflowStatus.COMPLETED - else: - status = WorkflowStatus.PENDING - - github_conclusion = data.get("conclusion") - conclusion = None - if github_conclusion == "success": - conclusion = WorkflowConclusion.SUCCESS - elif github_conclusion == "failure": - conclusion = WorkflowConclusion.FAILURE - - workflow_name = data.get("name", "unknown") - 
workflow_uuid = self._extract_uuid(workflow_name) - - return WorkflowRun( - repo=repo, - workflow_id=workflow_name, - workflow_uuid=workflow_uuid, - run_id=data.get("id"), - status=status, - conclusion=conclusion, - ) - - except requests.exceptions.RequestException as e: - console.print(f"[red]Failed to get workflow run: {e}[/red]") - raise - - def wait_for_workflow_completion( - self, repo: str, run_id: int, timeout_minutes: int = 30, poll_interval: int = 30 - ) -> WorkflowRun: - """Wait for workflow to complete. - - Args: - repo: Repository name - run_id: Workflow run ID - timeout_minutes: Maximum time to wait - poll_interval: Seconds between status checks - - Returns: - Final WorkflowRun object - - Raises: - TimeoutError: If workflow doesn't complete within timeout - """ - console.print( - f"[blue] Waiting for workflow {run_id} in {repo} to complete...[/blue]" - ) - - start_time = time.time() - timeout_seconds = timeout_minutes * 60 - - while True: - if run_id is None: - raise ValueError("Cannot wait for workflow completion: run_id is None") - - workflow_run = self.get_workflow_run(repo, run_id) - - status_value = ( - workflow_run.status.value if workflow_run.status else "unknown" - ) - console.print(f"[dim] Status: {status_value}[/dim]") - - if workflow_run.status == WorkflowStatus.COMPLETED: - if workflow_run.conclusion == WorkflowConclusion.SUCCESS: - console.print( - f"[green] Workflow {run_id} completed successfully[/green]" - ) - elif workflow_run.conclusion == WorkflowConclusion.FAILURE: - console.print(f"[red] Workflow {run_id} failed[/red]") - else: - conclusion_value = ( - workflow_run.conclusion.value - if workflow_run.conclusion - else "cancelled/skipped" - ) - if conclusion_value in ["cancelled", "cancelled/skipped"]: - status_color = "yellow" - elif conclusion_value in ["skipped"]: - status_color = "blue" - else: - status_color = "red" - - console.print( - f"[dim] Workflow {run_id} completed with status:[/dim] 
[{status_color}]{conclusion_value}[/{status_color}]" - ) - return workflow_run - - elapsed = time.time() - start_time - if elapsed > timeout_seconds: - raise TimeoutError( - f"Workflow {run_id} in {repo} did not complete within {timeout_minutes} minutes" - ) - - if not self.dry_run: - time.sleep(poll_interval) - else: - # in dry run, simulate quick completion - time.sleep(0.1) - return WorkflowRun( - repo=repo, - workflow_id="mock.yml", - workflow_uuid=None, # No UUID for mock runs - run_id=run_id, - status=WorkflowStatus.COMPLETED, - conclusion=WorkflowConclusion.SUCCESS, - ) - - def get_workflow_artifacts(self, repo: str, run_id: int) -> Dict[str, Dict]: - """Get artifacts from a completed workflow. - - Args: - repo: Repository name - run_id: Workflow run ID - - Returns: - Dictionary with artifact names as keys and artifact details as values. - Each artifact dictionary contains: id, archive_download_url, created_at, - expires_at, updated_at, size_in_bytes, digest - """ - console.print(f"[blue]Getting artifacts for workflow {run_id} in {repo}[/blue]") - - if self.dry_run: - return { - "release_handle": { - "id": 12345, - "archive_download_url": f"https://api.github.com/repos/{repo}/actions/artifacts/12345/zip", - "created_at": "2023-01-01T00:00:00Z", - "expires_at": "2023-01-31T00:00:00Z", - "updated_at": "2023-01-01T00:00:00Z", - "size_in_bytes": 1048576, - "digest": "sha256:mock-digest", - }, - "release_info": { - "id": 67890, - "archive_download_url": f"https://api.github.com/repos/{repo}/actions/artifacts/67890/zip", - "created_at": "2023-01-01T00:00:00Z", - "expires_at": "2023-01-31T00:00:00Z", - "updated_at": "2023-01-01T00:00:00Z", - "size_in_bytes": 2097152, - "digest": "sha256:mock-digest-info", - }, - "mock-artifact": { - "id": 11111, - "archive_download_url": f"https://api.github.com/repos/{repo}/actions/artifacts/11111/zip", - "created_at": "2023-01-01T00:00:00Z", - "expires_at": "2023-01-31T00:00:00Z", - "updated_at": "2023-01-01T00:00:00Z", - 
"size_in_bytes": 2048576, - "digest": "sha256:mock-digest-2", - }, - } - - # Real GitHub API call to get artifacts - url = f"https://api.github.com/repos/{repo}/actions/runs/{run_id}/artifacts" - headers = { - "Authorization": f"Bearer {self.token}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - try: - response = requests.get(url, headers=headers, timeout=30) - response.raise_for_status() - - data = response.json() - artifacts = {} - - for artifact_data in data.get("artifacts", []): - artifact_name = artifact_data.get("name", "unknown") - - # Extract the required fields from the GitHub API response - artifact_info = { - "id": artifact_data.get("id"), - "archive_download_url": artifact_data.get("archive_download_url"), - "created_at": artifact_data.get("created_at"), - "expires_at": artifact_data.get("expires_at"), - "updated_at": artifact_data.get("updated_at"), - "size_in_bytes": artifact_data.get("size_in_bytes"), - "digest": artifact_data.get("workflow_run", {}).get( - "head_sha" - ), # Using head_sha as digest - } - - artifacts[artifact_name] = artifact_info - - if artifacts: - console.print(f"[green]Found {len(artifacts)} artifacts[/green]") - for artifact_name, artifact_info in artifacts.items(): - size_mb = round( - artifact_info.get("size_in_bytes", 0) / (1024 * 1024), 2 - ) - console.print( - f"[dim] {artifact_name} ({size_mb}MB) - ID: {artifact_info.get('id')}[/dim]" - ) - else: - console.print( - "[yellow]No artifacts found for this workflow run[/yellow]" - ) - - return artifacts - - except requests.exceptions.RequestException as e: - console.print(f"[red]Failed to get artifacts: {e}[/red]") - return {} - - def extract_result( - self, - repo: str, - artifacts: Dict[str, Dict], - artifact_name: str, - json_file_name: str, - ) -> Optional[Dict[str, Any]]: - """Extract JSON result from artifacts. 
- - Args: - repo: Repository name - artifacts: Dictionary of artifacts from get_workflow_artifacts - artifact_name: Name of the artifact to extract from - json_file_name: Name of the JSON file within the artifact - - Returns: - Parsed JSON content from the specified file, or None if not found - """ - if artifact_name not in artifacts: - console.print(f"[yellow]No {artifact_name} artifact found[/yellow]") - return None - - target_artifact = artifacts[artifact_name] - artifact_id = target_artifact.get("id") - - if not artifact_id: - console.print(f"[red]{artifact_name} artifact has no ID[/red]") - return None - - console.print( - f"[blue]Extracting {json_file_name} from artifact {artifact_id}[/blue]" - ) - - if self.dry_run: - console.print( - f"[yellow] (DRY RUN - returning mock {json_file_name})[/yellow]" - ) - return { - "mock": True, - "version": "1.0.0", - "build_info": { - "timestamp": "2023-01-01T00:00:00Z", - "commit": "mock-commit-hash", - }, - } - - # Download the artifact and extract JSON file - download_url = target_artifact.get("archive_download_url") - if not download_url: - console.print(f"[red]{artifact_name} artifact has no download URL[/red]") - return None - - headers = { - "Authorization": f"Bearer {self.token}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - try: - # Download the artifact zip file - response = requests.get(download_url, headers=headers, timeout=30) - response.raise_for_status() - - # Extract JSON file from the zip - import zipfile - import io - - with zipfile.ZipFile(io.BytesIO(response.content)) as zip_file: - if json_file_name in zip_file.namelist(): - with zip_file.open(json_file_name) as json_file: - result_data = json.load(json_file) - console.print( - f"[green]Successfully extracted {json_file_name}[/green]" - ) - return result_data - else: - console.print(f"[red]{json_file_name} not found in artifact[/red]") - return None - - except requests.exceptions.RequestException as e: - 
console.print( - f"[red]Failed to download {artifact_name} artifact: {e}[/red]" - ) - return None - except (zipfile.BadZipFile, json.JSONDecodeError, KeyError) as e: - console.print(f"[red]Failed to extract {json_file_name}: {e}[/red]") - return None - - def extract_release_handle( - self, repo: str, artifacts: Dict[str, Dict] - ) -> Optional[Dict[str, Any]]: - """Extract release_handle JSON from artifacts. - - This is a backward compatibility wrapper around extract_result. - - Args: - repo: Repository name - artifacts: Dictionary of artifacts from get_workflow_artifacts - - Returns: - Parsed JSON content from release_handle.json file, or None if not found - """ - return self.extract_result( - repo, artifacts, "release_handle", "release_handle.json" - ) - - def get_recent_workflow_runs( - self, repo: str, workflow_file: str, limit: int = 10 - ) -> List[WorkflowRun]: - """Get recent workflow runs for a specific workflow. - - Args: - repo: Repository name - workflow_file: Workflow file name - limit: Maximum number of runs to return - - Returns: - List of WorkflowRun objects, sorted by creation time (newest first) - """ - if self.dry_run: - return [] - - url = f"https://api.github.com/repos/{repo}/actions/workflows/{workflow_file}/runs" - headers = { - "Authorization": f"Bearer {self.token}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - params = {"per_page": limit, "page": 1} - - try: - response = requests.get(url, headers=headers, params=params, timeout=30) - response.raise_for_status() - - data = response.json() - runs = [] - - for run_data in data.get("workflow_runs", []): - github_status = run_data.get("status", "unknown") - if github_status == "queued": - status = WorkflowStatus.QUEUED - elif github_status == "in_progress": - status = WorkflowStatus.IN_PROGRESS - elif github_status == "completed": - status = WorkflowStatus.COMPLETED - else: - status = WorkflowStatus.PENDING - - github_conclusion = 
run_data.get("conclusion") - conclusion = None - if github_conclusion == "success": - conclusion = WorkflowConclusion.SUCCESS - elif github_conclusion == "failure": - conclusion = WorkflowConclusion.FAILURE - - workflow_name = run_data.get("name", workflow_file) - workflow_uuid = self._extract_uuid(workflow_name) - - runs.append( - WorkflowRun( - repo=repo, - workflow_id=workflow_name, - workflow_uuid=workflow_uuid, - run_id=run_data.get("id"), - status=status, - conclusion=conclusion, - ) - ) - - return runs - - except requests.exceptions.RequestException as e: - console.print(f"[red]Failed to get workflow runs: {e}[/red]") - return [] - - def _extract_uuid(self, text: str) -> Optional[str]: - """Extract UUID from a string if present. - - Args: - text: String to search for UUID pattern - - Returns: - UUID string if found, None otherwise - """ - if not text: - return None - - uuid_pattern = r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}" - uuid_match = re.search(uuid_pattern, text, re.IGNORECASE) - return uuid_match.group() if uuid_match else None - - def identify_workflow( - self, repo: str, workflow_file: str, workflow_uuid: str, max_tries: int = 10 - ) -> WorkflowRun: - """Identify a specific workflow run by UUID in its name. 
- - Args: - repo: Repository name - workflow_file: Workflow file name - workflow_uuid: UUID to search for in workflow run names - max_tries: Maximum number of attempts to find the workflow - - Returns: - WorkflowRun object with matching UUID - - Raises: - RuntimeError: If workflow run cannot be found after max_tries - """ - console.print( - f"[blue]Searching for workflow run with UUID: {workflow_uuid}[/blue]" - ) - - for attempt in range(max_tries): - time.sleep(2) - if attempt > 0: - console.print(f"[dim] Attempt {attempt + 1}/{max_tries}[/dim]") - - runs = self.get_recent_workflow_runs(repo, workflow_file, limit=20) - - for run in runs: - extracted_uuid = self._extract_uuid(run.workflow_id) - if extracted_uuid and extracted_uuid.lower() == workflow_uuid.lower(): - console.print( - f"[green]Found matching workflow run: {run.run_id}[/green]" - ) - console.print(f"[dim] Workflow name: {run.workflow_id}[/dim]") - console.print(f"[dim] Extracted UUID: {extracted_uuid}[/dim]") - run.workflow_uuid = workflow_uuid - return run - - console.print("[dim] No matching workflow found, trying again...[/dim]") - - raise RuntimeError( - f"Could not find workflow run with UUID {workflow_uuid} after {max_tries} attempts. " - f"The workflow may have failed to start or there may be a delay in GitHub's API." - ) - - def check_workflow_exists(self, repo: str, workflow_file: str) -> bool: - """Check if a workflow file exists and is accessible. 
- - Args: - repo: Repository name - workflow_file: Workflow file name - - Returns: - True if workflow exists and is accessible - """ - if self.dry_run: - return True - - url = f"https://api.github.com/repos/{repo}/actions/workflows/{workflow_file}" - headers = { - "Authorization": f"Bearer {self.token}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - try: - response = requests.get(url, headers=headers, timeout=30) - if response.status_code == 200: - workflow_data = response.json() - console.print(f"[green]✓ Workflow '{workflow_file}' found[/green]") - console.print( - f"[dim] Name: {workflow_data.get('name', 'Unknown')}[/dim]" - ) - console.print( - f"[dim] State: {workflow_data.get('state', 'Unknown')}[/dim]" - ) - return True - elif response.status_code == 404: - console.print( - f"[red]✗ Workflow '{workflow_file}' not found in {repo}[/red]" - ) - return False - else: - console.print( - f"[yellow]? Cannot check workflow: HTTP {response.status_code}[/yellow]" - ) - return False - - except requests.exceptions.RequestException as e: - console.print(f"[red]Error checking workflow: {e}[/red]") - return False - - def get_tag_commit(self, repo: str, tag: str) -> Optional[str]: - """Get commit hash for a specific tag. 
- - Args: - repo: Repository name (e.g., "redis/redis") - tag: Tag name (e.g., "8.2.1") - - Returns: - Commit hash or None if not found - """ - if self.dry_run: - return f"mock-commit-{tag}" - - url = f"https://api.github.com/repos/{repo}/tags" - headers = { - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - # only add auth for non-public repos or if we're accessing our own repos - if not repo.startswith("redis/"): - headers["Authorization"] = f"Bearer {self.token}" - - try: - response = requests.get(url, headers=headers, timeout=30) - if response.status_code == 200: - data = response.json() - - for tag_data in data: - if tag_data.get("name") == tag: - commit_sha = tag_data.get("commit", {}).get("sha") - if commit_sha: - return commit_sha - - console.print(f"[red]Tag '{tag}' not found in {repo}[/red]") - console.print( - f"[dim]Available tags: https://github.com/{repo}/tags[/dim]" - ) - return None - - elif response.status_code == 404: - console.print(f"[red]Repository '{repo}' not found[/red]") - return None - else: - console.print( - f"[yellow]Could not check tags in {repo}: HTTP {response.status_code}[/yellow]" - ) - return None - - except requests.exceptions.RequestException as e: - console.print(f"[red]Error getting tag commit: {e}[/red]") - return None - - def get_branch_latest_commit(self, repo: str, branch: str) -> Optional[str]: - """Get latest commit hash from a branch. 
- - Args: - repo: Repository name - branch: Branch name - - Returns: - Commit hash or None if not found - """ - if self.dry_run: - return f"mock-commit-{branch}" - - url = f"https://api.github.com/repos/{repo}/git/refs/heads/{branch}" - headers = { - "Authorization": f"Bearer {self.token}", - "Accept": "application/vnd.github.v3+json", - "X-GitHub-Api-Version": "2022-11-28", - } - - try: - response = requests.get(url, headers=headers, timeout=30) - if response.status_code == 200: - data = response.json() - return data.get("object", {}).get("sha") - else: - console.print(f"[yellow]Branch '{branch}' not found in {repo}[/yellow]") - return None - - except requests.exceptions.RequestException as e: - console.print(f"[red]Error getting branch commit: {e}[/red]") - return None diff --git a/src/redis_release/models.py b/src/redis_release/models.py index ec4a6d2..2ab5c37 100644 --- a/src/redis_release/models.py +++ b/src/redis_release/models.py @@ -1,9 +1,8 @@ """Data models for Redis release automation.""" import re -from datetime import datetime from enum import Enum -from typing import Any, Dict, Optional +from typing import Optional from pydantic import BaseModel, Field @@ -56,97 +55,6 @@ class WorkflowRun(BaseModel): conclusion: Optional[WorkflowConclusion] = None -class PackageState(BaseModel): - """State of a package in the release process.""" - - package_type: PackageType - build_workflow: Optional[WorkflowRun] = None - build_artifacts: Dict[str, Dict[str, Any]] = Field(default_factory=dict) - release_handle: Optional[Dict[str, Any]] = None - build_completed: bool = False - - # Publish phase information - publish_workflow: Optional[WorkflowRun] = None - publish_completed: bool = False - publish_info: Optional[Dict[str, Any]] = None - publish_artifacts: Dict[str, Dict[str, Any]] = Field(default_factory=dict) - - def is_build_phase_successful(self) -> bool: - """Check if build workflow is completed successfully.""" - return ( - self.build_completed - and 
self.build_workflow is not None - and self.build_workflow.conclusion == WorkflowConclusion.SUCCESS - ) - - def is_publish_phase_successful(self) -> bool: - """Check if publish workflow is completed successfully.""" - return ( - self.publish_completed - and self.publish_workflow is not None - and self.publish_workflow.conclusion == WorkflowConclusion.SUCCESS - ) - - -class ReleaseState(BaseModel): - """Complete state of a release process.""" - - tag: str - release_type: ReleaseType - packages: Dict[PackageType, PackageState] = Field(default_factory=dict) - created_at: datetime = Field(default_factory=datetime.now) - - # Git commit information - redis_tag_commit: Optional[str] = None # Redis tag commit hash - docker_repo_commit: Optional[str] = None # Docker repo latest commit hash - - def is_build_successful(self) -> bool: - """Check if all build workflows are completed successfully.""" - if not self.packages: - return False - return all(pkg.is_build_phase_successful() for pkg in self.packages.values()) - - def is_build_phase_finished(self) -> bool: - """Check if all build workflows are finished (successfully or not).""" - if not self.packages: - return False - return all(pkg.build_completed for pkg in self.packages.values()) - - def has_build_failures(self) -> bool: - """Check if any build workflows failed or were cancelled.""" - if not self.packages: - return False - return any( - pkg.build_completed - and pkg.build_workflow - and pkg.build_workflow.conclusion != WorkflowConclusion.SUCCESS - for pkg in self.packages.values() - ) - - def is_publish_successful(self) -> bool: - """Check if all publish workflows are completed successfully.""" - if not self.packages: - return False - return all(pkg.is_publish_phase_successful() for pkg in self.packages.values()) - - def is_publish_phase_finished(self) -> bool: - """Check if all publish workflows are finished (successfully or not).""" - if not self.packages: - return False - return all(pkg.publish_completed for pkg in 
self.packages.values()) - - def has_publish_failures(self) -> bool: - """Check if any publish workflows failed or were cancelled.""" - if not self.packages: - return False - return any( - pkg.publish_completed - and pkg.publish_workflow - and pkg.publish_workflow.conclusion != WorkflowConclusion.SUCCESS - for pkg in self.packages.values() - ) - - class RedisVersion(BaseModel): """Represents a parsed Redis version. diff --git a/src/redis_release/orchestrator.py b/src/redis_release/orchestrator.py deleted file mode 100644 index 7f44558..0000000 --- a/src/redis_release/orchestrator.py +++ /dev/null @@ -1,361 +0,0 @@ -"""Main orchestration logic for Redis release automation.""" - -from dataclasses import dataclass -from typing import Optional - -from rich.console import Console - -from .github_client import GitHubClient -from .models import ( - PackageState, - PackageType, - ReleaseState, - ReleaseType, - WorkflowConclusion, - WorkflowRun, -) -from .state_manager import StateManager -from .workflow_executor import BuildPhase, PhaseExecutor, PublishPhase - -console = Console() - - -@dataclass -class ReleaseResult: - """Result of a release operation.""" - - success: bool - message: Optional[str] = None - state: Optional[ReleaseState] = None - - -class ReleaseOrchestrator: - """Main orchestrator for Redis release automation.""" - - def __init__( - self, - github_token: str, - state_bucket: Optional[str] = None, - ): - """Initialize release orchestrator. 
- - Args: - github_token: GitHub API token - state_bucket: S3 bucket for state storage - """ - self.github_token = github_token - self.state_bucket = state_bucket - - self._github_client: Optional[GitHubClient] = None - self._state_manager: Optional[StateManager] = None - - self.docker_config = { - "repo": "Peter-Sh/docker-library-redis", - "workflow": "release_build_and_test.yml", - } - - def _get_github_client(self, dry_run: bool = False) -> GitHubClient: - """Get GitHub client instance.""" - if self._github_client is None or self._github_client.dry_run != dry_run: - self._github_client = GitHubClient(self.github_token, dry_run=dry_run) - return self._github_client - - def _get_state_manager(self, dry_run: bool = False) -> StateManager: - """Get state manager instance.""" - if self._state_manager is None or self._state_manager.dry_run != dry_run: - self._state_manager = StateManager( - bucket_name=self.state_bucket, - dry_run=dry_run, - ) - return self._state_manager - - def _determine_release_type( - self, tag: str, override: ReleaseType = None - ) -> ReleaseType: - """Determine release type from tag name.""" - if override is not None: - return override - - if tag.endswith(tuple(f"-int{i}" for i in range(1, 100))): - return ReleaseType.INTERNAL - - return ReleaseType.PUBLIC - - def _get_docker_repo(self) -> str: - """Get Docker repository name.""" - return self.docker_config["repo"] - - def _get_docker_branch(self, tag: str) -> str: - """Determine the correct branch for Docker workflow based on release tag. - - Args: - tag: Release tag (e.g., "8.2.1", "8.4-m01") - - Returns: - Branch name to use for workflow trigger - """ - # extract major.minor version from tag - # examples: "8.2.1" -> "8.2", "8.4-m01" -> "8.4" - if "." 
in tag: - parts = tag.split(".") - if len(parts) >= 2: - major_minor = f"{parts[0]}.{parts[1]}" - return f"release/{major_minor}" - - console.print( - f"[yellow]Warning: Could not determine branch for tag '{tag}', using 'main'[/yellow]" - ) - return "main" - - def _create_initial_state( - self, - tag: str, - release_type: ReleaseType, - github_client: GitHubClient = None, - ) -> ReleaseState: - """Create initial release state.""" - state = ReleaseState( - tag=tag, - release_type=release_type, - ) - - if github_client: - console.print("[dim]Getting commit information...[/dim]") - - redis_commit = github_client.get_tag_commit("redis/redis", tag) - if redis_commit: - state.redis_tag_commit = redis_commit - console.print(f"[dim] Redis tag {tag}: {redis_commit[:8]}[/dim]") - else: - raise ValueError( - f"Redis tag '{tag}' not found in redis/redis repository. Cannot proceed with release." - ) - - docker_branch = self._get_docker_branch(tag) - docker_commit = github_client.get_branch_latest_commit( - self._get_docker_repo(), docker_branch - ) - if docker_commit: - state.docker_repo_commit = docker_commit - console.print( - f"[dim] Docker repo {docker_branch}: {docker_commit[:8]}[/dim]" - ) - else: - console.print( - f"[yellow]Warning: Could not get latest commit from {docker_branch}[/yellow]" - ) - - state.packages[PackageType.DOCKER] = PackageState( - package_type=PackageType.DOCKER - ) - - return state - - def execute_release( - self, - tag: str, - force_rebuild: bool = False, - release_type: ReleaseType = None, - dry_run: bool = False, - ) -> ReleaseResult: - """Execute the main release workflow. 
- - Args: - tag: Release tag - force_rebuild: Force rebuild all packages - clients_test_passed: Whether client testing is complete - release_type: Override release type detection - packages: Only process specific packages - dry_run: Simulate operations without making changes - - Returns: - ReleaseResult with operation outcome - """ - console.print(f"[bold blue] Starting release process for {tag}[/bold blue]") - - github_client = self._get_github_client(dry_run) - state_manager = self._get_state_manager(dry_run) - - actual_release_type = self._determine_release_type(tag, release_type) - console.print(f"[blue]Release type: {actual_release_type.value}[/blue]") - - # use release tag as lock identifier to prevent concurrent releases - lock_owner = f"release-{tag}" - - try: - if not state_manager.acquire_lock(tag, lock_owner): - return ReleaseResult( - success=False, - message="Could not acquire lock - another release process may be running", - ) - - state = state_manager.load_state(tag) - if state is None or force_rebuild: - console.print("[blue]Creating new release state[/blue]") - try: - state = self._create_initial_state( - tag, actual_release_type, github_client - ) - except ValueError as e: - console.print(f"[red]Release validation failed: {e}[/red]") - return ReleaseResult( - success=False, - message=str(e), - state=None, - ) - else: - console.print("[blue]Loaded existing release state[/blue]") - - if force_rebuild or self._should_run_build_phase(state): - console.print("[bold blue] Starting build phase[/bold blue]") - build_result = self._execute_build_phase(state, github_client) - if not build_result: - state_manager.save_state(state) - return ReleaseResult( - success=False, message="Build phase failed", state=state - ) - else: - docker_state = state.packages.get(PackageType.DOCKER) - self._print_completed_state_phase( - phase_completed=( - docker_state.build_completed if docker_state else False - ), - workflow=docker_state.build_workflow if docker_state else 
None, - name="Build", - ) - - state_manager.save_state(state) - - # Execute publish phase if needed - if force_rebuild or self._should_run_publish_phase(state): - console.print("[blue]Starting publish phase...[/blue]") - if not self._execute_publish_phase(state, github_client): - return ReleaseResult( - success=False, message="Publish phase failed", state=state - ) - else: - docker_state = state.packages.get(PackageType.DOCKER) - self._print_completed_state_phase( - phase_completed=( - docker_state.publish_completed if docker_state else False - ), - workflow=docker_state.publish_workflow if docker_state else None, - name="Publish", - ) - - state_manager.save_state(state) - - if state.is_build_successful() and state.is_publish_successful(): - return ReleaseResult( - success=True, - message=f"Release {tag} completed successfully!", - state=state, - ) - else: - return ReleaseResult( - success=False, message=f"Release {tag} failed", state=state - ) - - finally: - state_manager.save_state(state) - state_manager.release_lock(tag, lock_owner) - - def _should_run_build_phase(self, state: ReleaseState) -> bool: - """Check if build phase should be executed.""" - docker_state = state.packages.get(PackageType.DOCKER) - return not docker_state or not docker_state.is_build_phase_successful() - - def _should_run_publish_phase(self, state: ReleaseState) -> bool: - """Check if publish phase should be executed.""" - # Only run publish phase if build phase is complete - docker_state = state.packages.get(PackageType.DOCKER) - docker_state = state.packages.get(PackageType.DOCKER) - if not docker_state or not docker_state.is_publish_phase_successful(): - return state.release_type == ReleaseType.PUBLIC - - def _print_completed_state_phase( - self, phase_completed: bool, workflow: Optional[WorkflowRun], name: str - ) -> None: - """Print the current phase state when phase is already completed.""" - if phase_completed: - if workflow and workflow.conclusion: - conclusion = 
workflow.conclusion.value - if conclusion == "success": - console.print( - f"[green] {name} phase already completed successfully[/green]" - ) - console.print( - f"[dim] Skipping workflow execution - {name} is done[/dim]" - ) - else: - console.print( - f"[yellow] {name} phase already completed with status: {conclusion}[/yellow]" - ) - console.print( - "[dim] Skipping workflow execution - use --force-rebuild to retry[/dim]" - ) - else: - console.print(f"[yellow] {name} phase already completed[/yellow]") - console.print( - "[dim] Skipping workflow execution - use --force-rebuild to retry[/dim]" - ) - else: - console.print(f"[blue] No {name.lower()} phase needed[/blue]") - - def _execute_build_phase( - self, state: ReleaseState, github_client: GitHubClient - ) -> bool: - """Execute build phase for all packages. - - Returns: - True if all builds succeeded - """ - repo = self._get_docker_repo() - - build_phase = BuildPhase( - state=state, - repo=repo, - orchestrator_config=self.docker_config, - timeout_minutes=45, - ) - - executor = PhaseExecutor() - return executor.execute_phase(build_phase, github_client) - - def _execute_publish_phase( - self, state: ReleaseState, github_client: GitHubClient - ) -> bool: - """Execute publish phase for all packages. - - Returns: - True if all publishes succeeded - - Raises: - RuntimeError: If release_handle doesn't exist in state (raised by PublishPhase) - """ - repo = self._get_docker_repo() - - publish_phase = PublishPhase( - state=state, - repo=repo, - orchestrator_config=self.docker_config, - timeout_minutes=30, # Publish might be faster than build - ) - - executor = PhaseExecutor() - return executor.execute_phase(publish_phase, github_client) - - def get_release_status( - self, tag: str, dry_run: bool = False - ) -> Optional[ReleaseState]: - """Get current release status. 
- - Args: - tag: Release tag - dry_run: Use local cache instead of S3 - - Returns: - ReleaseState or None if not found - """ - state_manager = self._get_state_manager(dry_run=dry_run) - return state_manager.load_state(tag) diff --git a/src/redis_release/state_manager.py b/src/redis_release/state_manager.py index 99b1c0b..772c069 100644 --- a/src/redis_release/state_manager.py +++ b/src/redis_release/state_manager.py @@ -3,14 +3,19 @@ import json import logging import os -from builtins import NotImplementedError +import uuid from datetime import datetime -from typing import Optional +from typing import Any, Dict, Optional, Protocol import boto3 from botocore.exceptions import ClientError, NoCredentialsError +from rich.pretty import pretty_repr -from .models import ReleaseState +from redis_release.bht.args import ReleaseArgs +from redis_release.bht.state import ReleaseState, logger, print_state_table +from redis_release.config import Config + +from .bht.state import ReleaseState logger = logging.getLogger(__name__) @@ -94,49 +99,199 @@ def s3_client(self) -> Optional[boto3.client]: return self._s3_client -class StateManager(S3Backed): - """Manages release state persistence in S3.""" +class StateStorage(Protocol): + """Protocol for state storage backends.""" - def __init__( - self, - bucket_name: Optional[str] = None, - dry_run: bool = False, - aws_region: str = "us-east-1", - aws_profile: Optional[str] = None, - ): - super().__init__(bucket_name, dry_run, aws_region, aws_profile) + def get(self, tag: str) -> Optional[dict]: + """Load state data by tag. - def _create_bucket(self) -> None: - """Create S3 bucket if it doesn't exist.""" - if self._s3_client is None: - raise RuntimeError("S3 client not initialized") + Args: + tag: Release tag - try: - logger.info(f"Creating S3 bucket: {self.bucket_name}") + Returns: + State dict or None if not found + """ + ... 
- if self.aws_region == "us-east-1": - self._s3_client.create_bucket(Bucket=self.bucket_name) - else: - self._s3_client.create_bucket( - Bucket=self.bucket_name, - CreateBucketConfiguration={"LocationConstraint": self.aws_region}, - ) + def put(self, tag: str, state: dict) -> None: + """Save state data by tag. - self._s3_client.put_bucket_versioning( - Bucket=self.bucket_name, VersioningConfiguration={"Status": "Enabled"} - ) + Args: + tag: Release tag + state: State dict to save + """ + ... - logger.info(f"S3 bucket created successfully: {self.bucket_name}") + def acquire_lock(self, tag: str) -> bool: + """Acquire a lock for the release process. - except ClientError as e: - if e.response["Error"]["Code"] == "BucketAlreadyOwnedByYou": - logger.warning(f"Bucket already exists: {self.bucket_name}") + Args: + tag: Release tag + + Returns: + True if lock acquired successfully + """ + ... + + def release_lock(self, tag: str) -> bool: + """Release a lock for the release process. + + Args: + tag: Release tag + + Returns: + True if lock released successfully + """ + ... + + +class StateSyncer: + """Syncs ReleaseState to storage backend only when changed. + + Can be used as a context manager to automatically acquire and release locks. 
+ """ + + def __init__( + self, + storage: StateStorage, + config: Config, + args: "ReleaseArgs", + read_only: bool = False, + ): + self.tag = args.release_tag + self.storage = storage + self.config = config + self.args = args + self.last_dump: Optional[str] = None + self._state: Optional[ReleaseState] = None + self._lock_acquired = False + self.read_only = read_only + + def __enter__(self) -> "StateSyncer": + if self.read_only: + return self + """Acquire lock when entering context.""" + if not self.storage.acquire_lock(self.tag): + raise RuntimeError(f"Failed to acquire lock for tag: {self.tag}") + self._lock_acquired = True + logger.info(f"Lock acquired for tag: {self.tag}") + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + if self.read_only: + return + """Release lock when exiting context.""" + if self._lock_acquired: + self.storage.release_lock(self.tag) + self._lock_acquired = False + logger.info(f"Lock released for tag: {self.tag}") + + @property + def state(self) -> ReleaseState: + if self._state is None: + loaded = None + if self.args.force_rebuild and "all" in self.args.force_rebuild: + logger.info( + "Force rebuild 'all' enabled, using default state based on config" + ) + loaded = self.default_state() else: - logger.error(f"Failed to create bucket: {e}") - raise + loaded = self.load() - def load_state(self, tag: str) -> Optional[ReleaseState]: - """Load release state from S3. 
+ if loaded is None: + self._state = self.default_state() + else: + self._state = loaded + self.apply_args(self._state) + logger.debug(pretty_repr(self._state)) + return self._state + + def default_state(self) -> ReleaseState: + """Create default state from config.""" + state = ReleaseState.from_config(self.config) + self.apply_args(state) + return state + + def apply_args(self, state: ReleaseState) -> None: + """Apply arguments to state.""" + state.meta.tag = self.tag + + if self.args: + if "all" in self.args.force_rebuild: + # Set force_rebuild for all packages + for package_name in state.packages: + state.packages[package_name].meta.ephemeral.force_rebuild = True + else: + # Set force_rebuild for specific packages + for package_name in self.args.force_rebuild: + if package_name in state.packages: + state.packages[package_name].meta.ephemeral.force_rebuild = True + + def load(self) -> Optional[ReleaseState]: + """Load state from storage backend.""" + state_data = self.storage.get(self.tag) + if state_data is None: + return None + + state = ReleaseState(**state_data) + self.last_dump = state.model_dump_json(indent=2) + return state + + def sync(self) -> None: + """Save state to storage backend if changed since last sync.""" + if self.read_only: + raise RuntimeError("Cannot sync read-only state") + current_dump = self.state.model_dump_json(indent=2) + + if current_dump != self.last_dump: + self.last_dump = current_dump + state_dict = json.loads(current_dump) + self.storage.put(self.tag, state_dict) + logger.debug("State saved") + + +class InMemoryStateStorage: + """In-memory state storage for testing.""" + + def __init__(self) -> None: + self._storage: Dict[str, dict] = {} + self._locks: Dict[str, bool] = {} + + def get(self, tag: str) -> Optional[dict]: + """Load state data by tag.""" + return self._storage.get(tag) + + def put(self, tag: str, state: dict) -> None: + """Save state data by tag.""" + self._storage[tag] = state + + def acquire_lock(self, tag: str) -> 
bool: + """Acquire a lock for the release process.""" + if self._locks.get(tag, False): + return False + self._locks[tag] = True + return True + + def release_lock(self, tag: str) -> bool: + """Release a lock for the release process.""" + self._locks[tag] = False + return True + + +class S3StateStorage(S3Backed): + def __init__( + self, + bucket_name: Optional[str] = None, + aws_region: str = "us-east-1", + aws_profile: Optional[str] = None, + owner: Optional[str] = None, + ): + super().__init__(bucket_name, False, aws_region, aws_profile) + # Generate UUID for this instance to use as lock owner + self.owner = owner if owner else str(uuid.uuid4()) + + def get(self, tag: str) -> Optional[dict]: + """Load blackboard data from S3. Args: tag: Release tag @@ -144,57 +299,42 @@ def load_state(self, tag: str) -> Optional[ReleaseState]: Returns: ReleaseState object or None if not found """ - state_key = f"release-state/{tag}.json" - logger.info(f"Loading state for tag: {tag}") - - if self.dry_run: - state_data = self._local_state_cache.get(state_key) - if state_data: - logger.debug("DRY RUN - loaded from local cache") - return ReleaseState.model_validate(state_data) - else: - logger.debug("DRY RUN - no state found in cache") - return None + state_key = f"release-state/{tag}-blackboard.json" + logger.debug(f"Loading blackboard for tag: {tag}") if self.s3_client is None: raise RuntimeError("S3 client not initialized") try: response = self.s3_client.get_object(Bucket=self.bucket_name, Key=state_key) - state_data = json.loads(response["Body"].read().decode("utf-8")) + state_data: dict = json.loads(response["Body"].read().decode("utf-8")) - logger.info("State loaded successfully") + logger.debug("Blackboard loaded successfully") - return ReleaseState.model_validate(state_data) + return state_data except ClientError as e: if e.response["Error"]["Code"] == "NoSuchKey": - logger.warning(f"No existing state found for tag: {tag}") + logger.debug(f"No existing blackboard found for 
tag: {tag}") return None else: - logger.error(f"Failed to load state: {e}") + logger.error(f"Failed to load blackboard: {e}") raise - def save_state(self, state: ReleaseState) -> None: + def put(self, tag: str, state: dict) -> None: """Save release state to S3. Args: state: ReleaseState object to save """ - state_key = f"release-state/{state.tag}.json" - logger.info(f"Saving state for tag: {state.tag}") - - state_data = state.model_dump(mode="json") - state_json = json.dumps(state_data, indent=2, default=str) - - if self.dry_run: - logger.debug("DRY RUN - saved to local cache") - self._local_state_cache[state_key] = state_data - return + state_key = f"release-state/{tag}-blackboard.json" + logger.debug(f"Saving blackboard for tag: {tag}") if self.s3_client is None: raise RuntimeError("S3 client not initialized") + state_json = json.dumps(state, indent=2, default=str) + try: self.s3_client.put_object( Bucket=self.bucket_name, @@ -202,40 +342,34 @@ def save_state(self, state: ReleaseState) -> None: Body=state_json, ContentType="application/json", Metadata={ - "tag": state.tag, - "release_type": state.release_type.value, + "tag": tag, }, ) - logger.info("State saved successfully") + logger.debug("Blackboard saved successfully") except ClientError as e: - logger.error(f"Failed to save state: {e}") + logger.error(f"Failed to save blackboard: {e}") raise - def acquire_lock(self, tag: str, owner: str) -> bool: + def acquire_lock(self, tag: str) -> bool: """Acquire a lock for the release process. 
Args: tag: Release tag - owner: Lock owner identifier Returns: True if lock acquired successfully """ lock_key = f"release-locks/{tag}.lock" - logger.info(f"Acquiring lock for tag: {tag}") - - if self.dry_run: - logger.debug("DRY RUN - lock acquired") - return True + logger.debug(f"Acquiring lock for tag: {tag}") if self.s3_client is None: raise RuntimeError("S3 client not initialized") lock_data = { "tag": tag, - "owner": owner, + "owner": self.owner, "acquired_at": datetime.now().isoformat(), } @@ -249,7 +383,7 @@ def acquire_lock(self, tag: str, owner: str) -> bool: IfNoneMatch="*", ) - logger.info("Lock acquired successfully") + logger.debug("Lock acquired successfully") return True except ClientError as e: @@ -259,35 +393,28 @@ def acquire_lock(self, tag: str, owner: str) -> bool: Bucket=self.bucket_name, Key=lock_key ) existing_lock = json.loads(response["Body"].read().decode("utf-8")) - logger.error( - f"Lock already held by: {existing_lock.get('owner', 'unknown')}" - ) - logger.debug( - f"Acquired at: {existing_lock.get('acquired_at', 'unknown')}" + logger.warning( + f"Lock already held by: {existing_lock.get('owner', 'unknown')}, " + f"acquired at: {existing_lock.get('acquired_at', 'unknown')}" ) except: - logger.error("Lock exists but couldn't read details") + logger.warning("Lock exists but couldn't read details") return False else: logger.error(f"Failed to acquire lock: {e}") raise - def release_lock(self, tag: str, owner: str) -> bool: + def release_lock(self, tag: str) -> bool: """Release a lock for the release process. 
Args: tag: Release tag - owner: Lock owner identifier Returns: True if lock released successfully """ lock_key = f"release-locks/{tag}.lock" - logger.info(f"Releasing lock for tag: {tag}") - - if self.dry_run: - logger.debug("DRY RUN - lock released") - return True + logger.debug(f"Releasing lock for tag: {tag}") if self.s3_client is None: raise RuntimeError("S3 client not initialized") @@ -297,17 +424,17 @@ def release_lock(self, tag: str, owner: str) -> bool: response = self.s3_client.get_object(Bucket=self.bucket_name, Key=lock_key) lock_data = json.loads(response["Body"].read().decode("utf-8")) - if lock_data.get("owner") != owner: + if lock_data.get("owner") != self.owner: logger.error(f"Cannot release lock owned by: {lock_data.get('owner')}") return False self.s3_client.delete_object(Bucket=self.bucket_name, Key=lock_key) - logger.info("Lock released successfully") + logger.debug("Lock released successfully") return True except ClientError as e: if e.response["Error"]["Code"] == "NoSuchKey": - logger.warning(f"No lock found for tag: {tag}") + logger.debug(f"No lock found for tag: {tag}") return True else: logger.error(f"Failed to release lock: {e}") diff --git a/src/redis_release/workflow_executor.py b/src/redis_release/workflow_executor.py deleted file mode 100644 index b44164e..0000000 --- a/src/redis_release/workflow_executor.py +++ /dev/null @@ -1,372 +0,0 @@ -"""Workflow execution classes for Redis release automation.""" - -import json -from abc import ABC, abstractmethod -from typing import Any, Dict, Optional - -from rich.console import Console - -from .github_client import GitHubClient -from .models import ( - PackageState, - PackageType, - ReleaseState, - WorkflowConclusion, - WorkflowRun, -) - -console = Console() - - -class Phase(ABC): - """Abstract base class for workflow phases.""" - - def __init__( - self, - state: ReleaseState, - repo: str, - orchestrator_config: Dict[str, Any], - timeout_minutes: int = 45, - ): - self.state = state - self.repo 
= repo - self.orchestrator_config = orchestrator_config - self.timeout_minutes = timeout_minutes - - @property - @abstractmethod - def phase_name(self) -> str: - """Human-readable phase name for logging.""" - pass - - @property - @abstractmethod - def package_state(self) -> PackageState: - """Get the package state for this phase.""" - pass - - @property - @abstractmethod - def branch(self) -> str: - """Get the branch to run the workflow on.""" - pass - - @property - @abstractmethod - def workflow_file(self) -> str: - """Get the workflow file name.""" - pass - - @property - @abstractmethod - def workflow_inputs(self) -> Dict[str, Any]: - """Get the inputs to pass to the workflow.""" - pass - - @abstractmethod - def is_completed(self) -> bool: - """Check if this phase is already completed.""" - pass - - @abstractmethod - def get_workflow(self) -> Optional[WorkflowRun]: - """Get the current workflow for this phase.""" - pass - - @abstractmethod - def set_workflow(self, workflow: WorkflowRun) -> None: - """Set the workflow for this phase.""" - pass - - @abstractmethod - def set_completed(self, completed: bool) -> None: - """Mark this phase as completed.""" - pass - - @abstractmethod - def set_artifacts(self, artifacts: Dict[str, Dict[str, Any]]) -> None: - """Set artifacts for this phase.""" - pass - - @abstractmethod - def set_result(self, result_data: Dict[str, Any]) -> None: - """Set phase-specific result data.""" - pass - - @abstractmethod - def extract_result( - self, github_client: GitHubClient, artifacts: Dict[str, Dict[str, Any]] - ) -> Optional[Dict[str, Any]]: - """Extract phase-specific result data from artifacts.""" - pass - - def _get_release_branch(self) -> str: - """Get the release branch based on the release tag. - - Extracts major.minor from tag (e.g., "8.2.1" -> "release/8.2"). 
- - Returns: - Release branch name - - Raises: - ValueError: If tag format is invalid - """ - tag_parts = self.state.tag.split(".") - if len(tag_parts) < 2: - raise ValueError( - f"Invalid tag format '{self.state.tag}': expected at least major.minor version" - ) - - try: - # Validate that major and minor are numeric - int(tag_parts[0]) - int(tag_parts[1]) - except ValueError: - raise ValueError( - f"Invalid tag format '{self.state.tag}': major and minor versions must be numeric" - ) - - major_minor = f"{tag_parts[0]}.{tag_parts[1]}" - return f"release/{major_minor}" - - -class BuildPhase(Phase): - """Build phase implementation.""" - - @property - def phase_name(self) -> str: - return "Docker build" - - @property - def package_state(self) -> PackageState: - return self.state.packages[PackageType.DOCKER] - - @property - def branch(self) -> str: - """Get the Docker branch based on the release tag.""" - return self._get_release_branch() - - @property - def workflow_file(self) -> str: - """Get the build workflow file from orchestrator config.""" - return self.orchestrator_config.get("workflow", "release_build_and_test.yml") - - @property - def workflow_inputs(self) -> Dict[str, Any]: - """Get the build workflow inputs.""" - return { - "release_tag": self.state.tag, - } - - def is_completed(self) -> bool: - return self.package_state.build_completed - - def get_workflow(self) -> Optional[WorkflowRun]: - return self.package_state.build_workflow - - def set_workflow(self, workflow: WorkflowRun) -> None: - self.package_state.build_workflow = workflow - - def set_completed(self, completed: bool) -> None: - self.package_state.build_completed = completed - - def set_artifacts(self, artifacts: Dict[str, Dict[str, Any]]) -> None: - self.package_state.build_artifacts = artifacts - - def set_result(self, result_data: Dict[str, Any]) -> None: - self.package_state.release_handle = result_data - - def extract_result( - self, github_client: GitHubClient, artifacts: Dict[str, Dict[str, 
Any]] - ) -> Optional[Dict[str, Any]]: - """Extract release_handle from artifacts.""" - result = github_client.extract_result( - self.repo, artifacts, "release_handle", "release_handle.json" - ) - if result is None: - console.print("[red]Failed to extract release_handle from artifacts[/red]") - return result - - -class PublishPhase(Phase): - """Publish phase implementation.""" - - @property - def phase_name(self) -> str: - return "Docker publish" - - @property - def package_state(self) -> PackageState: - return self.state.packages[PackageType.DOCKER] - - @property - def branch(self) -> str: - """Get the Docker branch based on the release tag.""" - return self._get_release_branch() - - @property - def workflow_file(self) -> str: - """Get the publish workflow file from orchestrator config.""" - return self.orchestrator_config.get("publish_workflow", "release_publish.yml") - - @property - def workflow_inputs(self) -> Dict[str, Any]: - """Get the publish workflow inputs. - - Raises: - RuntimeError: If release_handle is not available in package state - """ - if not self.package_state.release_handle: - raise RuntimeError( - "release_handle is required for publish phase but not found in package state" - ) - - return { - "release_handle": json.dumps(self.package_state.release_handle), - } - - def is_completed(self) -> bool: - return self.package_state.publish_completed - - def get_workflow(self) -> Optional[WorkflowRun]: - return self.package_state.publish_workflow - - def set_workflow(self, workflow: WorkflowRun) -> None: - self.package_state.publish_workflow = workflow - - def set_completed(self, completed: bool) -> None: - self.package_state.publish_completed = completed - - def set_artifacts(self, artifacts: Dict[str, Dict[str, Any]]) -> None: - self.package_state.publish_artifacts = artifacts - - def set_result(self, result_data: Dict[str, Any]) -> None: - self.package_state.publish_info = result_data - - def extract_result( - self, github_client: GitHubClient, 
artifacts: Dict[str, Dict[str, Any]] - ) -> Optional[Dict[str, Any]]: - """Extract release_info from artifacts.""" - result = github_client.extract_result( - self.repo, artifacts, "release_info", "release_info.json" - ) - if result is None: - console.print("[red]Failed to extract release_info from artifacts[/red]") - return result - - -class PhaseExecutor: - """Executes workflow phases.""" - - def execute_phase(self, phase: Phase, github_client: GitHubClient) -> bool: - """Execute a workflow phase. - - Args: - phase: The phase to execute - github_client: GitHub client for API operations - - Returns: - True if phase succeeded, False otherwise - """ - if not self._trigger_workflow(phase, github_client): - return False - - # Wait for workflow completion if needed - workflow = phase.get_workflow() - console.print("[dim]Waiting for workflow completion...[/dim]") - return self._wait_for_completion(phase, github_client, workflow) - - def _trigger_workflow(self, phase: Phase, github_client: GitHubClient) -> bool: - """Trigger the workflow for a phase.""" - console.print(f"[dim]Using branch: {phase.branch}[/dim]") - - if not github_client.check_workflow_exists(phase.repo, phase.workflow_file): - console.print( - f"[red]Workflow '{phase.workflow_file}' not found in {phase.repo}[/red]" - ) - console.print( - f"[yellow]Make sure the workflow file exists in branch '{phase.branch}'[/yellow]" - ) - return False - - try: - workflow_run = github_client.trigger_workflow( - phase.repo, phase.workflow_file, phase.workflow_inputs, ref=phase.branch - ) - phase.set_workflow(workflow_run) - return True - - except Exception as e: - console.print(f"[red]Failed to trigger {phase.phase_name}: {e}[/red]") - return False - - def _wait_for_completion( - self, phase: Phase, github_client: GitHubClient, workflow: WorkflowRun - ) -> bool: - """Wait for workflow completion and handle results.""" - try: - console.print(f"[blue]Waiting for {phase.phase_name} to complete...[/blue]") - completed_run = 
github_client.wait_for_workflow_completion( - workflow.repo, - workflow.run_id, - timeout_minutes=phase.timeout_minutes, - ) - - phase.set_workflow(completed_run) - - if completed_run.conclusion == WorkflowConclusion.SUCCESS: - return self._handle_success(phase, github_client, completed_run) - elif completed_run.conclusion == WorkflowConclusion.FAILURE: - phase.set_completed(True) # completed, but failed - console.print(f"[red]{phase.phase_name} failed[/red]") - return False - else: - return self._handle_other_conclusion(phase, completed_run) - - except Exception as e: - console.print(f"[red]{phase.phase_name} failed: {e}[/red]") - return False - - def _handle_success( - self, phase: Phase, github_client: GitHubClient, completed_run: WorkflowRun - ) -> bool: - """Handle successful workflow completion.""" - phase.set_completed(True) - - # Get artifacts - artifacts = github_client.get_workflow_artifacts( - completed_run.repo, completed_run.run_id - ) - phase.set_artifacts(artifacts) - - # Extract phase-specific result data - result_data = phase.extract_result(github_client, artifacts) - if result_data is None: - return False - - phase.set_result(result_data) - console.print(f"[green]{phase.phase_name} completed successfully[/green]") - return True - - def _handle_other_conclusion( - self, phase: Phase, completed_run: WorkflowRun - ) -> bool: - """Handle non-success, non-failure conclusions.""" - phase.set_completed(True) # completed, but not successful - conclusion_text = ( - completed_run.conclusion.value - if completed_run.conclusion - else "cancelled/skipped" - ) - - if conclusion_text in ["cancelled", "cancelled/skipped"]: - status_color = "yellow" - elif conclusion_text in ["skipped"]: - status_color = "blue" - else: - status_color = "red" - - console.print( - f"[dim]{phase.phase_name} completed with status:[/dim] [{status_color}]{conclusion_text}[/{status_color}]" - ) - return False diff --git a/src/tests/test_state.py b/src/tests/test_state.py index 
53ed440..683fc6d 100644 --- a/src/tests/test_state.py +++ b/src/tests/test_state.py @@ -6,14 +6,10 @@ import pytest from redis_release.bht.args import ReleaseArgs -from redis_release.bht.state import ( - InMemoryStateStorage, - ReleaseState, - StateSyncer, - Workflow, -) +from redis_release.bht.state import ReleaseState, Workflow from redis_release.config import Config, PackageConfig from redis_release.models import PackageType +from redis_release.state_manager import InMemoryStateStorage, StateSyncer class TestReleaseStateFromConfig: From 37b1cfc4f532752186ee863bc888dc0286adfea1 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Sat, 18 Oct 2025 17:12:32 +0300 Subject: [PATCH 32/39] Continue moving around the code --- src/redis_release/bht/tree.py | 249 +++++++++++++++++----------------- 1 file changed, 128 insertions(+), 121 deletions(-) diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 15d58d8..b8d4e76 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -1,3 +1,8 @@ +""" +This module contains tree initialization, larger branches creation +and utility functions to run or inspect the tree. +""" + import asyncio import logging import os @@ -50,125 +55,6 @@ logger = logging.getLogger(__name__) -class TreeInspector: - """Inspector for creating and inspecting behavior tree branches and PPAs.""" - - # List of available branch/PPA names - AVAILABLE_NAMES = [ - "workflow_success", - "workflow_completion", - "find_workflow", - "trigger_workflow", - "identify_target_ref", - "download_artifacts", - "extract_artifact_result", - "workflow_complete_branch", - "workflow_with_result_branch", - "publish_workflow_branch", - "build_workflow_branch", - "demo_sequence", - "demo_selector", - ] - - def __init__(self, release_tag: str): - """Initialize TreeInspector. 
- - Args: - release_tag: Release tag for creating mock ReleaseMeta - """ - self.release_tag = release_tag - - def get_names(self) -> List[str]: - """Get list of available branch/PPA names. - - Returns: - List of available names that can be passed to create_by_name() - """ - return self.AVAILABLE_NAMES.copy() - - def create_by_name(self, name: str) -> Union[Selector, Sequence, Behaviour]: - """Create a branch or PPA by name. - - Args: - name: Name of the branch or PPA to create - - Returns: - The created behavior tree branch or PPA - - Raises: - ValueError: If the name is not found in the available branches - """ - if name not in self.AVAILABLE_NAMES: - available = ", ".join(self.get_names()) - raise ValueError(f"Unknown name '{name}'. Available options: {available}") - - # Create mock objects for PPA/branch creation - workflow = Workflow(workflow_file="test.yml", inputs={}) - package_meta = PackageMeta(repo="redis/redis", ref="main") - release_meta = ReleaseMeta(tag=self.release_tag) - github_client = GitHubClientAsync(token="dummy") - package = Package( - meta=package_meta, - build=workflow, - publish=Workflow(workflow_file="publish.yml", inputs={}), - ) - log_prefix = "test" - - # Create and return the requested branch/PPA - if name == "workflow_success": - return create_workflow_success_ppa(workflow, log_prefix) - elif name == "workflow_completion": - return create_workflow_completion_ppa( - workflow, package_meta, github_client, log_prefix - ) - elif name == "find_workflow": - return create_find_workflow_by_uuid_ppa( - workflow, package_meta, github_client, log_prefix - ) - elif name == "trigger_workflow": - return create_trigger_workflow_ppa( - workflow, package_meta, release_meta, github_client, log_prefix - ) - elif name == "identify_target_ref": - return create_identify_target_ref_ppa( - package_meta, release_meta, github_client, log_prefix - ) - elif name == "download_artifacts": - return create_download_artifacts_ppa( - workflow, package_meta, 
github_client, log_prefix - ) - elif name == "extract_artifact_result": - return create_extract_artifact_result_ppa( - "test-artifact", workflow, package_meta, github_client, log_prefix - ) - elif name == "workflow_complete_branch": - return create_workflow_complete_tree_branch( - workflow, package_meta, release_meta, github_client, "" - ) - elif name == "workflow_with_result_branch": - return create_workflow_with_result_tree_branch( - "artifact", workflow, package_meta, release_meta, github_client, "" - ) - elif name == "publish_workflow_branch": - return create_publish_workflow_tree_branch( - workflow, - workflow, - package_meta, - release_meta, - workflow, - github_client, - "", - ) - elif name == "build_workflow_branch": - return create_build_workflow_tree_branch( - package, release_meta, package, github_client, "" - ) - elif name == "demo_sequence": - return create_sequence_branch() - else: # name == "demo_selector" - return create_selector_branch() - - async def async_tick_tock(tree: BehaviourTree, cutoff: int = 100) -> None: """Drive Behaviour tree using async event loop @@ -244,8 +130,10 @@ def initialize_tree_and_state( tree.add_post_tick_handler(lambda _: state_syncer.sync()) tree.add_post_tick_handler(log_tree_state_with_markup) - yield (tree, state_syncer) - print_state_table(state_syncer.state) + try: + yield (tree, state_syncer) + finally: + print_state_table(state_syncer.state) def log_tree_state_with_markup(tree: BehaviourTree) -> None: @@ -536,6 +424,125 @@ def create_extract_result_tree_branch( return extract_artifact_result +class TreeInspector: + """Inspector for creating and inspecting behavior tree branches and PPAs.""" + + # List of available branch/PPA names + AVAILABLE_NAMES = [ + "workflow_success", + "workflow_completion", + "find_workflow", + "trigger_workflow", + "identify_target_ref", + "download_artifacts", + "extract_artifact_result", + "workflow_complete_branch", + "workflow_with_result_branch", + "publish_workflow_branch", + 
"build_workflow_branch", + "demo_sequence", + "demo_selector", + ] + + def __init__(self, release_tag: str): + """Initialize TreeInspector. + + Args: + release_tag: Release tag for creating mock ReleaseMeta + """ + self.release_tag = release_tag + + def get_names(self) -> List[str]: + """Get list of available branch/PPA names. + + Returns: + List of available names that can be passed to create_by_name() + """ + return self.AVAILABLE_NAMES.copy() + + def create_by_name(self, name: str) -> Union[Selector, Sequence, Behaviour]: + """Create a branch or PPA by name. + + Args: + name: Name of the branch or PPA to create + + Returns: + The created behavior tree branch or PPA + + Raises: + ValueError: If the name is not found in the available branches + """ + if name not in self.AVAILABLE_NAMES: + available = ", ".join(self.get_names()) + raise ValueError(f"Unknown name '{name}'. Available options: {available}") + + # Create mock objects for PPA/branch creation + workflow = Workflow(workflow_file="test.yml", inputs={}) + package_meta = PackageMeta(repo="redis/redis", ref="main") + release_meta = ReleaseMeta(tag=self.release_tag) + github_client = GitHubClientAsync(token="dummy") + package = Package( + meta=package_meta, + build=workflow, + publish=Workflow(workflow_file="publish.yml", inputs={}), + ) + log_prefix = "test" + + # Create and return the requested branch/PPA + if name == "workflow_success": + return create_workflow_success_ppa(workflow, log_prefix) + elif name == "workflow_completion": + return create_workflow_completion_ppa( + workflow, package_meta, github_client, log_prefix + ) + elif name == "find_workflow": + return create_find_workflow_by_uuid_ppa( + workflow, package_meta, github_client, log_prefix + ) + elif name == "trigger_workflow": + return create_trigger_workflow_ppa( + workflow, package_meta, release_meta, github_client, log_prefix + ) + elif name == "identify_target_ref": + return create_identify_target_ref_ppa( + package_meta, release_meta, 
github_client, log_prefix + ) + elif name == "download_artifacts": + return create_download_artifacts_ppa( + workflow, package_meta, github_client, log_prefix + ) + elif name == "extract_artifact_result": + return create_extract_artifact_result_ppa( + "test-artifact", workflow, package_meta, github_client, log_prefix + ) + elif name == "workflow_complete_branch": + return create_workflow_complete_tree_branch( + workflow, package_meta, release_meta, github_client, "" + ) + elif name == "workflow_with_result_branch": + return create_workflow_with_result_tree_branch( + "artifact", workflow, package_meta, release_meta, github_client, "" + ) + elif name == "publish_workflow_branch": + return create_publish_workflow_tree_branch( + workflow, + workflow, + package_meta, + release_meta, + workflow, + github_client, + "", + ) + elif name == "build_workflow_branch": + return create_build_workflow_tree_branch( + package, release_meta, package, github_client, "" + ) + elif name == "demo_sequence": + return create_sequence_branch() + else: # name == "demo_selector" + return create_selector_branch() + + ### Demo ### From aadc8ba7a218e477265153cb6518ad36390624df Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Sat, 18 Oct 2025 17:14:34 +0300 Subject: [PATCH 33/39] Rename StateSyncer -> StateManager --- src/redis_release/bht/tree.py | 6 +++--- src/redis_release/cli.py | 4 ++-- src/redis_release/state_manager.py | 4 ++-- src/tests/test_state.py | 14 +++++++------- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index b8d4e76..4285954 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -20,7 +20,7 @@ from ..config import Config from ..github_client_async import GitHubClientAsync -from ..state_manager import S3StateStorage, StateStorage, StateSyncer +from ..state_manager import S3StateStorage, StateManager, StateStorage from .args import ReleaseArgs from .backchain import 
latch_chains from .behaviours import NeedToPublishRelease @@ -102,14 +102,14 @@ def initialize_tree_and_state( args: ReleaseArgs, storage: Optional[StateStorage] = None, read_only: bool = False, -) -> Iterator[Tuple[BehaviourTree, StateSyncer]]: +) -> Iterator[Tuple[BehaviourTree, StateManager]]: github_client = GitHubClientAsync(token=os.getenv("GITHUB_TOKEN") or "") if storage is None: storage = S3StateStorage() # Create state syncer with storage backend and acquire lock - with StateSyncer( + with StateManager( storage=storage, config=config, args=args, diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index b909596..96efb02 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -13,7 +13,7 @@ from redis_release.state_manager import ( InMemoryStateStorage, S3StateStorage, - StateSyncer, + StateManager, ) from .bht.tree import TreeInspector, async_tick_tock, initialize_tree_and_state @@ -137,7 +137,7 @@ def status( force_rebuild=[], ) - with StateSyncer( + with StateManager( storage=S3StateStorage(), config=config, args=args, diff --git a/src/redis_release/state_manager.py b/src/redis_release/state_manager.py index 772c069..9973513 100644 --- a/src/redis_release/state_manager.py +++ b/src/redis_release/state_manager.py @@ -145,7 +145,7 @@ def release_lock(self, tag: str) -> bool: ... -class StateSyncer: +class StateManager: """Syncs ReleaseState to storage backend only when changed. Can be used as a context manager to automatically acquire and release locks. 
@@ -167,7 +167,7 @@ def __init__( self._lock_acquired = False self.read_only = read_only - def __enter__(self) -> "StateSyncer": + def __enter__(self) -> "StateManager": if self.read_only: return self """Acquire lock when entering context.""" diff --git a/src/tests/test_state.py b/src/tests/test_state.py index 683fc6d..4f8a6b0 100644 --- a/src/tests/test_state.py +++ b/src/tests/test_state.py @@ -9,7 +9,7 @@ from redis_release.bht.state import ReleaseState, Workflow from redis_release.config import Config, PackageConfig from redis_release.models import PackageType -from redis_release.state_manager import InMemoryStateStorage, StateSyncer +from redis_release.state_manager import InMemoryStateStorage, StateManager class TestReleaseStateFromConfig: @@ -492,7 +492,7 @@ def test_state_syncer_sets_tag_from_args(self) -> None: args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=[]) storage = InMemoryStateStorage() - syncer = StateSyncer(storage=storage, config=config, args=args) + syncer = StateManager(storage=storage, config=config, args=args) assert syncer.state.meta.tag == "8.4-m01" @@ -518,7 +518,7 @@ def test_state_syncer_sets_force_rebuild_from_args(self) -> None: args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker"]) storage = InMemoryStateStorage() - syncer = StateSyncer(storage=storage, config=config, args=args) + syncer = StateManager(storage=storage, config=config, args=args) assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True assert syncer.state.packages["redis"].meta.ephemeral.force_rebuild is False @@ -551,7 +551,7 @@ def test_state_syncer_sets_multiple_force_rebuild_from_args(self) -> None: args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker", "snap"]) storage = InMemoryStateStorage() - syncer = StateSyncer(storage=storage, config=config, args=args) + syncer = StateManager(storage=storage, config=config, args=args) assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True assert 
syncer.state.packages["redis"].meta.ephemeral.force_rebuild is False @@ -573,7 +573,7 @@ def test_state_syncer_without_args(self) -> None: args = ReleaseArgs(release_tag="test-tag", force_rebuild=[]) storage = InMemoryStateStorage() - syncer = StateSyncer(storage=storage, config=config, args=args) + syncer = StateManager(storage=storage, config=config, args=args) assert syncer.state.meta.tag == "test-tag" assert ( @@ -608,7 +608,7 @@ def test_state_syncer_force_rebuild_all(self) -> None: args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["all"]) storage = InMemoryStateStorage() - syncer = StateSyncer(storage=storage, config=config, args=args) + syncer = StateManager(storage=storage, config=config, args=args) # All packages should have force_rebuild set to True assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True @@ -637,7 +637,7 @@ def test_state_syncer_force_rebuild_all_with_other_values(self) -> None: args = ReleaseArgs(release_tag="8.4-m01", force_rebuild=["docker", "all"]) storage = InMemoryStateStorage() - syncer = StateSyncer(storage=storage, config=config, args=args) + syncer = StateManager(storage=storage, config=config, args=args) # All packages should have force_rebuild set to True assert syncer.state.packages["docker"].meta.ephemeral.force_rebuild is True From c7c9f89499401a31b20bfd46dc59e95e3f2421f4 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Mon, 20 Oct 2025 16:25:07 +0300 Subject: [PATCH 34/39] Return archive URL to github As milestone tags are not published to downloads --- .../validate-redis-release-archive.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/validate-redis-release-archive/validate-redis-release-archive.sh b/.github/actions/validate-redis-release-archive/validate-redis-release-archive.sh index d0a766e..cbd4674 100755 --- a/.github/actions/validate-redis-release-archive/validate-redis-release-archive.sh +++ 
b/.github/actions/validate-redis-release-archive/validate-redis-release-archive.sh @@ -13,8 +13,8 @@ if [ -z "$TAG" ]; then fi # Construct Redis archive URL -#REDIS_ARCHIVE_URL="https://github.com/redis/redis/archive/refs/tags/${TAG}.tar.gz" -REDIS_ARCHIVE_URL="https://download.redis.io/releases/redis-${TAG}.tar.gz" +REDIS_ARCHIVE_URL="https://github.com/redis/redis/archive/refs/tags/${TAG}.tar.gz" +#REDIS_ARCHIVE_URL="https://download.redis.io/releases/redis-${TAG}.tar.gz" echo "REDIS_ARCHIVE_URL: $REDIS_ARCHIVE_URL" # Download the Redis archive From f6d4c872aba5d7e15fa06f569f96e8b85e803b2c Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Mon, 20 Oct 2025 16:27:34 +0300 Subject: [PATCH 35/39] Improved comments --- config.yaml | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/config.yaml b/config.yaml index f8aac76..c00d4a4 100644 --- a/config.yaml +++ b/config.yaml @@ -1,14 +1,26 @@ version: 1 packages: docker: + # available types: docker, debian package_type: docker + # repo where the workflow is started repo: redis/docker-library-redis + # ref it not specified it will be determined based on the tag and existing branches + # should be empty for real releas, is to be used for testing only + # ref: branch_name + # build workflow name build_workflow: release_build_and_test.yml + # build workflow timeout in minutes, optional build_timeout_minutes: 45 + # static workflow inputs for build workflow build_inputs: {} + # whether to run publish workflow for internal releases publish_internal_release: no - publish_workflow: release_publish.yml # may be boolean false + # publish workflow name + publish_workflow: release_publish.yml + # publish workflow timeout in minutes, optional publish_timeout_minutes: 10 + # static workflow inputs for publish workflow publish_inputs: {} debian: package_type: debian From 29fa65ee26f13a339e581ccd47c93ff94640748f Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Mon, 20 Oct 2025 16:32:48 +0300 Subject: 
[PATCH 36/39] Fix typo in config comments --- config.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/config.yaml b/config.yaml index c00d4a4..99c63fc 100644 --- a/config.yaml +++ b/config.yaml @@ -5,8 +5,9 @@ packages: package_type: docker # repo where the workflow is started repo: redis/docker-library-redis - # ref it not specified it will be determined based on the tag and existing branches - # should be empty for real releas, is to be used for testing only + # ref to use for workflow runs, if not specified it + # will be determined based on the tag and existing branches + # Should be empty for real use cases, is to be used for testing only # ref: branch_name # build workflow name build_workflow: release_build_and_test.yml From 8e4bd49c71b0cab5f340588c8783761b8b02c99f Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Thu, 30 Oct 2025 10:47:03 +0200 Subject: [PATCH 37/39] Interpret any non empty non success workflow conclusion as FAIL Make logging_wrapper skip itself as a caller --- src/redis_release/bht/behaviours.py | 5 ++++ src/redis_release/bht/logging_wrapper.py | 32 ++++++------------------ src/redis_release/github_client_async.py | 2 +- 3 files changed, 14 insertions(+), 25 deletions(-) diff --git a/src/redis_release/bht/behaviours.py b/src/redis_release/bht/behaviours.py index 7cc7f5c..f5d807e 100644 --- a/src/redis_release/bht/behaviours.py +++ b/src/redis_release/bht/behaviours.py @@ -799,6 +799,11 @@ def update(self) -> Status: ): self.logger.info(f"Workflow completed with success status") return Status.SUCCESS + elif self.workflow.conclusion == WorkflowConclusion.FAILURE: + if self.log_once( + "workflow_unsuccessful", self.workflow.ephemeral.log_once_flags + ): + self.logger.error(f"Workflow completed with failure status") return Status.FAILURE diff --git a/src/redis_release/bht/logging_wrapper.py b/src/redis_release/bht/logging_wrapper.py index 4fb4d0d..a1ad2a8 100644 --- a/src/redis_release/bht/logging_wrapper.py +++ 
b/src/redis_release/bht/logging_wrapper.py @@ -35,33 +35,17 @@ def __init__(self, logger: logging.Logger) -> None: self._logger = logger def debug(self, msg: str) -> None: - """Log a message with severity 'DEBUG'. - - Args: - msg: The message to log - """ - self._logger.debug(msg) + """Log a message with severity 'DEBUG'.""" + self._logger.debug(msg, stacklevel=2) def info(self, msg: str) -> None: - """Log a message with severity 'INFO'. - - Args: - msg: The message to log - """ - self._logger.info(msg) + """Log a message with severity 'INFO'.""" + self._logger.info(msg, stacklevel=2) def warning(self, msg: str) -> None: - """Log a message with severity 'WARNING'. - - Args: - msg: The message to log - """ - self._logger.warning(msg) + """Log a message with severity 'WARNING'.""" + self._logger.warning(msg, stacklevel=2) def error(self, msg: str) -> None: - """Log a message with severity 'ERROR'. - - Args: - msg: The message to log - """ - self._logger.error(msg) + """Log a message with severity 'ERROR'.""" + self._logger.error(msg, stacklevel=2) diff --git a/src/redis_release/github_client_async.py b/src/redis_release/github_client_async.py index 846ad10..47508a9 100644 --- a/src/redis_release/github_client_async.py +++ b/src/redis_release/github_client_async.py @@ -334,7 +334,7 @@ async def get_workflow_run(self, repo: str, run_id: int) -> WorkflowRun: conclusion = None if github_conclusion == "success": conclusion = WorkflowConclusion.SUCCESS - elif github_conclusion == "failure": + elif github_conclusion is not None: conclusion = WorkflowConclusion.FAILURE workflow_name = data.get("name", "unknown") From a1c58d9d3b82ac0573f8c09994bb595b83f830fa Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Thu, 30 Oct 2025 11:15:30 +0200 Subject: [PATCH 38/39] Introduce uv --- README.md | 65 +- pyproject.toml | 9 +- uv.lock | 3385 ++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 3446 insertions(+), 13 deletions(-) create mode 100644 uv.lock diff --git 
a/README.md b/README.md index ae96635..7f9470e 100644 --- a/README.md +++ b/README.md @@ -4,14 +4,18 @@ A command-line tool for automating Redis OSS releases across multiple package re ## Installation -### From Source +### Using uv ```bash git clone https://github.com/redis/redis-oss-release-automation.git cd redis-oss-release-automation -pip install -e . +uv sync ``` +After `uv sync`, you can run the tool in two ways: +- **With `uv run`**: `uv run redis-release ` +- **Activate virtual environment**: `. .venv/bin/activate` then `redis-release ` + ## Prerequisites 1. **GitHub Token**: Personal access token with workflow permissions @@ -24,20 +28,38 @@ pip install -e . export GITHUB_TOKEN="ghp_xxxxxxxxxxxx" export AWS_ACCESS_KEY_ID="your-access-key-id" export AWS_SECRET_ACCESS_KEY="your-secret-access-key" -export AWS_SESSION_TOKEN="your-session-token" +export AWS_SESSION_TOKEN="your-session-token" export REDIS_RELEASE_STATE_BUCKET="redis-release-state" ``` +### AWS SSO Login + +In AWS, you can also use `aws sso login` prior to running the tool to authenticate. + ## Usage ### Basic Release +By default, `config.yaml` is used. You can specify a different config file with `--config`: + ```bash -# Start a new release +# Start a new release (uses config.yaml by default) redis-release release 8.2.0 -# Force rebuild packages -redis-release release 8.2.0 --force-rebuild +# Use custom config file +redis-release release 8.2.0 --config custom-config.yaml + +# Force rebuild all packages (WARNING: This will delete all existing state!) 
+redis-release release 8.2.0 --force-rebuild all + +# Force rebuild specific package +redis-release release 8.2.0 --force-rebuild package-name + +# Release only specific packages (can be used multiple times) +redis-release release 8.2.0 --only-packages package1 --only-packages package2 + +# Force release type (changes release-type even for existing state) +redis-release release 8.2.0 --force-release-type rc ``` ### Check Status @@ -47,9 +69,34 @@ redis-release release 8.2.0 --force-rebuild redis-release status 8.2.0 ``` -### Advanced Options +## Troubleshooting + +### Dangling Release Locks + +If you encounter a dangling lock file, you can delete it from the S3 bucket: ```bash -# Dry run mode (simulate without changes) -redis-release release 8.2.0 --dry-run +aws s3 rm s3://redis-release-state/release-locks/TAG.lock ``` + +Replace `TAG` with the release tag (e.g., `8.2.0`). + +## Diagrams + +Generate release workflow diagrams using: + +```bash +# Generate full release diagram +redis-release release-print + +# Generate diagram with custom name (list available with --help) +redis-release release-print --name NAME +``` + +**Note**: Graphviz is required to generate diagrams. + +## Configuration + +The tool uses a YAML configuration file to define release packages and their settings. By default, `config.yaml` is used. + +See `config.yaml` for an example configuration file. 
diff --git a/pyproject.toml b/pyproject.toml index 61cb611..bcd5fad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["hatchling"] +requires = ["hatchling>=1.13"] build-backend = "hatchling.build" [project] @@ -15,11 +15,11 @@ classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ] dependencies = [ "typer[all]>=0.9.0", @@ -28,7 +28,8 @@ dependencies = [ "boto3>=1.26.0", "rich>=13.0.0", "pydantic>=2.0.0", - "py_trees>=2.2,<3.0" + "py_trees>=2.2,<3.0", + "pyyaml>=6.0.3", ] [project.optional-dependencies] diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..f82013a --- /dev/null +++ b/uv.lock @@ -0,0 +1,3385 @@ +version = 1 +revision = 3 +requires-python = ">=3.8" +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", + "python_full_version < '3.9'", +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.4.4" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/7f/55/e4373e888fdacb15563ef6fa9fa8c8252476ea071e96fb46defac9f18bf2/aiohappyeyeballs-2.4.4.tar.gz", hash = "sha256:5fdd7d87889c63183afc18ce9271f9b0a7d32c2303e394468dd45d514a757745", size = 21977, upload-time = "2024-11-30T18:44:00.701Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/74/fbb6559de3607b3300b9be3cc64e97548d55678e44623db17820dbd20002/aiohappyeyeballs-2.4.4-py3-none-any.whl", hash = "sha256:a980909d50efcd44795c4afeca523296716d50cd756ddca6af8c65b996e27de8", size = 14756, upload-time = 
"2024-11-30T18:43:39.849Z" }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.10.11" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "aiohappyeyeballs", version = "2.4.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "aiosignal", version = "1.3.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "async-timeout", marker = "python_full_version < '3.9'" }, + { name = "attrs", version = "25.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "frozenlist", version = "1.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "multidict", version = "6.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "yarl", version = "1.15.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/25/a8/8e2ba36c6e3278d62e0c88aa42bb92ddbef092ac363b390dab4421da5cf5/aiohttp-3.10.11.tar.gz", hash = "sha256:9dc2b8f3dcab2e39e0fa309c8da50c3b55e6f34ab25f1a71d3288f24924d33a7", size = 7551886, upload-time = "2024-11-13T16:40:33.335Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c7/575f9e82d7ef13cb1b45b9db8a5b8fadb35107fb12e33809356ae0155223/aiohttp-3.10.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5077b1a5f40ffa3ba1f40d537d3bec4383988ee51fbba6b74aa8fb1bc466599e", size = 588218, upload-time = "2024-11-13T16:36:38.461Z" }, + { url = "https://files.pythonhosted.org/packages/12/7b/a800dadbd9a47b7f921bfddcd531371371f39b9cd05786c3638bfe2e1175/aiohttp-3.10.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8d6a14a4d93b5b3c2891fca94fa9d41b2322a68194422bef0dd5ec1e57d7d298", size = 400815, upload-time = "2024-11-13T16:36:40.547Z" }, + { url = "https://files.pythonhosted.org/packages/cb/28/7dbd53ab10b0ded397feed914880f39ce075bd39393b8dfc322909754a0a/aiohttp-3.10.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ffbfde2443696345e23a3c597049b1dd43049bb65337837574205e7368472177", size = 392099, upload-time = "2024-11-13T16:36:43.918Z" }, + { url = "https://files.pythonhosted.org/packages/6a/2e/c6390f49e67911711c2229740e261c501685fe7201f7f918d6ff2fd1cfb0/aiohttp-3.10.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20b3d9e416774d41813bc02fdc0663379c01817b0874b932b81c7f777f67b217", size = 1224854, upload-time = "2024-11-13T16:36:46.473Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c96afae129201bff4edbece52b3e1abf3a8af57529a42700669458b00b9f/aiohttp-3.10.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b943011b45ee6bf74b22245c6faab736363678e910504dd7531a58c76c9015a", size = 1259641, upload-time = "2024-11-13T16:36:48.28Z" }, + { url = 
"https://files.pythonhosted.org/packages/63/89/bedd01456442747946114a8c2f30ff1b23d3b2ea0c03709f854c4f354a5a/aiohttp-3.10.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48bc1d924490f0d0b3658fe5c4b081a4d56ebb58af80a6729d4bd13ea569797a", size = 1295412, upload-time = "2024-11-13T16:36:50.286Z" }, + { url = "https://files.pythonhosted.org/packages/9b/4d/942198e2939efe7bfa484781590f082135e9931b8bcafb4bba62cf2d8f2f/aiohttp-3.10.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e12eb3f4b1f72aaaf6acd27d045753b18101524f72ae071ae1c91c1cd44ef115", size = 1218311, upload-time = "2024-11-13T16:36:53.721Z" }, + { url = "https://files.pythonhosted.org/packages/a3/5b/8127022912f1fa72dfc39cf37c36f83e0b56afc3b93594b1cf377b6e4ffc/aiohttp-3.10.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f14ebc419a568c2eff3c1ed35f634435c24ead2fe19c07426af41e7adb68713a", size = 1189448, upload-time = "2024-11-13T16:36:55.844Z" }, + { url = "https://files.pythonhosted.org/packages/af/12/752878033c8feab3362c0890a4d24e9895921729a53491f6f6fad64d3287/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:72b191cdf35a518bfc7ca87d770d30941decc5aaf897ec8b484eb5cc8c7706f3", size = 1186484, upload-time = "2024-11-13T16:36:58.472Z" }, + { url = "https://files.pythonhosted.org/packages/61/24/1d91c304fca47d5e5002ca23abab9b2196ac79d5c531258e048195b435b2/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5ab2328a61fdc86424ee540d0aeb8b73bbcad7351fb7cf7a6546fc0bcffa0038", size = 1183864, upload-time = "2024-11-13T16:37:00.737Z" }, + { url = "https://files.pythonhosted.org/packages/c1/70/022d28b898314dac4cb5dd52ead2a372563c8590b1eaab9c5ed017eefb1e/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:aa93063d4af05c49276cf14e419550a3f45258b6b9d1f16403e777f1addf4519", size = 1241460, upload-time = "2024-11-13T16:37:03.175Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/15/2b43853330f82acf180602de0f68be62a2838d25d03d2ed40fecbe82479e/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:30283f9d0ce420363c24c5c2421e71a738a2155f10adbb1a11a4d4d6d2715cfc", size = 1258521, upload-time = "2024-11-13T16:37:06.013Z" }, + { url = "https://files.pythonhosted.org/packages/28/38/9ef2076cb06dcc155e7f02275f5da403a3e7c9327b6b075e999f0eb73613/aiohttp-3.10.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e5358addc8044ee49143c546d2182c15b4ac3a60be01c3209374ace05af5733d", size = 1207329, upload-time = "2024-11-13T16:37:08.091Z" }, + { url = "https://files.pythonhosted.org/packages/c2/5f/c5329d67a2c83d8ae17a84e11dec14da5773520913bfc191caaf4cd57e50/aiohttp-3.10.11-cp310-cp310-win32.whl", hash = "sha256:e1ffa713d3ea7cdcd4aea9cddccab41edf6882fa9552940344c44e59652e1120", size = 363835, upload-time = "2024-11-13T16:37:10.017Z" }, + { url = "https://files.pythonhosted.org/packages/0f/c6/ca5d70eea2fdbe283dbc1e7d30649a1a5371b2a2a9150db192446f645789/aiohttp-3.10.11-cp310-cp310-win_amd64.whl", hash = "sha256:778cbd01f18ff78b5dd23c77eb82987ee4ba23408cbed233009fd570dda7e674", size = 382169, upload-time = "2024-11-13T16:37:12.603Z" }, + { url = "https://files.pythonhosted.org/packages/73/96/221ec59bc38395a6c205cbe8bf72c114ce92694b58abc8c3c6b7250efa7f/aiohttp-3.10.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:80ff08556c7f59a7972b1e8919f62e9c069c33566a6d28586771711e0eea4f07", size = 587742, upload-time = "2024-11-13T16:37:14.469Z" }, + { url = "https://files.pythonhosted.org/packages/24/17/4e606c969b19de5c31a09b946bd4c37e30c5288ca91d4790aa915518846e/aiohttp-3.10.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c8f96e9ee19f04c4914e4e7a42a60861066d3e1abf05c726f38d9d0a466e695", size = 400357, upload-time = "2024-11-13T16:37:16.482Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/e5/433f59b87ba69736e446824710dd7f26fcd05b24c6647cb1e76554ea5d02/aiohttp-3.10.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fb8601394d537da9221947b5d6e62b064c9a43e88a1ecd7414d21a1a6fba9c24", size = 392099, upload-time = "2024-11-13T16:37:20.013Z" }, + { url = "https://files.pythonhosted.org/packages/d2/a3/3be340f5063970bb9e47f065ee8151edab639d9c2dce0d9605a325ab035d/aiohttp-3.10.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea224cf7bc2d8856d6971cea73b1d50c9c51d36971faf1abc169a0d5f85a382", size = 1300367, upload-time = "2024-11-13T16:37:22.645Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/a3043918466cbee9429792ebe795f92f70eeb40aee4ccbca14c38ee8fa4d/aiohttp-3.10.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db9503f79e12d5d80b3efd4d01312853565c05367493379df76d2674af881caa", size = 1339448, upload-time = "2024-11-13T16:37:24.834Z" }, + { url = "https://files.pythonhosted.org/packages/2c/60/192b378bd9d1ae67716b71ae63c3e97c48b134aad7675915a10853a0b7de/aiohttp-3.10.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0f449a50cc33f0384f633894d8d3cd020e3ccef81879c6e6245c3c375c448625", size = 1374875, upload-time = "2024-11-13T16:37:26.799Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d7/cd58bd17f5277d9cc32ecdbb0481ca02c52fc066412de413aa01268dc9b4/aiohttp-3.10.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82052be3e6d9e0c123499127782a01a2b224b8af8c62ab46b3f6197035ad94e9", size = 1285626, upload-time = "2024-11-13T16:37:29.02Z" }, + { url = "https://files.pythonhosted.org/packages/bb/b2/da4953643b7dcdcd29cc99f98209f3653bf02023d95ce8a8fd57ffba0f15/aiohttp-3.10.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20063c7acf1eec550c8eb098deb5ed9e1bb0521613b03bb93644b810986027ac", size = 1246120, upload-time = 
"2024-11-13T16:37:31.268Z" }, + { url = "https://files.pythonhosted.org/packages/6c/22/1217b3c773055f0cb172e3b7108274a74c0fe9900c716362727303931cbb/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:489cced07a4c11488f47aab1f00d0c572506883f877af100a38f1fedaa884c3a", size = 1265177, upload-time = "2024-11-13T16:37:33.348Z" }, + { url = "https://files.pythonhosted.org/packages/63/5e/3827ad7e61544ed1e73e4fdea7bb87ea35ac59a362d7eb301feb5e859780/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ea9b3bab329aeaa603ed3bf605f1e2a6f36496ad7e0e1aa42025f368ee2dc07b", size = 1257238, upload-time = "2024-11-13T16:37:35.753Z" }, + { url = "https://files.pythonhosted.org/packages/53/31/951f78751d403da6086b662760e6e8b08201b0dcf5357969f48261b4d0e1/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ca117819d8ad113413016cb29774b3f6d99ad23c220069789fc050267b786c16", size = 1315944, upload-time = "2024-11-13T16:37:38.317Z" }, + { url = "https://files.pythonhosted.org/packages/0d/79/06ef7a2a69880649261818b135b245de5a4e89fed5a6987c8645428563fc/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2dfb612dcbe70fb7cdcf3499e8d483079b89749c857a8f6e80263b021745c730", size = 1332065, upload-time = "2024-11-13T16:37:40.725Z" }, + { url = "https://files.pythonhosted.org/packages/10/39/a273857c2d0bbf2152a4201fbf776931c2dac74aa399c6683ed4c286d1d1/aiohttp-3.10.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9b615d3da0d60e7d53c62e22b4fd1c70f4ae5993a44687b011ea3a2e49051b8", size = 1291882, upload-time = "2024-11-13T16:37:43.209Z" }, + { url = "https://files.pythonhosted.org/packages/49/39/7aa387f88403febc96e0494101763afaa14d342109329a01b413b2bac075/aiohttp-3.10.11-cp311-cp311-win32.whl", hash = "sha256:29103f9099b6068bbdf44d6a3d090e0a0b2be6d3c9f16a070dd9d0d910ec08f9", size = 363409, upload-time = "2024-11-13T16:37:45.143Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/e9/8eb3dc095ce48499d867ad461d02f1491686b79ad92e4fad4df582f6be7b/aiohttp-3.10.11-cp311-cp311-win_amd64.whl", hash = "sha256:236b28ceb79532da85d59aa9b9bf873b364e27a0acb2ceaba475dc61cffb6f3f", size = 382644, upload-time = "2024-11-13T16:37:47.685Z" }, + { url = "https://files.pythonhosted.org/packages/01/16/077057ef3bd684dbf9a8273a5299e182a8d07b4b252503712ff8b5364fd1/aiohttp-3.10.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7480519f70e32bfb101d71fb9a1f330fbd291655a4c1c922232a48c458c52710", size = 584830, upload-time = "2024-11-13T16:37:49.608Z" }, + { url = "https://files.pythonhosted.org/packages/2c/cf/348b93deb9597c61a51b6682e81f7c7d79290249e886022ef0705d858d90/aiohttp-3.10.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f65267266c9aeb2287a6622ee2bb39490292552f9fbf851baabc04c9f84e048d", size = 397090, upload-time = "2024-11-13T16:37:51.539Z" }, + { url = "https://files.pythonhosted.org/packages/70/bf/903df5cd739dfaf5b827b3d8c9d68ff4fcea16a0ca1aeb948c9da30f56c8/aiohttp-3.10.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7400a93d629a0608dc1d6c55f1e3d6e07f7375745aaa8bd7f085571e4d1cee97", size = 392361, upload-time = "2024-11-13T16:37:53.586Z" }, + { url = "https://files.pythonhosted.org/packages/fb/97/e4792675448a2ac5bd56f377a095233b805dd1315235c940c8ba5624e3cb/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f34b97e4b11b8d4eb2c3a4f975be626cc8af99ff479da7de49ac2c6d02d35725", size = 1309839, upload-time = "2024-11-13T16:37:55.68Z" }, + { url = "https://files.pythonhosted.org/packages/96/d0/ba19b1260da6fbbda4d5b1550d8a53ba3518868f2c143d672aedfdbc6172/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e7b825da878464a252ccff2958838f9caa82f32a8dbc334eb9b34a026e2c636", size = 1348116, upload-time = "2024-11-13T16:37:58.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/b9/15100ee7113a2638bfdc91aecc54641609a92a7ce4fe533ebeaa8d43ff93/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f92a344c50b9667827da308473005f34767b6a2a60d9acff56ae94f895f385", size = 1391402, upload-time = "2024-11-13T16:38:00.522Z" }, + { url = "https://files.pythonhosted.org/packages/c5/36/831522618ac0dcd0b28f327afd18df7fb6bbf3eaf302f912a40e87714846/aiohttp-3.10.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc6f1ab987a27b83c5268a17218463c2ec08dbb754195113867a27b166cd6087", size = 1304239, upload-time = "2024-11-13T16:38:04.195Z" }, + { url = "https://files.pythonhosted.org/packages/60/9f/b7230d0c48b076500ae57adb717aa0656432acd3d8febb1183dedfaa4e75/aiohttp-3.10.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1dc0f4ca54842173d03322793ebcf2c8cc2d34ae91cc762478e295d8e361e03f", size = 1256565, upload-time = "2024-11-13T16:38:07.218Z" }, + { url = "https://files.pythonhosted.org/packages/63/c2/35c7b4699f4830b3b0a5c3d5619df16dca8052ae8b488e66065902d559f6/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7ce6a51469bfaacff146e59e7fb61c9c23006495d11cc24c514a455032bcfa03", size = 1269285, upload-time = "2024-11-13T16:38:09.396Z" }, + { url = "https://files.pythonhosted.org/packages/51/48/bc20ea753909bdeb09f9065260aefa7453e3a57f6a51f56f5216adc1a5e7/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:aad3cd91d484d065ede16f3cf15408254e2469e3f613b241a1db552c5eb7ab7d", size = 1276716, upload-time = "2024-11-13T16:38:12.039Z" }, + { url = "https://files.pythonhosted.org/packages/0c/7b/a8708616b3810f55ead66f8e189afa9474795760473aea734bbea536cd64/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f4df4b8ca97f658c880fb4b90b1d1ec528315d4030af1ec763247ebfd33d8b9a", size = 1315023, upload-time = "2024-11-13T16:38:15.155Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/d6/dfe9134a921e05b01661a127a37b7d157db93428905450e32f9898eef27d/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2e4e18a0a2d03531edbc06c366954e40a3f8d2a88d2b936bbe78a0c75a3aab3e", size = 1342735, upload-time = "2024-11-13T16:38:17.539Z" }, + { url = "https://files.pythonhosted.org/packages/ca/1a/3bd7f18e3909eabd57e5d17ecdbf5ea4c5828d91341e3676a07de7c76312/aiohttp-3.10.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6ce66780fa1a20e45bc753cda2a149daa6dbf1561fc1289fa0c308391c7bc0a4", size = 1302618, upload-time = "2024-11-13T16:38:19.865Z" }, + { url = "https://files.pythonhosted.org/packages/cf/51/d063133781cda48cfdd1e11fc8ef45ab3912b446feba41556385b3ae5087/aiohttp-3.10.11-cp312-cp312-win32.whl", hash = "sha256:a919c8957695ea4c0e7a3e8d16494e3477b86f33067478f43106921c2fef15bb", size = 360497, upload-time = "2024-11-13T16:38:21.996Z" }, + { url = "https://files.pythonhosted.org/packages/55/4e/f29def9ed39826fe8f85955f2e42fe5cc0cbe3ebb53c97087f225368702e/aiohttp-3.10.11-cp312-cp312-win_amd64.whl", hash = "sha256:b5e29706e6389a2283a91611c91bf24f218962717c8f3b4e528ef529d112ee27", size = 380577, upload-time = "2024-11-13T16:38:24.247Z" }, + { url = "https://files.pythonhosted.org/packages/1f/63/654c185dfe3cf5d4a0d35b6ee49ee6ca91922c694eaa90732e1ba4b40ef1/aiohttp-3.10.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:703938e22434d7d14ec22f9f310559331f455018389222eed132808cd8f44127", size = 577381, upload-time = "2024-11-13T16:38:26.708Z" }, + { url = "https://files.pythonhosted.org/packages/4e/c4/ee9c350acb202ba2eb0c44b0f84376b05477e870444192a9f70e06844c28/aiohttp-3.10.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9bc50b63648840854e00084c2b43035a62e033cb9b06d8c22b409d56eb098413", size = 393289, upload-time = "2024-11-13T16:38:29.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/7c/30d161a7e3b208cef1b922eacf2bbb8578b7e5a62266a6a2245a1dd044dc/aiohttp-3.10.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f0463bf8b0754bc744e1feb61590706823795041e63edf30118a6f0bf577461", size = 388859, upload-time = "2024-11-13T16:38:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/79/10/8d050e04be447d3d39e5a4a910fa289d930120cebe1b893096bd3ee29063/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6c6dec398ac5a87cb3a407b068e1106b20ef001c344e34154616183fe684288", size = 1280983, upload-time = "2024-11-13T16:38:33.738Z" }, + { url = "https://files.pythonhosted.org/packages/31/b3/977eca40afe643dcfa6b8d8bb9a93f4cba1d8ed1ead22c92056b08855c7a/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcaf2d79104d53d4dcf934f7ce76d3d155302d07dae24dff6c9fffd217568067", size = 1317132, upload-time = "2024-11-13T16:38:35.999Z" }, + { url = "https://files.pythonhosted.org/packages/1a/43/b5ee8e697ed0f96a2b3d80b3058fa7590cda508e9cd256274246ba1cf37a/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25fd5470922091b5a9aeeb7e75be609e16b4fba81cdeaf12981393fb240dd10e", size = 1362630, upload-time = "2024-11-13T16:38:39.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/20/3ae8e993b2990fa722987222dea74d6bac9331e2f530d086f309b4aa8847/aiohttp-3.10.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbde2ca67230923a42161b1f408c3992ae6e0be782dca0c44cb3206bf330dee1", size = 1276865, upload-time = "2024-11-13T16:38:41.423Z" }, + { url = "https://files.pythonhosted.org/packages/02/08/1afb0ab7dcff63333b683e998e751aa2547d1ff897b577d2244b00e6fe38/aiohttp-3.10.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:249c8ff8d26a8b41a0f12f9df804e7c685ca35a207e2410adbd3e924217b9006", size = 1230448, upload-time = 
"2024-11-13T16:38:43.962Z" }, + { url = "https://files.pythonhosted.org/packages/c6/fd/ccd0ff842c62128d164ec09e3dd810208a84d79cd402358a3038ae91f3e9/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:878ca6a931ee8c486a8f7b432b65431d095c522cbeb34892bee5be97b3481d0f", size = 1244626, upload-time = "2024-11-13T16:38:47.089Z" }, + { url = "https://files.pythonhosted.org/packages/9f/75/30e9537ab41ed7cb062338d8df7c4afb0a715b3551cd69fc4ea61cfa5a95/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8663f7777ce775f0413324be0d96d9730959b2ca73d9b7e2c2c90539139cbdd6", size = 1243608, upload-time = "2024-11-13T16:38:49.47Z" }, + { url = "https://files.pythonhosted.org/packages/c2/e0/3e7a62d99b9080793affddc12a82b11c9bc1312916ad849700d2bddf9786/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6cd3f10b01f0c31481fba8d302b61603a2acb37b9d30e1d14e0f5a58b7b18a31", size = 1286158, upload-time = "2024-11-13T16:38:51.947Z" }, + { url = "https://files.pythonhosted.org/packages/71/b8/df67886802e71e976996ed9324eb7dc379e53a7d972314e9c7fe3f6ac6bc/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e8d8aad9402d3aa02fdc5ca2fe68bcb9fdfe1f77b40b10410a94c7f408b664d", size = 1313636, upload-time = "2024-11-13T16:38:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/3c/3b/aea9c3e70ff4e030f46902df28b4cdf486695f4d78fd9c6698827e2bafab/aiohttp-3.10.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:38e3c4f80196b4f6c3a85d134a534a56f52da9cb8d8e7af1b79a32eefee73a00", size = 1273772, upload-time = "2024-11-13T16:38:56.846Z" }, + { url = "https://files.pythonhosted.org/packages/e9/9e/4b4c5705270d1c4ee146516ad288af720798d957ba46504aaf99b86e85d9/aiohttp-3.10.11-cp313-cp313-win32.whl", hash = "sha256:fc31820cfc3b2863c6e95e14fcf815dc7afe52480b4dc03393c4873bb5599f71", size = 358679, upload-time = "2024-11-13T16:38:59.787Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/1d/18ef37549901db94717d4389eb7be807acbfbdeab48a73ff2993fc909118/aiohttp-3.10.11-cp313-cp313-win_amd64.whl", hash = "sha256:4996ff1345704ffdd6d75fb06ed175938c133425af616142e7187f28dc75f14e", size = 378073, upload-time = "2024-11-13T16:39:02.065Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f2/59165bee7bba0b0634525834c622f152a30715a1d8280f6291a0cb86b1e6/aiohttp-3.10.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:74baf1a7d948b3d640badeac333af581a367ab916b37e44cf90a0334157cdfd2", size = 592135, upload-time = "2024-11-13T16:39:04.774Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0e/b3555c504745af66efbf89d16811148ff12932b86fad529d115538fe2739/aiohttp-3.10.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:473aebc3b871646e1940c05268d451f2543a1d209f47035b594b9d4e91ce8339", size = 402913, upload-time = "2024-11-13T16:39:08.065Z" }, + { url = "https://files.pythonhosted.org/packages/31/bb/2890a3c77126758ef58536ca9f7476a12ba2021e0cd074108fb99b8c8747/aiohttp-3.10.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c2f746a6968c54ab2186574e15c3f14f3e7f67aef12b761e043b33b89c5b5f95", size = 394013, upload-time = "2024-11-13T16:39:10.638Z" }, + { url = "https://files.pythonhosted.org/packages/74/82/0ab5199b473558846d72901a714b6afeb6f6a6a6a4c3c629e2c107418afd/aiohttp-3.10.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d110cabad8360ffa0dec8f6ec60e43286e9d251e77db4763a87dcfe55b4adb92", size = 1255578, upload-time = "2024-11-13T16:39:13.14Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b2/f232477dd3c0e95693a903c4815bfb8d831f6a1a67e27ad14d30a774eeda/aiohttp-3.10.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0099c7d5d7afff4202a0c670e5b723f7718810000b4abcbc96b064129e64bc7", size = 1298780, upload-time = "2024-11-13T16:39:15.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/8c/11972235a6b53d5b69098f2ee6629ff8f99cd9592dcaa620c7868deb5673/aiohttp-3.10.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0316e624b754dbbf8c872b62fe6dcb395ef20c70e59890dfa0de9eafccd2849d", size = 1336093, upload-time = "2024-11-13T16:39:19.11Z" }, + { url = "https://files.pythonhosted.org/packages/03/be/7ad9a6cd2312221cf7b6837d8e2d8e4660fbd4f9f15bccf79ef857f41f4d/aiohttp-3.10.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a5f7ab8baf13314e6b2485965cbacb94afff1e93466ac4d06a47a81c50f9cca", size = 1250296, upload-time = "2024-11-13T16:39:22.363Z" }, + { url = "https://files.pythonhosted.org/packages/bb/8d/a3885a582d9fc481bccb155d082f83a7a846942e36e4a4bba061e3d6b95e/aiohttp-3.10.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c891011e76041e6508cbfc469dd1a8ea09bc24e87e4c204e05f150c4c455a5fa", size = 1215020, upload-time = "2024-11-13T16:39:25.205Z" }, + { url = "https://files.pythonhosted.org/packages/bb/e7/09a1736b7264316dc3738492d9b559f2a54b985660f21d76095c9890a62e/aiohttp-3.10.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9208299251370ee815473270c52cd3f7069ee9ed348d941d574d1457d2c73e8b", size = 1210591, upload-time = "2024-11-13T16:39:28.311Z" }, + { url = "https://files.pythonhosted.org/packages/58/b1/ee684631f6af98065d49ac8416db7a8e74ea33e1378bc75952ab0522342f/aiohttp-3.10.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:459f0f32c8356e8125f45eeff0ecf2b1cb6db1551304972702f34cd9e6c44658", size = 1211255, upload-time = "2024-11-13T16:39:30.799Z" }, + { url = "https://files.pythonhosted.org/packages/8f/55/e21e312fd6c581f244dd2ed077ccb784aade07c19416a6316b1453f02c4e/aiohttp-3.10.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:14cdc8c1810bbd4b4b9f142eeee23cda528ae4e57ea0923551a9af4820980e39", size = 1278114, upload-time = "2024-11-13T16:39:34.141Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/7f/ff6df0e90df6759693f52720ebedbfa10982d97aa1fd02c6ca917a6399ea/aiohttp-3.10.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:971aa438a29701d4b34e4943e91b5e984c3ae6ccbf80dd9efaffb01bd0b243a9", size = 1292714, upload-time = "2024-11-13T16:39:37.216Z" }, + { url = "https://files.pythonhosted.org/packages/3a/45/63f35367dfffae41e7abd0603f92708b5b3655fda55c08388ac2c7fb127b/aiohttp-3.10.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9a309c5de392dfe0f32ee57fa43ed8fc6ddf9985425e84bd51ed66bb16bce3a7", size = 1233734, upload-time = "2024-11-13T16:39:40.599Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ee/74b0696c0e84e06c43beab9302f353d97dc9f0cccd7ccf3ee648411b849b/aiohttp-3.10.11-cp38-cp38-win32.whl", hash = "sha256:9ec1628180241d906a0840b38f162a3215114b14541f1a8711c368a8739a9be4", size = 365350, upload-time = "2024-11-13T16:39:43.852Z" }, + { url = "https://files.pythonhosted.org/packages/21/0c/74c895688db09a2852056abf32d128991ec2fb41e5f57a1fe0928e15151c/aiohttp-3.10.11-cp38-cp38-win_amd64.whl", hash = "sha256:9c6e0ffd52c929f985c7258f83185d17c76d4275ad22e90aa29f38e211aacbec", size = 384542, upload-time = "2024-11-13T16:39:47.093Z" }, + { url = "https://files.pythonhosted.org/packages/cc/df/aa0d1548db818395a372b5f90e62072677ce786d6b19680c49dd4da3825f/aiohttp-3.10.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cdc493a2e5d8dc79b2df5bec9558425bcd39aff59fc949810cbd0832e294b106", size = 589833, upload-time = "2024-11-13T16:39:49.72Z" }, + { url = "https://files.pythonhosted.org/packages/75/7c/d11145784b3fa29c0421a3883a4b91ee8c19acb40332b1d2e39f47be4e5b/aiohttp-3.10.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b3e70f24e7d0405be2348da9d5a7836936bf3a9b4fd210f8c37e8d48bc32eca6", size = 401685, upload-time = "2024-11-13T16:39:52.263Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/67/1b5f93babeb060cb683d23104b243be1d6299fe6cd807dcb56cf67d2e62c/aiohttp-3.10.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968b8fb2a5eee2770eda9c7b5581587ef9b96fbdf8dcabc6b446d35ccc69df01", size = 392957, upload-time = "2024-11-13T16:39:54.668Z" }, + { url = "https://files.pythonhosted.org/packages/e1/4d/441df53aafd8dd97b8cfe9e467c641fa19cb5113e7601a7f77f2124518e0/aiohttp-3.10.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deef4362af9493d1382ef86732ee2e4cbc0d7c005947bd54ad1a9a16dd59298e", size = 1229754, upload-time = "2024-11-13T16:39:57.166Z" }, + { url = "https://files.pythonhosted.org/packages/4d/cc/f1397a2501b95cb94580de7051395e85af95a1e27aed1f8af73459ddfa22/aiohttp-3.10.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:686b03196976e327412a1b094f4120778c7c4b9cff9bce8d2fdfeca386b89829", size = 1266246, upload-time = "2024-11-13T16:40:00.723Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b5/7d33dae7630b4e9f90d634c6a90cb0923797e011b71cd9b10fe685aec3f6/aiohttp-3.10.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3bf6d027d9d1d34e1c2e1645f18a6498c98d634f8e373395221121f1c258ace8", size = 1301720, upload-time = "2024-11-13T16:40:04.111Z" }, + { url = "https://files.pythonhosted.org/packages/51/36/f917bcc63bc489aa3f534fa81efbf895fa5286745dcd8bbd0eb9dbc923a1/aiohttp-3.10.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:099fd126bf960f96d34a760e747a629c27fb3634da5d05c7ef4d35ef4ea519fc", size = 1221527, upload-time = "2024-11-13T16:40:06.851Z" }, + { url = "https://files.pythonhosted.org/packages/32/c2/1a303a072b4763d99d4b0664a3a8b952869e3fbb660d4239826bd0c56cc1/aiohttp-3.10.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c73c4d3dae0b4644bc21e3de546530531d6cdc88659cdeb6579cd627d3c206aa", size = 1192309, upload-time = 
"2024-11-13T16:40:09.65Z" }, + { url = "https://files.pythonhosted.org/packages/62/ef/d62f705dc665382b78ef171e5ba2616c395220ac7c1f452f0d2dcad3f9f5/aiohttp-3.10.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0c5580f3c51eea91559db3facd45d72e7ec970b04528b4709b1f9c2555bd6d0b", size = 1189481, upload-time = "2024-11-13T16:40:12.77Z" }, + { url = "https://files.pythonhosted.org/packages/40/22/3e3eb4f97e5c4f52ccd198512b583c0c9135aa4e989c7ade97023c4cd282/aiohttp-3.10.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fdf6429f0caabfd8a30c4e2eaecb547b3c340e4730ebfe25139779b9815ba138", size = 1187877, upload-time = "2024-11-13T16:40:15.985Z" }, + { url = "https://files.pythonhosted.org/packages/b5/73/77475777fbe2b3efaceb49db2859f1a22c96fd5869d736e80375db05bbf4/aiohttp-3.10.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d97187de3c276263db3564bb9d9fad9e15b51ea10a371ffa5947a5ba93ad6777", size = 1246006, upload-time = "2024-11-13T16:40:19.17Z" }, + { url = "https://files.pythonhosted.org/packages/ef/f7/5b060d19065473da91838b63d8fd4d20ef8426a7d905cc8f9cd11eabd780/aiohttp-3.10.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:0acafb350cfb2eba70eb5d271f55e08bd4502ec35e964e18ad3e7d34d71f7261", size = 1260403, upload-time = "2024-11-13T16:40:21.761Z" }, + { url = "https://files.pythonhosted.org/packages/6c/ea/e9ad224815cd83c8dfda686d2bafa2cab5b93d7232e09470a8d2a158acde/aiohttp-3.10.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c13ed0c779911c7998a58e7848954bd4d63df3e3575f591e321b19a2aec8df9f", size = 1208643, upload-time = "2024-11-13T16:40:24.803Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c1/e1c6bba72f379adbd52958601a8642546ed0807964afba3b1b5b8cfb1bc0/aiohttp-3.10.11-cp39-cp39-win32.whl", hash = "sha256:22b7c540c55909140f63ab4f54ec2c20d2635c0289cdd8006da46f3327f971b9", size = 364419, upload-time = "2024-11-13T16:40:27.817Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/24/50862e06e86cd263c60661e00b9d2c8d7fdece4fe95454ed5aa21ecf8036/aiohttp-3.10.11-cp39-cp39-win_amd64.whl", hash = "sha256:7b26b1551e481012575dab8e3727b16fe7dd27eb2711d2e63ced7368756268fb", size = 382857, upload-time = "2024-11-13T16:40:30.427Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "aiohappyeyeballs", version = "2.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "aiosignal", version = "1.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "async-timeout", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, + { name = "attrs", version = "25.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "frozenlist", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "multidict", version = "6.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "propcache", version = "0.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "yarl", version = "1.22.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/6d/34/939730e66b716b76046dedfe0842995842fa906ccc4964bba414ff69e429/aiohttp-3.13.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2372b15a5f62ed37789a6b383ff7344fc5b9f243999b0cd9b629d8bc5f5b4155", size = 736471, upload-time = "2025-10-28T20:55:27.924Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/dcbdf2df7f6ca72b0bb4c0b4509701f2d8942cf54e29ca197389c214c07f/aiohttp-3.13.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7f8659a48995edee7229522984bd1009c1213929c769c2daa80b40fe49a180c", size = 493985, upload-time = "2025-10-28T20:55:29.456Z" }, + { url = "https://files.pythonhosted.org/packages/9d/87/71c8867e0a1d0882dcbc94af767784c3cb381c1c4db0943ab4aae4fed65e/aiohttp-3.13.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:939ced4a7add92296b0ad38892ce62b98c619288a081170695c6babe4f50e636", size = 489274, upload-time = "2025-10-28T20:55:31.134Z" }, + { url = "https://files.pythonhosted.org/packages/38/0f/46c24e8dae237295eaadd113edd56dee96ef6462adf19b88592d44891dc5/aiohttp-3.13.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6315fb6977f1d0dd41a107c527fee2ed5ab0550b7d885bc15fee20ccb17891da", size = 1668171, upload-time = "2025-10-28T20:55:36.065Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c6/4cdfb4440d0e28483681a48f69841fa5e39366347d66ef808cbdadddb20e/aiohttp-3.13.2-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6e7352512f763f760baaed2637055c49134fd1d35b37c2dedfac35bfe5cf8725", size = 1636036, upload-time = "2025-10-28T20:55:37.576Z" }, + { url = "https://files.pythonhosted.org/packages/84/37/8708cf678628216fb678ab327a4e1711c576d6673998f4f43e86e9ae90dd/aiohttp-3.13.2-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e09a0a06348a2dd73e7213353c90d709502d9786219f69b731f6caa0efeb46f5", size = 1727975, upload-time = 
"2025-10-28T20:55:39.457Z" }, + { url = "https://files.pythonhosted.org/packages/e6/2e/3ebfe12fdcb9b5f66e8a0a42dffcd7636844c8a018f261efb2419f68220b/aiohttp-3.13.2-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a09a6d073fb5789456545bdee2474d14395792faa0527887f2f4ec1a486a59d3", size = 1815823, upload-time = "2025-10-28T20:55:40.958Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4f/ca2ef819488cbb41844c6cf92ca6dd15b9441e6207c58e5ae0e0fc8d70ad/aiohttp-3.13.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b59d13c443f8e049d9e94099c7e412e34610f1f49be0f230ec656a10692a5802", size = 1669374, upload-time = "2025-10-28T20:55:42.745Z" }, + { url = "https://files.pythonhosted.org/packages/f8/fe/1fe2e1179a0d91ce09c99069684aab619bf2ccde9b20bd6ca44f8837203e/aiohttp-3.13.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:20db2d67985d71ca033443a1ba2001c4b5693fe09b0e29f6d9358a99d4d62a8a", size = 1555315, upload-time = "2025-10-28T20:55:44.264Z" }, + { url = "https://files.pythonhosted.org/packages/5a/2b/f3781899b81c45d7cbc7140cddb8a3481c195e7cbff8e36374759d2ab5a5/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:960c2fc686ba27b535f9fd2b52d87ecd7e4fd1cf877f6a5cba8afb5b4a8bd204", size = 1639140, upload-time = "2025-10-28T20:55:46.626Z" }, + { url = "https://files.pythonhosted.org/packages/72/27/c37e85cd3ece6f6c772e549bd5a253d0c122557b25855fb274224811e4f2/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6c00dbcf5f0d88796151e264a8eab23de2997c9303dd7c0bf622e23b24d3ce22", size = 1645496, upload-time = "2025-10-28T20:55:48.933Z" }, + { url = "https://files.pythonhosted.org/packages/66/20/3af1ab663151bd3780b123e907761cdb86ec2c4e44b2d9b195ebc91fbe37/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fed38a5edb7945f4d1bcabe2fcd05db4f6ec7e0e82560088b754f7e08d93772d", size = 1697625, upload-time = 
"2025-10-28T20:55:50.377Z" }, + { url = "https://files.pythonhosted.org/packages/95/eb/ae5cab15efa365e13d56b31b0d085a62600298bf398a7986f8388f73b598/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:b395bbca716c38bef3c764f187860e88c724b342c26275bc03e906142fc5964f", size = 1542025, upload-time = "2025-10-28T20:55:51.861Z" }, + { url = "https://files.pythonhosted.org/packages/e9/2d/1683e8d67ec72d911397fe4e575688d2a9b8f6a6e03c8fdc9f3fd3d4c03f/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:204ffff2426c25dfda401ba08da85f9c59525cdc42bda26660463dd1cbcfec6f", size = 1714918, upload-time = "2025-10-28T20:55:53.515Z" }, + { url = "https://files.pythonhosted.org/packages/99/a2/ffe8e0e1c57c5e542d47ffa1fcf95ef2b3ea573bf7c4d2ee877252431efc/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:05c4dd3c48fb5f15db31f57eb35374cb0c09afdde532e7fb70a75aede0ed30f6", size = 1656113, upload-time = "2025-10-28T20:55:55.438Z" }, + { url = "https://files.pythonhosted.org/packages/0d/42/d511aff5c3a2b06c09d7d214f508a4ad8ac7799817f7c3d23e7336b5e896/aiohttp-3.13.2-cp310-cp310-win32.whl", hash = "sha256:e574a7d61cf10351d734bcddabbe15ede0eaa8a02070d85446875dc11189a251", size = 432290, upload-time = "2025-10-28T20:55:56.96Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ea/1c2eb7098b5bad4532994f2b7a8228d27674035c9b3234fe02c37469ef14/aiohttp-3.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:364f55663085d658b8462a1c3f17b2b84a5c2e1ba858e1b79bff7b2e24ad1514", size = 455075, upload-time = "2025-10-28T20:55:58.373Z" }, + { url = "https://files.pythonhosted.org/packages/35/74/b321e7d7ca762638cdf8cdeceb39755d9c745aff7a64c8789be96ddf6e96/aiohttp-3.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0", size = 743409, upload-time = "2025-10-28T20:56:00.354Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/3d/91524b905ec473beaf35158d17f82ef5a38033e5809fe8742e3657cdbb97/aiohttp-3.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb", size = 497006, upload-time = "2025-10-28T20:56:01.85Z" }, + { url = "https://files.pythonhosted.org/packages/eb/d3/7f68bc02a67716fe80f063e19adbd80a642e30682ce74071269e17d2dba1/aiohttp-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9", size = 493195, upload-time = "2025-10-28T20:56:03.314Z" }, + { url = "https://files.pythonhosted.org/packages/98/31/913f774a4708775433b7375c4f867d58ba58ead833af96c8af3621a0d243/aiohttp-3.13.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2a9ea08e8c58bb17655630198833109227dea914cd20be660f52215f6de5613", size = 1747759, upload-time = "2025-10-28T20:56:04.904Z" }, + { url = "https://files.pythonhosted.org/packages/e8/63/04efe156f4326f31c7c4a97144f82132c3bb21859b7bb84748d452ccc17c/aiohttp-3.13.2-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53b07472f235eb80e826ad038c9d106c2f653584753f3ddab907c83f49eedead", size = 1704456, upload-time = "2025-10-28T20:56:06.986Z" }, + { url = "https://files.pythonhosted.org/packages/8e/02/4e16154d8e0a9cf4ae76f692941fd52543bbb148f02f098ca73cab9b1c1b/aiohttp-3.13.2-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e736c93e9c274fce6419af4aac199984d866e55f8a4cec9114671d0ea9688780", size = 1807572, upload-time = "2025-10-28T20:56:08.558Z" }, + { url = "https://files.pythonhosted.org/packages/34/58/b0583defb38689e7f06798f0285b1ffb3a6fb371f38363ce5fd772112724/aiohttp-3.13.2-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ff5e771f5dcbc81c64898c597a434f7682f2259e0cd666932a913d53d1341d1a", size = 
1895954, upload-time = "2025-10-28T20:56:10.545Z" }, + { url = "https://files.pythonhosted.org/packages/6b/f3/083907ee3437425b4e376aa58b2c915eb1a33703ec0dc30040f7ae3368c6/aiohttp-3.13.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3b6fb0c207cc661fa0bf8c66d8d9b657331ccc814f4719468af61034b478592", size = 1747092, upload-time = "2025-10-28T20:56:12.118Z" }, + { url = "https://files.pythonhosted.org/packages/ac/61/98a47319b4e425cc134e05e5f3fc512bf9a04bf65aafd9fdcda5d57ec693/aiohttp-3.13.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:97a0895a8e840ab3520e2288db7cace3a1981300d48babeb50e7425609e2e0ab", size = 1606815, upload-time = "2025-10-28T20:56:14.191Z" }, + { url = "https://files.pythonhosted.org/packages/97/4b/e78b854d82f66bb974189135d31fce265dee0f5344f64dd0d345158a5973/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9e8f8afb552297aca127c90cb840e9a1d4bfd6a10d7d8f2d9176e1acc69bad30", size = 1723789, upload-time = "2025-10-28T20:56:16.101Z" }, + { url = "https://files.pythonhosted.org/packages/ed/fc/9d2ccc794fc9b9acd1379d625c3a8c64a45508b5091c546dea273a41929e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:ed2f9c7216e53c3df02264f25d824b079cc5914f9e2deba94155190ef648ee40", size = 1718104, upload-time = "2025-10-28T20:56:17.655Z" }, + { url = "https://files.pythonhosted.org/packages/66/65/34564b8765ea5c7d79d23c9113135d1dd3609173da13084830f1507d56cf/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:99c5280a329d5fa18ef30fd10c793a190d996567667908bef8a7f81f8202b948", size = 1785584, upload-time = "2025-10-28T20:56:19.238Z" }, + { url = "https://files.pythonhosted.org/packages/30/be/f6a7a426e02fc82781afd62016417b3948e2207426d90a0e478790d1c8a4/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf", size = 1595126, upload-time = 
"2025-10-28T20:56:20.836Z" }, + { url = "https://files.pythonhosted.org/packages/e5/c7/8e22d5d28f94f67d2af496f14a83b3c155d915d1fe53d94b66d425ec5b42/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782", size = 1800665, upload-time = "2025-10-28T20:56:22.922Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/91133c8b68b1da9fc16555706aa7276fdf781ae2bb0876c838dd86b8116e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8", size = 1739532, upload-time = "2025-10-28T20:56:25.924Z" }, + { url = "https://files.pythonhosted.org/packages/17/6b/3747644d26a998774b21a616016620293ddefa4d63af6286f389aedac844/aiohttp-3.13.2-cp311-cp311-win32.whl", hash = "sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec", size = 431876, upload-time = "2025-10-28T20:56:27.524Z" }, + { url = "https://files.pythonhosted.org/packages/c3/63/688462108c1a00eb9f05765331c107f95ae86f6b197b865d29e930b7e462/aiohttp-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c", size = 456205, upload-time = "2025-10-28T20:56:29.062Z" }, + { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623, upload-time = "2025-10-28T20:56:30.797Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664, upload-time = "2025-10-28T20:56:32.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808, upload-time = "2025-10-28T20:56:34.57Z" }, + { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863, upload-time = "2025-10-28T20:56:36.377Z" }, + { url = "https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586, upload-time = "2025-10-28T20:56:38.034Z" }, + { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625, upload-time = "2025-10-28T20:56:39.75Z" }, + { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281, upload-time = "2025-10-28T20:56:41.471Z" }, + { url = "https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431, upload-time = "2025-10-28T20:56:43.162Z" }, + { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846, upload-time = "2025-10-28T20:56:44.85Z" }, + { url = "https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606, upload-time = "2025-10-28T20:56:46.519Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663, upload-time = "2025-10-28T20:56:48.528Z" }, + { url = "https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939, upload-time = "2025-10-28T20:56:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132, upload-time = "2025-10-28T20:56:52.568Z" }, + { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802, 
upload-time = "2025-10-28T20:56:54.292Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512, upload-time = "2025-10-28T20:56:56.428Z" }, + { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690, upload-time = "2025-10-28T20:56:58.736Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465, upload-time = "2025-10-28T20:57:00.795Z" }, + { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" }, + { url = "https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387, upload-time = "2025-10-28T20:57:08.685Z" }, + { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314, upload-time = "2025-10-28T20:57:10.693Z" }, + { url = "https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317, upload-time = "2025-10-28T20:57:12.563Z" }, + { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539, upload-time = "2025-10-28T20:57:14.623Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597, upload-time = "2025-10-28T20:57:16.399Z" }, + { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006, upload-time = "2025-10-28T20:57:18.288Z" }, + { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220, upload-time = "2025-10-28T20:57:20.241Z" }, + { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570, upload-time = "2025-10-28T20:57:22.253Z" }, + { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407, upload-time = "2025-10-28T20:57:24.37Z" }, + { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" }, + { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 1758084, upload-time = "2025-10-28T20:57:28.349Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = 
"2025-10-28T20:57:30.233Z" }, + { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" }, + { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" }, + { url = "https://files.pythonhosted.org/packages/9b/36/e2abae1bd815f01c957cbf7be817b3043304e1c87bad526292a0410fdcf9/aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b", size = 735234, upload-time = "2025-10-28T20:57:36.415Z" }, + { url = "https://files.pythonhosted.org/packages/ca/e3/1ee62dde9b335e4ed41db6bba02613295a0d5b41f74a783c142745a12763/aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61", size = 490733, upload-time = "2025-10-28T20:57:38.205Z" }, + { url = "https://files.pythonhosted.org/packages/1a/aa/7a451b1d6a04e8d15a362af3e9b897de71d86feac3babf8894545d08d537/aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4", size = 491303, upload-time = "2025-10-28T20:57:40.122Z" }, + { url = "https://files.pythonhosted.org/packages/57/1e/209958dbb9b01174870f6a7538cd1f3f28274fdbc88a750c238e2c456295/aiohttp-3.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b", size = 1717965, upload-time = "2025-10-28T20:57:42.28Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/aa/6a01848d6432f241416bc4866cae8dc03f05a5a884d2311280f6a09c73d6/aiohttp-3.13.2-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694", size = 1667221, upload-time = "2025-10-28T20:57:44.869Z" }, + { url = "https://files.pythonhosted.org/packages/87/4f/36c1992432d31bbc789fa0b93c768d2e9047ec8c7177e5cd84ea85155f36/aiohttp-3.13.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906", size = 1757178, upload-time = "2025-10-28T20:57:47.216Z" }, + { url = "https://files.pythonhosted.org/packages/ac/b4/8e940dfb03b7e0f68a82b88fd182b9be0a65cb3f35612fe38c038c3112cf/aiohttp-3.13.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9", size = 1838001, upload-time = "2025-10-28T20:57:49.337Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ef/39f3448795499c440ab66084a9db7d20ca7662e94305f175a80f5b7e0072/aiohttp-3.13.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011", size = 1716325, upload-time = "2025-10-28T20:57:51.327Z" }, + { url = "https://files.pythonhosted.org/packages/d7/51/b311500ffc860b181c05d91c59a1313bdd05c82960fdd4035a15740d431e/aiohttp-3.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6", size = 1547978, upload-time = "2025-10-28T20:57:53.554Z" }, + { url = "https://files.pythonhosted.org/packages/31/64/b9d733296ef79815226dab8c586ff9e3df41c6aff2e16c06697b2d2e6775/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213", size = 1682042, upload-time = "2025-10-28T20:57:55.617Z" }, + { url = "https://files.pythonhosted.org/packages/3f/30/43d3e0f9d6473a6db7d472104c4eff4417b1e9df01774cb930338806d36b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49", size = 1680085, upload-time = "2025-10-28T20:57:57.59Z" }, + { url = "https://files.pythonhosted.org/packages/16/51/c709f352c911b1864cfd1087577760ced64b3e5bee2aa88b8c0c8e2e4972/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae", size = 1728238, upload-time = "2025-10-28T20:57:59.525Z" }, + { url = "https://files.pythonhosted.org/packages/19/e2/19bd4c547092b773caeb48ff5ae4b1ae86756a0ee76c16727fcfd281404b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa", size = 1544395, upload-time = "2025-10-28T20:58:01.914Z" }, + { url = "https://files.pythonhosted.org/packages/cf/87/860f2803b27dfc5ed7be532832a3498e4919da61299b4a1f8eb89b8ff44d/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4", size = 1742965, upload-time = "2025-10-28T20:58:03.972Z" }, + { url = "https://files.pythonhosted.org/packages/67/7f/db2fc7618925e8c7a601094d5cbe539f732df4fb570740be88ed9e40e99a/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a", size = 1697585, upload-time = "2025-10-28T20:58:06.189Z" }, + { url = "https://files.pythonhosted.org/packages/0c/07/9127916cb09bb38284db5036036042b7b2c514c8ebaeee79da550c43a6d6/aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940", size = 431621, upload-time = "2025-10-28T20:58:08.636Z" 
}, + { url = "https://files.pythonhosted.org/packages/fb/41/554a8a380df6d3a2bba8a7726429a23f4ac62aaf38de43bb6d6cde7b4d4d/aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4", size = 457627, upload-time = "2025-10-28T20:58:11Z" }, + { url = "https://files.pythonhosted.org/packages/c7/8e/3824ef98c039d3951cb65b9205a96dd2b20f22241ee17d89c5701557c826/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673", size = 767360, upload-time = "2025-10-28T20:58:13.358Z" }, + { url = "https://files.pythonhosted.org/packages/a4/0f/6a03e3fc7595421274fa34122c973bde2d89344f8a881b728fa8c774e4f1/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd", size = 504616, upload-time = "2025-10-28T20:58:15.339Z" }, + { url = "https://files.pythonhosted.org/packages/c6/aa/ed341b670f1bc8a6f2c6a718353d13b9546e2cef3544f573c6a1ff0da711/aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3", size = 509131, upload-time = "2025-10-28T20:58:17.693Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f0/c68dac234189dae5c4bbccc0f96ce0cc16b76632cfc3a08fff180045cfa4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf", size = 1864168, upload-time = "2025-10-28T20:58:20.113Z" }, + { url = "https://files.pythonhosted.org/packages/8f/65/75a9a76db8364b5d0e52a0c20eabc5d52297385d9af9c35335b924fafdee/aiohttp-3.13.2-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e", size = 1719200, upload-time = "2025-10-28T20:58:22.583Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/55/8df2ed78d7f41d232f6bd3ff866b6f617026551aa1d07e2f03458f964575/aiohttp-3.13.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5", size = 1843497, upload-time = "2025-10-28T20:58:24.672Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e0/94d7215e405c5a02ccb6a35c7a3a6cfff242f457a00196496935f700cde5/aiohttp-3.13.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad", size = 1935703, upload-time = "2025-10-28T20:58:26.758Z" }, + { url = "https://files.pythonhosted.org/packages/0b/78/1eeb63c3f9b2d1015a4c02788fb543141aad0a03ae3f7a7b669b2483f8d4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e", size = 1792738, upload-time = "2025-10-28T20:58:29.787Z" }, + { url = "https://files.pythonhosted.org/packages/41/75/aaf1eea4c188e51538c04cc568040e3082db263a57086ea74a7d38c39e42/aiohttp-3.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61", size = 1624061, upload-time = "2025-10-28T20:58:32.529Z" }, + { url = "https://files.pythonhosted.org/packages/9b/c2/3b6034de81fbcc43de8aeb209073a2286dfb50b86e927b4efd81cf848197/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661", size = 1789201, upload-time = "2025-10-28T20:58:34.618Z" }, + { url = "https://files.pythonhosted.org/packages/c9/38/c15dcf6d4d890217dae79d7213988f4e5fe6183d43893a9cf2fe9e84ca8d/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98", size = 1776868, 
upload-time = "2025-10-28T20:58:38.835Z" }, + { url = "https://files.pythonhosted.org/packages/04/75/f74fd178ac81adf4f283a74847807ade5150e48feda6aef024403716c30c/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693", size = 1790660, upload-time = "2025-10-28T20:58:41.507Z" }, + { url = "https://files.pythonhosted.org/packages/e7/80/7368bd0d06b16b3aba358c16b919e9c46cf11587dc572091031b0e9e3ef0/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a", size = 1617548, upload-time = "2025-10-28T20:58:43.674Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4b/a6212790c50483cb3212e507378fbe26b5086d73941e1ec4b56a30439688/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be", size = 1817240, upload-time = "2025-10-28T20:58:45.787Z" }, + { url = "https://files.pythonhosted.org/packages/ff/f7/ba5f0ba4ea8d8f3c32850912944532b933acbf0f3a75546b89269b9b7dde/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c", size = 1762334, upload-time = "2025-10-28T20:58:47.936Z" }, + { url = "https://files.pythonhosted.org/packages/7e/83/1a5a1856574588b1cad63609ea9ad75b32a8353ac995d830bf5da9357364/aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734", size = 464685, upload-time = "2025-10-28T20:58:50.642Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4d/d22668674122c08f4d56972297c51a624e64b3ed1efaa40187607a7cb66e/aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f", size = 498093, upload-time = "2025-10-28T20:58:52.782Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/4a/3da532fdf51b5e58fffa1a86d6569184cb1bf4bf81cd4434b6541a8d14fd/aiohttp-3.13.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7fbdf5ad6084f1940ce88933de34b62358d0f4a0b6ec097362dcd3e5a65a4989", size = 739009, upload-time = "2025-10-28T20:58:55.682Z" }, + { url = "https://files.pythonhosted.org/packages/89/74/fefa6f7939cdc1d77e5cad712004e675a8847dccc589dcc3abca7feaed73/aiohttp-3.13.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7c3a50345635a02db61792c85bb86daffac05330f6473d524f1a4e3ef9d0046d", size = 495308, upload-time = "2025-10-28T20:58:58.408Z" }, + { url = "https://files.pythonhosted.org/packages/4e/b4/a0638ae1f12d09a0dc558870968a2f19a1eba1b10ad0a85ef142ddb40b50/aiohttp-3.13.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0e87dff73f46e969af38ab3f7cb75316a7c944e2e574ff7c933bc01b10def7f5", size = 490624, upload-time = "2025-10-28T20:59:00.479Z" }, + { url = "https://files.pythonhosted.org/packages/02/73/361cd4cac9d98a5a4183d1f26faf7b777330f8dba838c5aae2412862bdd0/aiohttp-3.13.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2adebd4577724dcae085665f294cc57c8701ddd4d26140504db622b8d566d7aa", size = 1662968, upload-time = "2025-10-28T20:59:03.105Z" }, + { url = "https://files.pythonhosted.org/packages/9e/93/ce2ca7584555a6c7dd78f2e6b539a96c5172d88815e13a05a576e14a5a22/aiohttp-3.13.2-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e036a3a645fe92309ec34b918394bb377950cbb43039a97edae6c08db64b23e2", size = 1627117, upload-time = "2025-10-28T20:59:05.274Z" }, + { url = "https://files.pythonhosted.org/packages/a6/42/7ee0e699111f5fc20a69b3203e8f5d5da0b681f270b90bc088d15e339980/aiohttp-3.13.2-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:23ad365e30108c422d0b4428cf271156dd56790f6dd50d770b8e360e6c5ab2e6", size = 1724037, upload-time = "2025-10-28T20:59:07.522Z" }, + { 
url = "https://files.pythonhosted.org/packages/66/88/67ad5ff11dd61dd1d7882cda39f085d5fca31cf7e2143f5173429d8a591e/aiohttp-3.13.2-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1f9b2c2d4b9d958b1f9ae0c984ec1dd6b6689e15c75045be8ccb4011426268ca", size = 1812899, upload-time = "2025-10-28T20:59:11.698Z" }, + { url = "https://files.pythonhosted.org/packages/60/1b/a46f6e1c2a347b9c7a789292279c159b327fadecbf8340f3b05fffff1151/aiohttp-3.13.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a92cf4b9bea33e15ecbaa5c59921be0f23222608143d025c989924f7e3e0c07", size = 1660961, upload-time = "2025-10-28T20:59:14.425Z" }, + { url = "https://files.pythonhosted.org/packages/44/cc/1af9e466eafd9b5d8922238c69aaf95b656137add4c5db65f63ee129bf3c/aiohttp-3.13.2-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:070599407f4954021509193404c4ac53153525a19531051661440644728ba9a7", size = 1553851, upload-time = "2025-10-28T20:59:17.044Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d1/9e5f4f40f9d0ee5668e9b5e7ebfb0eaf371cc09da03785decdc5da56f4b3/aiohttp-3.13.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:29562998ec66f988d49fb83c9b01694fa927186b781463f376c5845c121e4e0b", size = 1634260, upload-time = "2025-10-28T20:59:19.378Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5d065091c4ae8b55a153f458f19308191bad3b62a89496aa081385486338/aiohttp-3.13.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4dd3db9d0f4ebca1d887d76f7cdbcd1116ac0d05a9221b9dad82c64a62578c4d", size = 1639499, upload-time = "2025-10-28T20:59:22.013Z" }, + { url = "https://files.pythonhosted.org/packages/a3/de/58ae6dc73691a51ff16f69a94d13657bf417456fa0fdfed2b59dd6b4c293/aiohttp-3.13.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:d7bc4b7f9c4921eba72677cd9fedd2308f4a4ca3e12fab58935295ad9ea98700", size = 1694087, upload-time = "2025-10-28T20:59:24.773Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/fe/4d9df516268867d83041b6c073ee15cd532dbea58b82d675a7e1cf2ec24c/aiohttp-3.13.2-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:dacd50501cd017f8cccb328da0c90823511d70d24a323196826d923aad865901", size = 1540532, upload-time = "2025-10-28T20:59:27.982Z" }, + { url = "https://files.pythonhosted.org/packages/24/e7/a802619308232499482bf30b3530efb5d141481cfd61850368350fb1acb5/aiohttp-3.13.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:8b2f1414f6a1e0683f212ec80e813f4abef94c739fd090b66c9adf9d2a05feac", size = 1710369, upload-time = "2025-10-28T20:59:30.363Z" }, + { url = "https://files.pythonhosted.org/packages/62/08/e8593f39f025efe96ef59550d17cf097222d84f6f84798bedac5bf037fce/aiohttp-3.13.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04c3971421576ed24c191f610052bcb2f059e395bc2489dd99e397f9bc466329", size = 1649296, upload-time = "2025-10-28T20:59:33.285Z" }, + { url = "https://files.pythonhosted.org/packages/e5/fd/ffbc1b6aa46fc6c284af4a438b2c7eab79af1c8ac4b6d2ced185c17f403e/aiohttp-3.13.2-cp39-cp39-win32.whl", hash = "sha256:9f377d0a924e5cc94dc620bc6366fc3e889586a7f18b748901cf016c916e2084", size = 432980, upload-time = "2025-10-28T20:59:35.515Z" }, + { url = "https://files.pythonhosted.org/packages/ad/a9/d47e7873175a4d8aed425f2cdea2df700b2dd44fac024ffbd83455a69a50/aiohttp-3.13.2-cp39-cp39-win_amd64.whl", hash = "sha256:9c705601e16c03466cb72011bd1af55d68fa65b045356d8f96c216e5f6db0fa5", size = 456021, upload-time = "2025-10-28T20:59:37.659Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "frozenlist", version = "1.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/67/0952ed97a9793b4958e5736f6d2b346b414a2cd63e82d05940032f45b32f/aiosignal-1.3.1.tar.gz", 
hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc", size = 19422, upload-time = "2022-11-08T16:03:58.806Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/ac/a7305707cb852b7e16ff80eaf5692309bde30e2b1100a1fcacdc8f731d97/aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17", size = 7617, upload-time = "2022-11-08T16:03:57.483Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "frozenlist", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9' and python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 
63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "black" +version = "24.8.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "mypy-extensions", marker = "python_full_version < '3.9'" }, + { name = "packaging", 
marker = "python_full_version < '3.9'" }, + { name = "pathspec", marker = "python_full_version < '3.9'" }, + { name = "platformdirs", version = "4.3.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "tomli", marker = "python_full_version < '3.9'" }, + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/b0/46fb0d4e00372f4a86a6f8efa3cb193c9f64863615e39010b1477e010578/black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f", size = 644810, upload-time = "2024-08-02T17:43:18.405Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/6e/74e29edf1fba3887ed7066930a87f698ffdcd52c5dbc263eabb06061672d/black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6", size = 1632092, upload-time = "2024-08-02T17:47:26.911Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/575cb6c3faee690b05c9d11ee2e8dba8fbd6d6c134496e644c1feb1b47da/black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb", size = 1457529, upload-time = "2024-08-02T17:47:29.109Z" }, + { url = "https://files.pythonhosted.org/packages/7a/b4/d34099e95c437b53d01c4aa37cf93944b233066eb034ccf7897fa4e5f286/black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42", size = 1757443, upload-time = "2024-08-02T17:46:20.306Z" }, + { url = "https://files.pythonhosted.org/packages/87/a0/6d2e4175ef364b8c4b64f8441ba041ed65c63ea1db2720d61494ac711c15/black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a", size = 1418012, upload-time 
= "2024-08-02T17:47:20.33Z" }, + { url = "https://files.pythonhosted.org/packages/08/a6/0a3aa89de9c283556146dc6dbda20cd63a9c94160a6fbdebaf0918e4a3e1/black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1", size = 1615080, upload-time = "2024-08-02T17:48:05.467Z" }, + { url = "https://files.pythonhosted.org/packages/db/94/b803d810e14588bb297e565821a947c108390a079e21dbdcb9ab6956cd7a/black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af", size = 1438143, upload-time = "2024-08-02T17:47:30.247Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b5/f485e1bbe31f768e2e5210f52ea3f432256201289fd1a3c0afda693776b0/black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4", size = 1738774, upload-time = "2024-08-02T17:46:17.837Z" }, + { url = "https://files.pythonhosted.org/packages/a8/69/a000fc3736f89d1bdc7f4a879f8aaf516fb03613bb51a0154070383d95d9/black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af", size = 1427503, upload-time = "2024-08-02T17:46:22.654Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a8/05fb14195cfef32b7c8d4585a44b7499c2a4b205e1662c427b941ed87054/black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368", size = 1646132, upload-time = "2024-08-02T17:49:52.843Z" }, + { url = "https://files.pythonhosted.org/packages/41/77/8d9ce42673e5cb9988f6df73c1c5c1d4e9e788053cccd7f5fb14ef100982/black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed", size = 1448665, upload-time = "2024-08-02T17:47:54.479Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/94/eff1ddad2ce1d3cc26c162b3693043c6b6b575f538f602f26fe846dfdc75/black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018", size = 1762458, upload-time = "2024-08-02T17:46:19.384Z" }, + { url = "https://files.pythonhosted.org/packages/28/ea/18b8d86a9ca19a6942e4e16759b2fa5fc02bbc0eb33c1b866fcd387640ab/black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2", size = 1436109, upload-time = "2024-08-02T17:46:52.97Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d4/ae03761ddecc1a37d7e743b89cccbcf3317479ff4b88cfd8818079f890d0/black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd", size = 1617322, upload-time = "2024-08-02T17:51:20.203Z" }, + { url = "https://files.pythonhosted.org/packages/14/4b/4dfe67eed7f9b1ddca2ec8e4418ea74f0d1dc84d36ea874d618ffa1af7d4/black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2", size = 1442108, upload-time = "2024-08-02T17:50:40.824Z" }, + { url = "https://files.pythonhosted.org/packages/97/14/95b3f91f857034686cae0e73006b8391d76a8142d339b42970eaaf0416ea/black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e", size = 1745786, upload-time = "2024-08-02T17:46:02.939Z" }, + { url = "https://files.pythonhosted.org/packages/95/54/68b8883c8aa258a6dde958cd5bdfada8382bec47c5162f4a01e66d839af1/black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920", size = 1426754, upload-time = "2024-08-02T17:46:38.603Z" }, + { url = 
"https://files.pythonhosted.org/packages/13/b2/b3f24fdbb46f0e7ef6238e131f13572ee8279b70f237f221dd168a9dba1a/black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c", size = 1631706, upload-time = "2024-08-02T17:49:57.606Z" }, + { url = "https://files.pythonhosted.org/packages/d9/35/31010981e4a05202a84a3116423970fd1a59d2eda4ac0b3570fbb7029ddc/black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e", size = 1457429, upload-time = "2024-08-02T17:49:12.764Z" }, + { url = "https://files.pythonhosted.org/packages/27/25/3f706b4f044dd569a20a4835c3b733dedea38d83d2ee0beb8178a6d44945/black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47", size = 1756488, upload-time = "2024-08-02T17:46:08.067Z" }, + { url = "https://files.pythonhosted.org/packages/63/72/79375cd8277cbf1c5670914e6bd4c1b15dea2c8f8e906dc21c448d0535f0/black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb", size = 1417721, upload-time = "2024-08-02T17:46:42.637Z" }, + { url = "https://files.pythonhosted.org/packages/27/1e/83fa8a787180e1632c3d831f7e58994d7aaf23a0961320d21e84f922f919/black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed", size = 206504, upload-time = "2024-08-02T17:43:15.747Z" }, +] + +[[package]] +name = "black" +version = "25.9.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, 
marker = "python_full_version >= '3.10'" }, + { name = "mypy-extensions", marker = "python_full_version >= '3.9'" }, + { name = "packaging", marker = "python_full_version >= '3.9'" }, + { name = "pathspec", marker = "python_full_version >= '3.9'" }, + { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "platformdirs", version = "4.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pytokens", marker = "python_full_version >= '3.9'" }, + { name = "tomli", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/43/20b5c90612d7bdb2bdbcceeb53d588acca3bb8f0e4c5d5c751a2c8fdd55a/black-25.9.0.tar.gz", hash = "sha256:0474bca9a0dd1b51791fcc507a4e02078a1c63f6d4e4ae5544b9848c7adfb619", size = 648393, upload-time = "2025-09-19T00:27:37.758Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/40/dbe31fc56b218a858c8fc6f5d8d3ba61c1fa7e989d43d4a4574b8b992840/black-25.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce41ed2614b706fd55fd0b4a6909d06b5bab344ffbfadc6ef34ae50adba3d4f7", size = 1715605, upload-time = "2025-09-19T00:36:13.483Z" }, + { url = "https://files.pythonhosted.org/packages/92/b2/f46800621200eab6479b1f4c0e3ede5b4c06b768e79ee228bc80270bcc74/black-25.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ab0ce111ef026790e9b13bd216fa7bc48edd934ffc4cbf78808b235793cbc92", size = 1571829, upload-time = "2025-09-19T00:32:42.13Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/64/5c7f66bd65af5c19b4ea86062bb585adc28d51d37babf70969e804dbd5c2/black-25.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f96b6726d690c96c60ba682955199f8c39abc1ae0c3a494a9c62c0184049a713", size = 1631888, upload-time = "2025-09-19T00:30:54.212Z" }, + { url = "https://files.pythonhosted.org/packages/3b/64/0b9e5bfcf67db25a6eef6d9be6726499a8a72ebab3888c2de135190853d3/black-25.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:d119957b37cc641596063cd7db2656c5be3752ac17877017b2ffcdb9dfc4d2b1", size = 1327056, upload-time = "2025-09-19T00:31:08.877Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f4/7531d4a336d2d4ac6cc101662184c8e7d068b548d35d874415ed9f4116ef/black-25.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:456386fe87bad41b806d53c062e2974615825c7a52159cde7ccaeb0695fa28fa", size = 1698727, upload-time = "2025-09-19T00:31:14.264Z" }, + { url = "https://files.pythonhosted.org/packages/28/f9/66f26bfbbf84b949cc77a41a43e138d83b109502cd9c52dfc94070ca51f2/black-25.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a16b14a44c1af60a210d8da28e108e13e75a284bf21a9afa6b4571f96ab8bb9d", size = 1555679, upload-time = "2025-09-19T00:31:29.265Z" }, + { url = "https://files.pythonhosted.org/packages/bf/59/61475115906052f415f518a648a9ac679d7afbc8da1c16f8fdf68a8cebed/black-25.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aaf319612536d502fdd0e88ce52d8f1352b2c0a955cc2798f79eeca9d3af0608", size = 1617453, upload-time = "2025-09-19T00:30:42.24Z" }, + { url = "https://files.pythonhosted.org/packages/7f/5b/20fd5c884d14550c911e4fb1b0dae00d4abb60a4f3876b449c4d3a9141d5/black-25.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:c0372a93e16b3954208417bfe448e09b0de5cc721d521866cd9e0acac3c04a1f", size = 1333655, upload-time = "2025-09-19T00:30:56.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/8e/319cfe6c82f7e2d5bfb4d3353c6cc85b523d677ff59edc61fdb9ee275234/black-25.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1b9dc70c21ef8b43248f1d86aedd2aaf75ae110b958a7909ad8463c4aa0880b0", size = 1742012, upload-time = "2025-09-19T00:33:08.678Z" }, + { url = "https://files.pythonhosted.org/packages/94/cc/f562fe5d0a40cd2a4e6ae3f685e4c36e365b1f7e494af99c26ff7f28117f/black-25.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e46eecf65a095fa62e53245ae2795c90bdecabd53b50c448d0a8bcd0d2e74c4", size = 1581421, upload-time = "2025-09-19T00:35:25.937Z" }, + { url = "https://files.pythonhosted.org/packages/84/67/6db6dff1ebc8965fd7661498aea0da5d7301074b85bba8606a28f47ede4d/black-25.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9101ee58ddc2442199a25cb648d46ba22cd580b00ca4b44234a324e3ec7a0f7e", size = 1655619, upload-time = "2025-09-19T00:30:49.241Z" }, + { url = "https://files.pythonhosted.org/packages/10/10/3faef9aa2a730306cf469d76f7f155a8cc1f66e74781298df0ba31f8b4c8/black-25.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:77e7060a00c5ec4b3367c55f39cf9b06e68965a4f2e61cecacd6d0d9b7ec945a", size = 1342481, upload-time = "2025-09-19T00:31:29.625Z" }, + { url = "https://files.pythonhosted.org/packages/48/99/3acfea65f5e79f45472c45f87ec13037b506522719cd9d4ac86484ff51ac/black-25.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0172a012f725b792c358d57fe7b6b6e8e67375dd157f64fa7a3097b3ed3e2175", size = 1742165, upload-time = "2025-09-19T00:34:10.402Z" }, + { url = "https://files.pythonhosted.org/packages/3a/18/799285282c8236a79f25d590f0222dbd6850e14b060dfaa3e720241fd772/black-25.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3bec74ee60f8dfef564b573a96b8930f7b6a538e846123d5ad77ba14a8d7a64f", size = 1581259, upload-time = "2025-09-19T00:32:49.685Z" }, + { url = 
"https://files.pythonhosted.org/packages/f1/ce/883ec4b6303acdeca93ee06b7622f1fa383c6b3765294824165d49b1a86b/black-25.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b756fc75871cb1bcac5499552d771822fd9db5a2bb8db2a7247936ca48f39831", size = 1655583, upload-time = "2025-09-19T00:30:44.505Z" }, + { url = "https://files.pythonhosted.org/packages/21/17/5c253aa80a0639ccc427a5c7144534b661505ae2b5a10b77ebe13fa25334/black-25.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:846d58e3ce7879ec1ffe816bb9df6d006cd9590515ed5d17db14e17666b2b357", size = 1343428, upload-time = "2025-09-19T00:32:13.839Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/0f724eb152bc9fc03029a9c903ddd77a288285042222a381050d27e64ac1/black-25.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef69351df3c84485a8beb6f7b8f9721e2009e20ef80a8d619e2d1788b7816d47", size = 1715243, upload-time = "2025-09-19T00:34:14.216Z" }, + { url = "https://files.pythonhosted.org/packages/fb/be/cb986ea2f0fabd0ee58668367724ba16c3a042842e9ebe009c139f8221c9/black-25.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e3c1f4cd5e93842774d9ee4ef6cd8d17790e65f44f7cdbaab5f2cf8ccf22a823", size = 1571246, upload-time = "2025-09-19T00:31:39.624Z" }, + { url = "https://files.pythonhosted.org/packages/82/ce/74cf4d66963fca33ab710e4c5817ceeff843c45649f61f41d88694c2e5db/black-25.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:154b06d618233fe468236ba1f0e40823d4eb08b26f5e9261526fde34916b9140", size = 1631265, upload-time = "2025-09-19T00:31:05.341Z" }, + { url = "https://files.pythonhosted.org/packages/ff/f3/9b11e001e84b4d1721f75e20b3c058854a748407e6fc1abe6da0aa22014f/black-25.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:e593466de7b998374ea2585a471ba90553283fb9beefcfa430d84a2651ed5933", size = 1326615, upload-time = "2025-09-19T00:31:25.347Z" }, + { url = 
"https://files.pythonhosted.org/packages/1b/46/863c90dcd3f9d41b109b7f19032ae0db021f0b2a81482ba0a1e28c84de86/black-25.9.0-py3-none-any.whl", hash = "sha256:474b34c1342cdc157d307b56c4c65bce916480c4a8f6551fdc6bf9b486a7c4ae", size = 203363, upload-time = "2025-09-19T00:27:35.724Z" }, +] + +[[package]] +name = "boto3" +version = "1.37.38" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "botocore", version = "1.37.38", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "jmespath", marker = "python_full_version < '3.9'" }, + { name = "s3transfer", version = "0.11.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/b5/d1c2e8c484cea43891629bbab6ca90ce9ca932586750bc0e786c8f096ccf/boto3-1.37.38.tar.gz", hash = "sha256:88c02910933ab7777597d1ca7c62375f52822e0aa1a8e0c51b2598a547af42b2", size = 111623, upload-time = "2025-04-21T19:27:18.06Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/87/8189f22ee798177bc7b40afd13f046442c5f91b699e70a950b42ff447e80/boto3-1.37.38-py3-none-any.whl", hash = "sha256:b6d42803607148804dff82389757827a24ce9271f0583748853934c86310999f", size = 139922, upload-time = "2025-04-21T19:27:16.107Z" }, +] + +[[package]] +name = "boto3" +version = "1.40.62" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "botocore", version = "1.40.62", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "jmespath", marker = "python_full_version >= '3.9'" }, + { name = "s3transfer", version = "0.14.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b4/69/2612a06d584786500ba7ea068927e95e24719da3b6734bd23c50788f5982/boto3-1.40.62.tar.gz", hash = "sha256:3dbe7e1e7dc9127a4b1f2020a14f38ffe64fad84df00623e8ab6a5d49a82ea28", size = 111499, upload-time = "2025-10-29T21:33:13.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/7d/8b67dea3e88b66b67f0ad17a3b443e498c20c6d9a49a7a079c413c624def/boto3-1.40.62-py3-none-any.whl", hash = "sha256:f422d4ae3b278832ba807059aafa553164bce2c464cd65b24c9ea8fb8a6c4192", size = 139320, upload-time = "2025-10-29T21:33:12.422Z" }, +] + +[[package]] +name = "botocore" +version = "1.37.38" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "jmespath", marker = "python_full_version < '3.9'" }, + { name = "python-dateutil", marker = "python_full_version < '3.9'" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/34/79/4e072e614339727f79afef704e5993b5b4d2667c1671c757cc4deb954744/botocore-1.37.38.tar.gz", hash = "sha256:c3ea386177171f2259b284db6afc971c959ec103fa2115911c4368bea7cbbc5d", size = 13832365, upload-time = "2025-04-21T19:27:05.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/1b/93f3504afc7c523dcaa8a8147cfc75421983e30b08d9f93a533929589630/botocore-1.37.38-py3-none-any.whl", hash = "sha256:23b4097780e156a4dcaadfc1ed156ce25cb95b6087d010c4bb7f7f5d9bc9d219", size = 13499391, upload-time = "2025-04-21T19:27:00.869Z" }, +] + +[[package]] +name = "botocore" +version = "1.40.62" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "jmespath", marker = "python_full_version >= '3.9'" }, + { name = "python-dateutil", marker = "python_full_version >= 
'3.9'" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/d6/dc11fecf450c60175fd568791e2324e059e81bc4adac85d83f272ab293f5/botocore-1.40.62.tar.gz", hash = "sha256:1e8e57c131597dc234d67428bda1323e8f0a687ea13ea570253159ab9256fa28", size = 14389174, upload-time = "2025-10-29T21:33:03.209Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/de/be9e3d25e6d114dfd0bb2dd42c9c3ae78b693b5e519a736b76f505fdb0d1/botocore-1.40.62-py3-none-any.whl", hash = "sha256:780f1d476d4b530ce3b12fd9f7112156d97d99ebdbbd9ef60635b0432af9d3a5", size = 14056496, upload-time = "2025-10-29T21:33:00.401Z" }, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + 
{ url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/b8/6d51fc1d52cbd52cd4ccedd5b5b2f0f6a11bbf6765c782298b0f3e808541/charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d", size = 209709, upload-time = "2025-10-14T04:40:11.385Z" }, + { url = "https://files.pythonhosted.org/packages/5c/af/1f9d7f7faafe2ddfb6f72a2e07a548a629c61ad510fe60f9630309908fef/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8", size = 148814, upload-time = "2025-10-14T04:40:13.135Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/f2e3ac2bbc056ca0c204298ea4e3d9db9b4afe437812638759db2c976b5f/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad", size = 144467, upload-time = "2025-10-14T04:40:14.728Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/85/1bf997003815e60d57de7bd972c57dc6950446a3e4ccac43bc3070721856/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8", size = 162280, upload-time = "2025-10-14T04:40:16.14Z" }, + { url = "https://files.pythonhosted.org/packages/3e/8e/6aa1952f56b192f54921c436b87f2aaf7c7a7c3d0d1a765547d64fd83c13/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d", size = 159454, upload-time = "2025-10-14T04:40:17.567Z" }, + { url = "https://files.pythonhosted.org/packages/36/3b/60cbd1f8e93aa25d1c669c649b7a655b0b5fb4c571858910ea9332678558/charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313", size = 153609, upload-time = "2025-10-14T04:40:19.08Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/6a13396948b8fd3c4b4fd5bc74d045f5637d78c9675585e8e9fbe5636554/charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e", size = 151849, upload-time = "2025-10-14T04:40:20.607Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7a/59482e28b9981d105691e968c544cc0df3b7d6133152fb3dcdc8f135da7a/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93", size = 151586, upload-time = "2025-10-14T04:40:21.719Z" }, + { url = "https://files.pythonhosted.org/packages/92/59/f64ef6a1c4bdd2baf892b04cd78792ed8684fbc48d4c2afe467d96b4df57/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = 
"sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0", size = 145290, upload-time = "2025-10-14T04:40:23.069Z" }, + { url = "https://files.pythonhosted.org/packages/6b/63/3bf9f279ddfa641ffa1962b0db6a57a9c294361cc2f5fcac997049a00e9c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84", size = 163663, upload-time = "2025-10-14T04:40:24.17Z" }, + { url = "https://files.pythonhosted.org/packages/ed/09/c9e38fc8fa9e0849b172b581fd9803bdf6e694041127933934184e19f8c3/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e", size = 151964, upload-time = "2025-10-14T04:40:25.368Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d1/d28b747e512d0da79d8b6a1ac18b7ab2ecfd81b2944c4c710e166d8dd09c/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db", size = 161064, upload-time = "2025-10-14T04:40:26.806Z" }, + { url = "https://files.pythonhosted.org/packages/bb/9a/31d62b611d901c3b9e5500c36aab0ff5eb442043fb3a1c254200d3d397d9/charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6", size = 155015, upload-time = "2025-10-14T04:40:28.284Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f3/107e008fa2bff0c8b9319584174418e5e5285fef32f79d8ee6a430d0039c/charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f", size = 99792, upload-time = "2025-10-14T04:40:29.613Z" }, + { url = "https://files.pythonhosted.org/packages/eb/66/e396e8a408843337d7315bab30dbf106c38966f1819f123257f5520f8a96/charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d", size = 
107198, upload-time = "2025-10-14T04:40:30.644Z" }, + { url = "https://files.pythonhosted.org/packages/b5/58/01b4f815bf0312704c267f2ccb6e5d42bcc7752340cd487bc9f8c3710597/charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69", size = 100262, upload-time = "2025-10-14T04:40:32.108Z" }, + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, 
upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, 
+ { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" }, + { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" }, + { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" }, + { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" }, + { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" }, + { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" }, + { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" }, + { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" }, + { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" }, + { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" }, + { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" }, + { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" }, + { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" }, + { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" }, + { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" }, + { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" }, + { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" }, + { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", 
size = 99940, upload-time = "2025-10-14T04:41:49.946Z" }, + { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4e/3926a1c11f0433791985727965263f788af00db3482d89a7545ca5ecc921/charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84", size = 198599, upload-time = "2025-10-14T04:41:53.213Z" }, + { url = "https://files.pythonhosted.org/packages/ec/7c/b92d1d1dcffc34592e71ea19c882b6709e43d20fa498042dea8b815638d7/charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3", size = 143090, upload-time = "2025-10-14T04:41:54.385Z" }, + { url = "https://files.pythonhosted.org/packages/84/ce/61a28d3bb77281eb24107b937a497f3c43089326d27832a63dcedaab0478/charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac", size = 139490, upload-time = "2025-10-14T04:41:55.551Z" }, + { url = "https://files.pythonhosted.org/packages/c0/bd/c9e59a91b2061c6f8bb98a150670cb16d4cd7c4ba7d11ad0cdf789155f41/charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af", size = 155334, upload-time = "2025-10-14T04:41:56.724Z" }, + { url = "https://files.pythonhosted.org/packages/bf/37/f17ae176a80f22ff823456af91ba3bc59df308154ff53aef0d39eb3d3419/charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2", size = 152823, upload-time = "2025-10-14T04:41:58.236Z" }, + { url = "https://files.pythonhosted.org/packages/bf/fa/cf5bb2409a385f78750e78c8d2e24780964976acdaaed65dbd6083ae5b40/charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d", size = 147618, upload-time = "2025-10-14T04:41:59.409Z" }, + { url = "https://files.pythonhosted.org/packages/9b/63/579784a65bc7de2d4518d40bb8f1870900163e86f17f21fd1384318c459d/charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3", size = 145516, upload-time = "2025-10-14T04:42:00.579Z" }, + { url = "https://files.pythonhosted.org/packages/a3/a9/94ec6266cd394e8f93a4d69cca651d61bf6ac58d2a0422163b30c698f2c7/charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63", size = 145266, upload-time = "2025-10-14T04:42:01.684Z" }, + { url = "https://files.pythonhosted.org/packages/09/14/d6626eb97764b58c2779fa7928fa7d1a49adb8ce687c2dbba4db003c1939/charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7", size = 139559, upload-time = "2025-10-14T04:42:02.902Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/01/ddbe6b01313ba191dbb0a43c7563bc770f2448c18127f9ea4b119c44dff0/charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4", size = 156653, upload-time = "2025-10-14T04:42:04.005Z" }, + { url = "https://files.pythonhosted.org/packages/95/c8/d05543378bea89296e9af4510b44c704626e191da447235c8fdedfc5b7b2/charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf", size = 145644, upload-time = "2025-10-14T04:42:05.211Z" }, + { url = "https://files.pythonhosted.org/packages/72/01/2866c4377998ef8a1f6802f6431e774a4c8ebe75b0a6e569ceec55c9cbfb/charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074", size = 153964, upload-time = "2025-10-14T04:42:06.341Z" }, + { url = "https://files.pythonhosted.org/packages/4a/66/66c72468a737b4cbd7851ba2c522fe35c600575fbeac944460b4fd4a06fe/charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a", size = 148777, upload-time = "2025-10-14T04:42:07.535Z" }, + { url = "https://files.pythonhosted.org/packages/50/94/d0d56677fdddbffa8ca00ec411f67bb8c947f9876374ddc9d160d4f2c4b3/charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa", size = 98687, upload-time = "2025-10-14T04:42:08.678Z" }, + { url = "https://files.pythonhosted.org/packages/00/64/c3bc303d1b586480b1c8e6e1e2191a6d6dd40255244e5cf16763dcec52e6/charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576", size = 106115, upload-time = "2025-10-14T04:42:09.793Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/7c/0c4760bccf082737ca7ab84a4c2034fcc06b1f21cf3032ea98bd6feb1725/charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9", size = 209609, upload-time = "2025-10-14T04:42:10.922Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a4/69719daef2f3d7f1819de60c9a6be981b8eeead7542d5ec4440f3c80e111/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d", size = 149029, upload-time = "2025-10-14T04:42:12.38Z" }, + { url = "https://files.pythonhosted.org/packages/e6/21/8d4e1d6c1e6070d3672908b8e4533a71b5b53e71d16828cc24d0efec564c/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608", size = 144580, upload-time = "2025-10-14T04:42:13.549Z" }, + { url = "https://files.pythonhosted.org/packages/a7/0a/a616d001b3f25647a9068e0b9199f697ce507ec898cacb06a0d5a1617c99/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc", size = 162340, upload-time = "2025-10-14T04:42:14.892Z" }, + { url = "https://files.pythonhosted.org/packages/85/93/060b52deb249a5450460e0585c88a904a83aec474ab8e7aba787f45e79f2/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e", size = 159619, upload-time = "2025-10-14T04:42:16.676Z" }, + { url = "https://files.pythonhosted.org/packages/dd/21/0274deb1cc0632cd587a9a0ec6b4674d9108e461cb4cd40d457adaeb0564/charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1", size = 153980, upload-time = "2025-10-14T04:42:17.917Z" }, + { url = "https://files.pythonhosted.org/packages/28/2b/e3d7d982858dccc11b31906976323d790dded2017a0572f093ff982d692f/charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3", size = 152174, upload-time = "2025-10-14T04:42:19.018Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ff/4a269f8e35f1e58b2df52c131a1fa019acb7ef3f8697b7d464b07e9b492d/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6", size = 151666, upload-time = "2025-10-14T04:42:20.171Z" }, + { url = "https://files.pythonhosted.org/packages/da/c9/ec39870f0b330d58486001dd8e532c6b9a905f5765f58a6f8204926b4a93/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88", size = 145550, upload-time = "2025-10-14T04:42:21.324Z" }, + { url = "https://files.pythonhosted.org/packages/75/8f/d186ab99e40e0ed9f82f033d6e49001701c81244d01905dd4a6924191a30/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1", size = 163721, upload-time = "2025-10-14T04:42:22.46Z" }, + { url = "https://files.pythonhosted.org/packages/96/b1/6047663b9744df26a7e479ac1e77af7134b1fcf9026243bb48ee2d18810f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf", size = 152127, upload-time = "2025-10-14T04:42:23.712Z" }, + { url = "https://files.pythonhosted.org/packages/59/78/e5a6eac9179f24f704d1be67d08704c3c6ab9f00963963524be27c18ed87/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318", size = 161175, upload-time = "2025-10-14T04:42:24.87Z" }, + { url = "https://files.pythonhosted.org/packages/e5/43/0e626e42d54dd2f8dd6fc5e1c5ff00f05fbca17cb699bedead2cae69c62f/charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c", size = 155375, upload-time = "2025-10-14T04:42:27.246Z" }, + { url = "https://files.pythonhosted.org/packages/e9/91/d9615bf2e06f35e4997616ff31248c3657ed649c5ab9d35ea12fce54e380/charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505", size = 99692, upload-time = "2025-10-14T04:42:28.425Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a9/6c040053909d9d1ef4fcab45fddec083aedc9052c10078339b47c8573ea8/charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966", size = 107192, upload-time = "2025-10-14T04:42:29.482Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c6/4fa536b2c0cd3edfb7ccf8469fa0f363ea67b7213a842b90909ca33dd851/charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50", size = 100220, upload-time = "2025-10-14T04:42:30.632Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.9.*'", + "python_full_version < '3.9'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = "2024-12-21T18:38:41.666Z" }, +] + +[[package]] +name = "click" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "coverage" +version = "7.6.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/08/7e37f82e4d1aead42a7443ff06a1e406aabf7302c4f00a546e4b320b994c/coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d", size = 798791, upload-time = "2024-08-04T19:45:30.9Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/61/eb7ce5ed62bacf21beca4937a90fe32545c91a3c8a42a30c6616d48fc70d/coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16", size = 206690, upload-time = "2024-08-04T19:43:07.695Z" }, + { url = "https://files.pythonhosted.org/packages/7d/73/041928e434442bd3afde5584bdc3f932fb4562b1597629f537387cec6f3d/coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36", size = 207127, upload-time = "2024-08-04T19:43:10.15Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c8/6ca52b5147828e45ad0242388477fdb90df2c6cbb9a441701a12b3c71bc8/coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02", size = 235654, upload-time = "2024-08-04T19:43:12.405Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/9ac2b62557f4340270942011d6efeab9833648380109e897d48ab7c1035d/coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc", size = 233598, upload-time = "2024-08-04T19:43:14.078Z" }, + { url = "https://files.pythonhosted.org/packages/53/23/9e2c114d0178abc42b6d8d5281f651a8e6519abfa0ef460a00a91f80879d/coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23", size = 234732, upload-time = "2024-08-04T19:43:16.632Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7e/a0230756fb133343a52716e8b855045f13342b70e48e8ad41d8a0d60ab98/coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34", size = 233816, upload-time = "2024-08-04T19:43:19.049Z" }, + { url = "https://files.pythonhosted.org/packages/28/7c/3753c8b40d232b1e5eeaed798c875537cf3cb183fb5041017c1fdb7ec14e/coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c", size = 232325, upload-time = "2024-08-04T19:43:21.246Z" }, + { url = "https://files.pythonhosted.org/packages/57/e3/818a2b2af5b7573b4b82cf3e9f137ab158c90ea750a8f053716a32f20f06/coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959", size = 233418, upload-time = "2024-08-04T19:43:22.945Z" }, + { url = "https://files.pythonhosted.org/packages/c8/fb/4532b0b0cefb3f06d201648715e03b0feb822907edab3935112b61b885e2/coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232", size = 209343, upload-time = "2024-08-04T19:43:25.121Z" }, + { url = "https://files.pythonhosted.org/packages/5a/25/af337cc7421eca1c187cc9c315f0a755d48e755d2853715bfe8c418a45fa/coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0", size = 
210136, upload-time = "2024-08-04T19:43:26.851Z" }, + { url = "https://files.pythonhosted.org/packages/ad/5f/67af7d60d7e8ce61a4e2ddcd1bd5fb787180c8d0ae0fbd073f903b3dd95d/coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93", size = 206796, upload-time = "2024-08-04T19:43:29.115Z" }, + { url = "https://files.pythonhosted.org/packages/e1/0e/e52332389e057daa2e03be1fbfef25bb4d626b37d12ed42ae6281d0a274c/coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3", size = 207244, upload-time = "2024-08-04T19:43:31.285Z" }, + { url = "https://files.pythonhosted.org/packages/aa/cd/766b45fb6e090f20f8927d9c7cb34237d41c73a939358bc881883fd3a40d/coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff", size = 239279, upload-time = "2024-08-04T19:43:33.581Z" }, + { url = "https://files.pythonhosted.org/packages/70/6c/a9ccd6fe50ddaf13442a1e2dd519ca805cbe0f1fcd377fba6d8339b98ccb/coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d", size = 236859, upload-time = "2024-08-04T19:43:35.301Z" }, + { url = "https://files.pythonhosted.org/packages/14/6f/8351b465febb4dbc1ca9929505202db909c5a635c6fdf33e089bbc3d7d85/coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6", size = 238549, upload-time = "2024-08-04T19:43:37.578Z" }, + { url = "https://files.pythonhosted.org/packages/68/3c/289b81fa18ad72138e6d78c4c11a82b5378a312c0e467e2f6b495c260907/coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56", size = 237477, upload-time = "2024-08-04T19:43:39.92Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1c/aa1efa6459d822bd72c4abc0b9418cf268de3f60eeccd65dc4988553bd8d/coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234", size = 236134, upload-time = "2024-08-04T19:43:41.453Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c8/521c698f2d2796565fe9c789c2ee1ccdae610b3aa20b9b2ef980cc253640/coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133", size = 236910, upload-time = "2024-08-04T19:43:43.037Z" }, + { url = "https://files.pythonhosted.org/packages/7d/30/033e663399ff17dca90d793ee8a2ea2890e7fdf085da58d82468b4220bf7/coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c", size = 209348, upload-time = "2024-08-04T19:43:44.787Z" }, + { url = "https://files.pythonhosted.org/packages/20/05/0d1ccbb52727ccdadaa3ff37e4d2dc1cd4d47f0c3df9eb58d9ec8508ca88/coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6", size = 210230, upload-time = "2024-08-04T19:43:46.707Z" }, + { url = "https://files.pythonhosted.org/packages/7e/d4/300fc921dff243cd518c7db3a4c614b7e4b2431b0d1145c1e274fd99bd70/coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778", size = 206983, upload-time = "2024-08-04T19:43:49.082Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ab/6bf00de5327ecb8db205f9ae596885417a31535eeda6e7b99463108782e1/coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391", size = 207221, upload-time = "2024-08-04T19:43:52.15Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/8f/2ead05e735022d1a7f3a0a683ac7f737de14850395a826192f0288703472/coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8", size = 240342, upload-time = "2024-08-04T19:43:53.746Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ef/94043e478201ffa85b8ae2d2c79b4081e5a1b73438aafafccf3e9bafb6b5/coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d", size = 237371, upload-time = "2024-08-04T19:43:55.993Z" }, + { url = "https://files.pythonhosted.org/packages/1f/0f/c890339dd605f3ebc269543247bdd43b703cce6825b5ed42ff5f2d6122c7/coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca", size = 239455, upload-time = "2024-08-04T19:43:57.618Z" }, + { url = "https://files.pythonhosted.org/packages/d1/04/7fd7b39ec7372a04efb0f70c70e35857a99b6a9188b5205efb4c77d6a57a/coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163", size = 238924, upload-time = "2024-08-04T19:44:00.012Z" }, + { url = "https://files.pythonhosted.org/packages/ed/bf/73ce346a9d32a09cf369f14d2a06651329c984e106f5992c89579d25b27e/coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a", size = 237252, upload-time = "2024-08-04T19:44:01.713Z" }, + { url = "https://files.pythonhosted.org/packages/86/74/1dc7a20969725e917b1e07fe71a955eb34bc606b938316bcc799f228374b/coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d", size = 238897, upload-time = 
"2024-08-04T19:44:03.898Z" }, + { url = "https://files.pythonhosted.org/packages/b6/e9/d9cc3deceb361c491b81005c668578b0dfa51eed02cd081620e9a62f24ec/coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5", size = 209606, upload-time = "2024-08-04T19:44:05.532Z" }, + { url = "https://files.pythonhosted.org/packages/47/c8/5a2e41922ea6740f77d555c4d47544acd7dc3f251fe14199c09c0f5958d3/coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb", size = 210373, upload-time = "2024-08-04T19:44:07.079Z" }, + { url = "https://files.pythonhosted.org/packages/8c/f9/9aa4dfb751cb01c949c990d136a0f92027fbcc5781c6e921df1cb1563f20/coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106", size = 207007, upload-time = "2024-08-04T19:44:09.453Z" }, + { url = "https://files.pythonhosted.org/packages/b9/67/e1413d5a8591622a46dd04ff80873b04c849268831ed5c304c16433e7e30/coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9", size = 207269, upload-time = "2024-08-04T19:44:11.045Z" }, + { url = "https://files.pythonhosted.org/packages/14/5b/9dec847b305e44a5634d0fb8498d135ab1d88330482b74065fcec0622224/coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c", size = 239886, upload-time = "2024-08-04T19:44:12.83Z" }, + { url = "https://files.pythonhosted.org/packages/7b/b7/35760a67c168e29f454928f51f970342d23cf75a2bb0323e0f07334c85f3/coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a", size = 237037, upload-time = "2024-08-04T19:44:15.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/f7/95/d2fd31f1d638df806cae59d7daea5abf2b15b5234016a5ebb502c2f3f7ee/coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060", size = 239038, upload-time = "2024-08-04T19:44:17.466Z" }, + { url = "https://files.pythonhosted.org/packages/6e/bd/110689ff5752b67924efd5e2aedf5190cbbe245fc81b8dec1abaffba619d/coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862", size = 238690, upload-time = "2024-08-04T19:44:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/d3/a8/08d7b38e6ff8df52331c83130d0ab92d9c9a8b5462f9e99c9f051a4ae206/coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388", size = 236765, upload-time = "2024-08-04T19:44:20.994Z" }, + { url = "https://files.pythonhosted.org/packages/d6/6a/9cf96839d3147d55ae713eb2d877f4d777e7dc5ba2bce227167d0118dfe8/coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155", size = 238611, upload-time = "2024-08-04T19:44:22.616Z" }, + { url = "https://files.pythonhosted.org/packages/74/e4/7ff20d6a0b59eeaab40b3140a71e38cf52547ba21dbcf1d79c5a32bba61b/coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a", size = 209671, upload-time = "2024-08-04T19:44:24.418Z" }, + { url = "https://files.pythonhosted.org/packages/35/59/1812f08a85b57c9fdb6d0b383d779e47b6f643bc278ed682859512517e83/coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129", size = 210368, upload-time = "2024-08-04T19:44:26.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/15/08913be1c59d7562a3e39fce20661a98c0a3f59d5754312899acc6cb8a2d/coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e", size = 207758, upload-time = "2024-08-04T19:44:29.028Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ae/b5d58dff26cade02ada6ca612a76447acd69dccdbb3a478e9e088eb3d4b9/coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962", size = 208035, upload-time = "2024-08-04T19:44:30.673Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d7/62095e355ec0613b08dfb19206ce3033a0eedb6f4a67af5ed267a8800642/coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb", size = 250839, upload-time = "2024-08-04T19:44:32.412Z" }, + { url = "https://files.pythonhosted.org/packages/7c/1e/c2967cb7991b112ba3766df0d9c21de46b476d103e32bb401b1b2adf3380/coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704", size = 246569, upload-time = "2024-08-04T19:44:34.547Z" }, + { url = "https://files.pythonhosted.org/packages/8b/61/a7a6a55dd266007ed3b1df7a3386a0d760d014542d72f7c2c6938483b7bd/coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b", size = 248927, upload-time = "2024-08-04T19:44:36.313Z" }, + { url = "https://files.pythonhosted.org/packages/c8/fa/13a6f56d72b429f56ef612eb3bc5ce1b75b7ee12864b3bd12526ab794847/coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f", size = 248401, upload-time = 
"2024-08-04T19:44:38.155Z" }, + { url = "https://files.pythonhosted.org/packages/75/06/0429c652aa0fb761fc60e8c6b291338c9173c6aa0f4e40e1902345b42830/coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223", size = 246301, upload-time = "2024-08-04T19:44:39.883Z" }, + { url = "https://files.pythonhosted.org/packages/52/76/1766bb8b803a88f93c3a2d07e30ffa359467810e5cbc68e375ebe6906efb/coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3", size = 247598, upload-time = "2024-08-04T19:44:41.59Z" }, + { url = "https://files.pythonhosted.org/packages/66/8b/f54f8db2ae17188be9566e8166ac6df105c1c611e25da755738025708d54/coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f", size = 210307, upload-time = "2024-08-04T19:44:43.301Z" }, + { url = "https://files.pythonhosted.org/packages/9f/b0/e0dca6da9170aefc07515cce067b97178cefafb512d00a87a1c717d2efd5/coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657", size = 211453, upload-time = "2024-08-04T19:44:45.677Z" }, + { url = "https://files.pythonhosted.org/packages/81/d0/d9e3d554e38beea5a2e22178ddb16587dbcbe9a1ef3211f55733924bf7fa/coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0", size = 206674, upload-time = "2024-08-04T19:44:47.694Z" }, + { url = "https://files.pythonhosted.org/packages/38/ea/cab2dc248d9f45b2b7f9f1f596a4d75a435cb364437c61b51d2eb33ceb0e/coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a", size = 207101, upload-time = "2024-08-04T19:44:49.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/6f/f82f9a500c7c5722368978a5390c418d2a4d083ef955309a8748ecaa8920/coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b", size = 236554, upload-time = "2024-08-04T19:44:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/a6/94/d3055aa33d4e7e733d8fa309d9adf147b4b06a82c1346366fc15a2b1d5fa/coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3", size = 234440, upload-time = "2024-08-04T19:44:53.464Z" }, + { url = "https://files.pythonhosted.org/packages/e4/6e/885bcd787d9dd674de4a7d8ec83faf729534c63d05d51d45d4fa168f7102/coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de", size = 235889, upload-time = "2024-08-04T19:44:55.165Z" }, + { url = "https://files.pythonhosted.org/packages/f4/63/df50120a7744492710854860783d6819ff23e482dee15462c9a833cc428a/coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6", size = 235142, upload-time = "2024-08-04T19:44:57.269Z" }, + { url = "https://files.pythonhosted.org/packages/3a/5d/9d0acfcded2b3e9ce1c7923ca52ccc00c78a74e112fc2aee661125b7843b/coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569", size = 233805, upload-time = "2024-08-04T19:44:59.033Z" }, + { url = "https://files.pythonhosted.org/packages/c4/56/50abf070cb3cd9b1dd32f2c88f083aab561ecbffbcd783275cb51c17f11d/coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989", size = 234655, upload-time = "2024-08-04T19:45:01.398Z" }, + { 
url = "https://files.pythonhosted.org/packages/25/ee/b4c246048b8485f85a2426ef4abab88e48c6e80c74e964bea5cd4cd4b115/coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7", size = 209296, upload-time = "2024-08-04T19:45:03.819Z" }, + { url = "https://files.pythonhosted.org/packages/5c/1c/96cf86b70b69ea2b12924cdf7cabb8ad10e6130eab8d767a1099fbd2a44f/coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8", size = 210137, upload-time = "2024-08-04T19:45:06.25Z" }, + { url = "https://files.pythonhosted.org/packages/19/d3/d54c5aa83268779d54c86deb39c1c4566e5d45c155369ca152765f8db413/coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255", size = 206688, upload-time = "2024-08-04T19:45:08.358Z" }, + { url = "https://files.pythonhosted.org/packages/a5/fe/137d5dca72e4a258b1bc17bb04f2e0196898fe495843402ce826a7419fe3/coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8", size = 207120, upload-time = "2024-08-04T19:45:11.526Z" }, + { url = "https://files.pythonhosted.org/packages/78/5b/a0a796983f3201ff5485323b225d7c8b74ce30c11f456017e23d8e8d1945/coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2", size = 235249, upload-time = "2024-08-04T19:45:13.202Z" }, + { url = "https://files.pythonhosted.org/packages/4e/e1/76089d6a5ef9d68f018f65411fcdaaeb0141b504587b901d74e8587606ad/coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a", size = 233237, upload-time = "2024-08-04T19:45:14.961Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/6f/eef79b779a540326fee9520e5542a8b428cc3bfa8b7c8f1022c1ee4fc66c/coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc", size = 234311, upload-time = "2024-08-04T19:45:16.924Z" }, + { url = "https://files.pythonhosted.org/packages/75/e1/656d65fb126c29a494ef964005702b012f3498db1a30dd562958e85a4049/coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004", size = 233453, upload-time = "2024-08-04T19:45:18.672Z" }, + { url = "https://files.pythonhosted.org/packages/68/6a/45f108f137941a4a1238c85f28fd9d048cc46b5466d6b8dda3aba1bb9d4f/coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb", size = 231958, upload-time = "2024-08-04T19:45:20.63Z" }, + { url = "https://files.pythonhosted.org/packages/9b/e7/47b809099168b8b8c72ae311efc3e88c8d8a1162b3ba4b8da3cfcdb85743/coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36", size = 232938, upload-time = "2024-08-04T19:45:23.062Z" }, + { url = "https://files.pythonhosted.org/packages/52/80/052222ba7058071f905435bad0ba392cc12006380731c37afaf3fe749b88/coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c", size = 209352, upload-time = "2024-08-04T19:45:25.042Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d8/1b92e0b3adcf384e98770a00ca095da1b5f7b483e6563ae4eb5e935d24a1/coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca", size = 210153, upload-time = "2024-08-04T19:45:27.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/2b/0354ed096bca64dc8e32a7cbcae28b34cb5ad0b1fe2125d6d99583313ac0/coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df", size = 198926, upload-time = "2024-08-04T19:45:28.875Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version < '3.9'" }, +] + +[[package]] +name = "coverage" +version = "7.10.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/51/26/d22c300112504f5f9a9fd2297ce33c35f3d353e4aeb987c8419453b2a7c2/coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239", size = 827704, upload-time = "2025-09-21T20:03:56.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/6c/3a3f7a46888e69d18abe3ccc6fe4cb16cccb1e6a2f99698931dafca489e6/coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a", size = 217987, upload-time = "2025-09-21T20:00:57.218Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/952d30f180b1a916c11a56f5c22d3535e943aa22430e9e3322447e520e1c/coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5", size = 218388, upload-time = "2025-09-21T20:01:00.081Z" }, + { url = "https://files.pythonhosted.org/packages/50/2b/9e0cf8ded1e114bcd8b2fd42792b57f1c4e9e4ea1824cde2af93a67305be/coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17", size = 245148, upload-time = "2025-09-21T20:01:01.768Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/20/d0384ac06a6f908783d9b6aa6135e41b093971499ec488e47279f5b846e6/coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b", size = 246958, upload-time = "2025-09-21T20:01:03.355Z" }, + { url = "https://files.pythonhosted.org/packages/60/83/5c283cff3d41285f8eab897651585db908a909c572bdc014bcfaf8a8b6ae/coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87", size = 248819, upload-time = "2025-09-21T20:01:04.968Z" }, + { url = "https://files.pythonhosted.org/packages/60/22/02eb98fdc5ff79f423e990d877693e5310ae1eab6cb20ae0b0b9ac45b23b/coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e", size = 245754, upload-time = "2025-09-21T20:01:06.321Z" }, + { url = "https://files.pythonhosted.org/packages/b4/bc/25c83bcf3ad141b32cd7dc45485ef3c01a776ca3aa8ef0a93e77e8b5bc43/coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e", size = 246860, upload-time = "2025-09-21T20:01:07.605Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b7/95574702888b58c0928a6e982038c596f9c34d52c5e5107f1eef729399b5/coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df", size = 244877, upload-time = "2025-09-21T20:01:08.829Z" }, + { url = "https://files.pythonhosted.org/packages/47/b6/40095c185f235e085df0e0b158f6bd68cc6e1d80ba6c7721dc81d97ec318/coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0", size = 245108, upload-time = "2025-09-21T20:01:10.527Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/50/4aea0556da7a4b93ec9168420d170b55e2eb50ae21b25062513d020c6861/coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13", size = 245752, upload-time = "2025-09-21T20:01:11.857Z" }, + { url = "https://files.pythonhosted.org/packages/6a/28/ea1a84a60828177ae3b100cb6723838523369a44ec5742313ed7db3da160/coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b", size = 220497, upload-time = "2025-09-21T20:01:13.459Z" }, + { url = "https://files.pythonhosted.org/packages/fc/1a/a81d46bbeb3c3fd97b9602ebaa411e076219a150489bcc2c025f151bd52d/coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807", size = 221392, upload-time = "2025-09-21T20:01:14.722Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5d/c1a17867b0456f2e9ce2d8d4708a4c3a089947d0bec9c66cdf60c9e7739f/coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59", size = 218102, upload-time = "2025-09-21T20:01:16.089Z" }, + { url = "https://files.pythonhosted.org/packages/54/f0/514dcf4b4e3698b9a9077f084429681bf3aad2b4a72578f89d7f643eb506/coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a", size = 218505, upload-time = "2025-09-21T20:01:17.788Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/9626b81d17e2a4b25c63ac1b425ff307ecdeef03d67c9a147673ae40dc36/coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699", size = 248898, upload-time = "2025-09-21T20:01:19.488Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/ef/bd8e719c2f7417ba03239052e099b76ea1130ac0cbb183ee1fcaa58aaff3/coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d", size = 250831, upload-time = "2025-09-21T20:01:20.817Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b6/bf054de41ec948b151ae2b79a55c107f5760979538f5fb80c195f2517718/coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e", size = 252937, upload-time = "2025-09-21T20:01:22.171Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e5/3860756aa6f9318227443c6ce4ed7bf9e70bb7f1447a0353f45ac5c7974b/coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23", size = 249021, upload-time = "2025-09-21T20:01:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/26/0f/bd08bd042854f7fd07b45808927ebcce99a7ed0f2f412d11629883517ac2/coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab", size = 250626, upload-time = "2025-09-21T20:01:25.721Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a7/4777b14de4abcc2e80c6b1d430f5d51eb18ed1d75fca56cbce5f2db9b36e/coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82", size = 248682, upload-time = "2025-09-21T20:01:27.105Z" }, + { url = "https://files.pythonhosted.org/packages/34/72/17d082b00b53cd45679bad682fac058b87f011fd8b9fe31d77f5f8d3a4e4/coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2", size = 248402, upload-time = "2025-09-21T20:01:28.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/7a/92367572eb5bdd6a84bfa278cc7e97db192f9f45b28c94a9ca1a921c3577/coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61", size = 249320, upload-time = "2025-09-21T20:01:30.004Z" }, + { url = "https://files.pythonhosted.org/packages/2f/88/a23cc185f6a805dfc4fdf14a94016835eeb85e22ac3a0e66d5e89acd6462/coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14", size = 220536, upload-time = "2025-09-21T20:01:32.184Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ef/0b510a399dfca17cec7bc2f05ad8bd78cf55f15c8bc9a73ab20c5c913c2e/coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2", size = 221425, upload-time = "2025-09-21T20:01:33.557Z" }, + { url = "https://files.pythonhosted.org/packages/51/7f/023657f301a276e4ba1850f82749bc136f5a7e8768060c2e5d9744a22951/coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a", size = 220103, upload-time = "2025-09-21T20:01:34.929Z" }, + { url = "https://files.pythonhosted.org/packages/13/e4/eb12450f71b542a53972d19117ea5a5cea1cab3ac9e31b0b5d498df1bd5a/coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417", size = 218290, upload-time = "2025-09-21T20:01:36.455Z" }, + { url = "https://files.pythonhosted.org/packages/37/66/593f9be12fc19fb36711f19a5371af79a718537204d16ea1d36f16bd78d2/coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973", size = 218515, upload-time = "2025-09-21T20:01:37.982Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/80/4c49f7ae09cafdacc73fbc30949ffe77359635c168f4e9ff33c9ebb07838/coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c", size = 250020, upload-time = "2025-09-21T20:01:39.617Z" }, + { url = "https://files.pythonhosted.org/packages/a6/90/a64aaacab3b37a17aaedd83e8000142561a29eb262cede42d94a67f7556b/coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7", size = 252769, upload-time = "2025-09-21T20:01:41.341Z" }, + { url = "https://files.pythonhosted.org/packages/98/2e/2dda59afd6103b342e096f246ebc5f87a3363b5412609946c120f4e7750d/coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6", size = 253901, upload-time = "2025-09-21T20:01:43.042Z" }, + { url = "https://files.pythonhosted.org/packages/53/dc/8d8119c9051d50f3119bb4a75f29f1e4a6ab9415cd1fa8bf22fcc3fb3b5f/coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59", size = 250413, upload-time = "2025-09-21T20:01:44.469Z" }, + { url = "https://files.pythonhosted.org/packages/98/b3/edaff9c5d79ee4d4b6d3fe046f2b1d799850425695b789d491a64225d493/coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b", size = 251820, upload-time = "2025-09-21T20:01:45.915Z" }, + { url = "https://files.pythonhosted.org/packages/11/25/9a0728564bb05863f7e513e5a594fe5ffef091b325437f5430e8cfb0d530/coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a", size = 249941, upload-time = 
"2025-09-21T20:01:47.296Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fd/ca2650443bfbef5b0e74373aac4df67b08180d2f184b482c41499668e258/coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb", size = 249519, upload-time = "2025-09-21T20:01:48.73Z" }, + { url = "https://files.pythonhosted.org/packages/24/79/f692f125fb4299b6f963b0745124998ebb8e73ecdfce4ceceb06a8c6bec5/coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1", size = 251375, upload-time = "2025-09-21T20:01:50.529Z" }, + { url = "https://files.pythonhosted.org/packages/5e/75/61b9bbd6c7d24d896bfeec57acba78e0f8deac68e6baf2d4804f7aae1f88/coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256", size = 220699, upload-time = "2025-09-21T20:01:51.941Z" }, + { url = "https://files.pythonhosted.org/packages/ca/f3/3bf7905288b45b075918d372498f1cf845b5b579b723c8fd17168018d5f5/coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba", size = 221512, upload-time = "2025-09-21T20:01:53.481Z" }, + { url = "https://files.pythonhosted.org/packages/5c/44/3e32dbe933979d05cf2dac5e697c8599cfe038aaf51223ab901e208d5a62/coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf", size = 220147, upload-time = "2025-09-21T20:01:55.2Z" }, + { url = "https://files.pythonhosted.org/packages/9a/94/b765c1abcb613d103b64fcf10395f54d69b0ef8be6a0dd9c524384892cc7/coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d", size = 218320, upload-time = "2025-09-21T20:01:56.629Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/4f/732fff31c119bb73b35236dd333030f32c4bfe909f445b423e6c7594f9a2/coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b", size = 218575, upload-time = "2025-09-21T20:01:58.203Z" }, + { url = "https://files.pythonhosted.org/packages/87/02/ae7e0af4b674be47566707777db1aa375474f02a1d64b9323e5813a6cdd5/coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e", size = 249568, upload-time = "2025-09-21T20:01:59.748Z" }, + { url = "https://files.pythonhosted.org/packages/a2/77/8c6d22bf61921a59bce5471c2f1f7ac30cd4ac50aadde72b8c48d5727902/coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b", size = 252174, upload-time = "2025-09-21T20:02:01.192Z" }, + { url = "https://files.pythonhosted.org/packages/b1/20/b6ea4f69bbb52dac0aebd62157ba6a9dddbfe664f5af8122dac296c3ee15/coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49", size = 253447, upload-time = "2025-09-21T20:02:02.701Z" }, + { url = "https://files.pythonhosted.org/packages/f9/28/4831523ba483a7f90f7b259d2018fef02cb4d5b90bc7c1505d6e5a84883c/coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911", size = 249779, upload-time = "2025-09-21T20:02:04.185Z" }, + { url = "https://files.pythonhosted.org/packages/a7/9f/4331142bc98c10ca6436d2d620c3e165f31e6c58d43479985afce6f3191c/coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0", size = 251604, upload-time = 
"2025-09-21T20:02:06.034Z" }, + { url = "https://files.pythonhosted.org/packages/ce/60/bda83b96602036b77ecf34e6393a3836365481b69f7ed7079ab85048202b/coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f", size = 249497, upload-time = "2025-09-21T20:02:07.619Z" }, + { url = "https://files.pythonhosted.org/packages/5f/af/152633ff35b2af63977edd835d8e6430f0caef27d171edf2fc76c270ef31/coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c", size = 249350, upload-time = "2025-09-21T20:02:10.34Z" }, + { url = "https://files.pythonhosted.org/packages/9d/71/d92105d122bd21cebba877228990e1646d862e34a98bb3374d3fece5a794/coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f", size = 251111, upload-time = "2025-09-21T20:02:12.122Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9e/9fdb08f4bf476c912f0c3ca292e019aab6712c93c9344a1653986c3fd305/coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698", size = 220746, upload-time = "2025-09-21T20:02:13.919Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b1/a75fd25df44eab52d1931e89980d1ada46824c7a3210be0d3c88a44aaa99/coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843", size = 221541, upload-time = "2025-09-21T20:02:15.57Z" }, + { url = "https://files.pythonhosted.org/packages/14/3a/d720d7c989562a6e9a14b2c9f5f2876bdb38e9367126d118495b89c99c37/coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546", size = 220170, upload-time = "2025-09-21T20:02:17.395Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/22/e04514bf2a735d8b0add31d2b4ab636fc02370730787c576bb995390d2d5/coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c", size = 219029, upload-time = "2025-09-21T20:02:18.936Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/91128e099035ece15da3445d9015e4b4153a6059403452d324cbb0a575fa/coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15", size = 219259, upload-time = "2025-09-21T20:02:20.44Z" }, + { url = "https://files.pythonhosted.org/packages/8b/51/66420081e72801536a091a0c8f8c1f88a5c4bf7b9b1bdc6222c7afe6dc9b/coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4", size = 260592, upload-time = "2025-09-21T20:02:22.313Z" }, + { url = "https://files.pythonhosted.org/packages/5d/22/9b8d458c2881b22df3db5bb3e7369e63d527d986decb6c11a591ba2364f7/coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0", size = 262768, upload-time = "2025-09-21T20:02:24.287Z" }, + { url = "https://files.pythonhosted.org/packages/f7/08/16bee2c433e60913c610ea200b276e8eeef084b0d200bdcff69920bd5828/coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0", size = 264995, upload-time = "2025-09-21T20:02:26.133Z" }, + { url = "https://files.pythonhosted.org/packages/20/9d/e53eb9771d154859b084b90201e5221bca7674ba449a17c101a5031d4054/coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65", size = 259546, upload-time = 
"2025-09-21T20:02:27.716Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b0/69bc7050f8d4e56a89fb550a1577d5d0d1db2278106f6f626464067b3817/coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541", size = 262544, upload-time = "2025-09-21T20:02:29.216Z" }, + { url = "https://files.pythonhosted.org/packages/ef/4b/2514b060dbd1bc0aaf23b852c14bb5818f244c664cb16517feff6bb3a5ab/coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6", size = 260308, upload-time = "2025-09-21T20:02:31.226Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/7ba2175007c246d75e496f64c06e94122bdb914790a1285d627a918bd271/coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999", size = 258920, upload-time = "2025-09-21T20:02:32.823Z" }, + { url = "https://files.pythonhosted.org/packages/c0/b3/fac9f7abbc841409b9a410309d73bfa6cfb2e51c3fada738cb607ce174f8/coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2", size = 261434, upload-time = "2025-09-21T20:02:34.86Z" }, + { url = "https://files.pythonhosted.org/packages/ee/51/a03bec00d37faaa891b3ff7387192cef20f01604e5283a5fabc95346befa/coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a", size = 221403, upload-time = "2025-09-21T20:02:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/53/22/3cf25d614e64bf6d8e59c7c669b20d6d940bb337bdee5900b9ca41c820bb/coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb", size = 222469, upload-time = "2025-09-21T20:02:39.011Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/a1/00164f6d30d8a01c3c9c48418a7a5be394de5349b421b9ee019f380df2a0/coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb", size = 220731, upload-time = "2025-09-21T20:02:40.939Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/5844ab4ca6a4dd97a1850e030a15ec7d292b5c5cb93082979225126e35dd/coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520", size = 218302, upload-time = "2025-09-21T20:02:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/f0/89/673f6514b0961d1f0e20ddc242e9342f6da21eaba3489901b565c0689f34/coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32", size = 218578, upload-time = "2025-09-21T20:02:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/05/e8/261cae479e85232828fb17ad536765c88dd818c8470aca690b0ac6feeaa3/coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f", size = 249629, upload-time = "2025-09-21T20:02:46.503Z" }, + { url = "https://files.pythonhosted.org/packages/82/62/14ed6546d0207e6eda876434e3e8475a3e9adbe32110ce896c9e0c06bb9a/coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a", size = 252162, upload-time = "2025-09-21T20:02:48.689Z" }, + { url = "https://files.pythonhosted.org/packages/ff/49/07f00db9ac6478e4358165a08fb41b469a1b053212e8a00cb02f0d27a05f/coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360", size = 253517, upload-time = "2025-09-21T20:02:50.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/a2/59/c5201c62dbf165dfbc91460f6dbbaa85a8b82cfa6131ac45d6c1bfb52deb/coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69", size = 249632, upload-time = "2025-09-21T20:02:51.971Z" }, + { url = "https://files.pythonhosted.org/packages/07/ae/5920097195291a51fb00b3a70b9bbd2edbfe3c84876a1762bd1ef1565ebc/coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14", size = 251520, upload-time = "2025-09-21T20:02:53.858Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3c/a815dde77a2981f5743a60b63df31cb322c944843e57dbd579326625a413/coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe", size = 249455, upload-time = "2025-09-21T20:02:55.807Z" }, + { url = "https://files.pythonhosted.org/packages/aa/99/f5cdd8421ea656abefb6c0ce92556709db2265c41e8f9fc6c8ae0f7824c9/coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e", size = 249287, upload-time = "2025-09-21T20:02:57.784Z" }, + { url = "https://files.pythonhosted.org/packages/c3/7a/e9a2da6a1fc5d007dd51fca083a663ab930a8c4d149c087732a5dbaa0029/coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd", size = 250946, upload-time = "2025-09-21T20:02:59.431Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5b/0b5799aa30380a949005a353715095d6d1da81927d6dbed5def2200a4e25/coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2", size = 221009, upload-time = "2025-09-21T20:03:01.324Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/b0/e802fbb6eb746de006490abc9bb554b708918b6774b722bb3a0e6aa1b7de/coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681", size = 221804, upload-time = "2025-09-21T20:03:03.4Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e8/71d0c8e374e31f39e3389bb0bd19e527d46f00ea8571ec7ec8fd261d8b44/coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880", size = 220384, upload-time = "2025-09-21T20:03:05.111Z" }, + { url = "https://files.pythonhosted.org/packages/62/09/9a5608d319fa3eba7a2019addeacb8c746fb50872b57a724c9f79f146969/coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63", size = 219047, upload-time = "2025-09-21T20:03:06.795Z" }, + { url = "https://files.pythonhosted.org/packages/f5/6f/f58d46f33db9f2e3647b2d0764704548c184e6f5e014bef528b7f979ef84/coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2", size = 219266, upload-time = "2025-09-21T20:03:08.495Z" }, + { url = "https://files.pythonhosted.org/packages/74/5c/183ffc817ba68e0b443b8c934c8795553eb0c14573813415bd59941ee165/coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d", size = 260767, upload-time = "2025-09-21T20:03:10.172Z" }, + { url = "https://files.pythonhosted.org/packages/0f/48/71a8abe9c1ad7e97548835e3cc1adbf361e743e9d60310c5f75c9e7bf847/coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0", size = 262931, upload-time = "2025-09-21T20:03:11.861Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/fd/193a8fb132acfc0a901f72020e54be5e48021e1575bb327d8ee1097a28fd/coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699", size = 265186, upload-time = "2025-09-21T20:03:13.539Z" }, + { url = "https://files.pythonhosted.org/packages/b1/8f/74ecc30607dd95ad50e3034221113ccb1c6d4e8085cc761134782995daae/coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9", size = 259470, upload-time = "2025-09-21T20:03:15.584Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/79ff53a769f20d71b07023ea115c9167c0bb56f281320520cf64c5298a96/coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f", size = 262626, upload-time = "2025-09-21T20:03:17.673Z" }, + { url = "https://files.pythonhosted.org/packages/88/e2/dac66c140009b61ac3fc13af673a574b00c16efdf04f9b5c740703e953c0/coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1", size = 260386, upload-time = "2025-09-21T20:03:19.36Z" }, + { url = "https://files.pythonhosted.org/packages/a2/f1/f48f645e3f33bb9ca8a496bc4a9671b52f2f353146233ebd7c1df6160440/coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0", size = 258852, upload-time = "2025-09-21T20:03:21.007Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3b/8442618972c51a7affeead957995cfa8323c0c9bcf8fa5a027421f720ff4/coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399", size = 261534, upload-time = "2025-09-21T20:03:23.12Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/dc/101f3fa3a45146db0cb03f5b4376e24c0aac818309da23e2de0c75295a91/coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235", size = 221784, upload-time = "2025-09-21T20:03:24.769Z" }, + { url = "https://files.pythonhosted.org/packages/4c/a1/74c51803fc70a8a40d7346660379e144be772bab4ac7bb6e6b905152345c/coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d", size = 222905, upload-time = "2025-09-21T20:03:26.93Z" }, + { url = "https://files.pythonhosted.org/packages/12/65/f116a6d2127df30bcafbceef0302d8a64ba87488bf6f73a6d8eebf060873/coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a", size = 220922, upload-time = "2025-09-21T20:03:28.672Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/d1c25053764b4c42eb294aae92ab617d2e4f803397f9c7c8295caa77a260/coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3", size = 217978, upload-time = "2025-09-21T20:03:30.362Z" }, + { url = "https://files.pythonhosted.org/packages/52/2f/b9f9daa39b80ece0b9548bbb723381e29bc664822d9a12c2135f8922c22b/coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c", size = 218370, upload-time = "2025-09-21T20:03:32.147Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6e/30d006c3b469e58449650642383dddf1c8fb63d44fdf92994bfd46570695/coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396", size = 244802, upload-time = "2025-09-21T20:03:33.919Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/49/8a070782ce7e6b94ff6a0b6d7c65ba6bc3091d92a92cef4cd4eb0767965c/coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40", size = 246625, upload-time = "2025-09-21T20:03:36.09Z" }, + { url = "https://files.pythonhosted.org/packages/6a/92/1c1c5a9e8677ce56d42b97bdaca337b2d4d9ebe703d8c174ede52dbabd5f/coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594", size = 248399, upload-time = "2025-09-21T20:03:38.342Z" }, + { url = "https://files.pythonhosted.org/packages/c0/54/b140edee7257e815de7426d5d9846b58505dffc29795fff2dfb7f8a1c5a0/coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a", size = 245142, upload-time = "2025-09-21T20:03:40.591Z" }, + { url = "https://files.pythonhosted.org/packages/e4/9e/6d6b8295940b118e8b7083b29226c71f6154f7ff41e9ca431f03de2eac0d/coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b", size = 246284, upload-time = "2025-09-21T20:03:42.355Z" }, + { url = "https://files.pythonhosted.org/packages/db/e5/5e957ca747d43dbe4d9714358375c7546cb3cb533007b6813fc20fce37ad/coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3", size = 244353, upload-time = "2025-09-21T20:03:44.218Z" }, + { url = "https://files.pythonhosted.org/packages/9a/45/540fc5cc92536a1b783b7ef99450bd55a4b3af234aae35a18a339973ce30/coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0", size = 244430, upload-time = "2025-09-21T20:03:46.065Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/0b/8287b2e5b38c8fe15d7e3398849bb58d382aedc0864ea0fa1820e8630491/coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f", size = 245311, upload-time = "2025-09-21T20:03:48.19Z" }, + { url = "https://files.pythonhosted.org/packages/0c/1d/29724999984740f0c86d03e6420b942439bf5bd7f54d4382cae386a9d1e9/coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431", size = 220500, upload-time = "2025-09-21T20:03:50.024Z" }, + { url = "https://files.pythonhosted.org/packages/43/11/4b1e6b129943f905ca54c339f343877b55b365ae2558806c1be4f7476ed5/coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07", size = 221408, upload-time = "2025-09-21T20:03:51.803Z" }, + { url = "https://files.pythonhosted.org/packages/ec/16/114df1c291c22cac3b0c127a73e0af5c12ed7bbb6558d310429a0ae24023/coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260", size = 209952, upload-time = "2025-09-21T20:03:53.918Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version == '3.9.*'" }, +] + +[[package]] +name = "coverage" +version = "7.11.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/95/c49df0aceb5507a80b9fe5172d3d39bf23f05be40c23c8d77d556df96cec/coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31", size = 215800, upload-time = "2025-10-15T15:12:19.824Z" }, + { url = "https://files.pythonhosted.org/packages/dc/c6/7bb46ce01ed634fff1d7bb53a54049f539971862cc388b304ff3c51b4f66/coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075", size = 216198, upload-time = "2025-10-15T15:12:22.549Z" }, + { url = "https://files.pythonhosted.org/packages/94/b2/75d9d8fbf2900268aca5de29cd0a0fe671b0f69ef88be16767cc3c828b85/coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab", size = 242953, upload-time = "2025-10-15T15:12:24.139Z" }, + { url = "https://files.pythonhosted.org/packages/65/ac/acaa984c18f440170525a8743eb4b6c960ace2dbad80dc22056a437fc3c6/coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0", size = 244766, upload-time = "2025-10-15T15:12:25.974Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0d/938d0bff76dfa4a6b228c3fc4b3e1c0e2ad4aa6200c141fcda2bd1170227/coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785", size = 246625, upload-time = "2025-10-15T15:12:27.387Z" }, + { url = "https://files.pythonhosted.org/packages/38/54/8f5f5e84bfa268df98f46b2cb396b1009734cfb1e5d6adb663d284893b32/coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591", size = 243568, upload-time = "2025-10-15T15:12:28.799Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/30/8ba337c2877fe3f2e1af0ed7ff4be0c0c4aca44d6f4007040f3ca2255e99/coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088", size = 244665, upload-time = "2025-10-15T15:12:30.297Z" }, + { url = "https://files.pythonhosted.org/packages/cc/fb/c6f1d6d9a665536b7dde2333346f0cc41dc6a60bd1ffc10cd5c33e7eb000/coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f", size = 242681, upload-time = "2025-10-15T15:12:32.326Z" }, + { url = "https://files.pythonhosted.org/packages/be/38/1b532319af5f991fa153c20373291dc65c2bf532af7dbcffdeef745c8f79/coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866", size = 242912, upload-time = "2025-10-15T15:12:34.079Z" }, + { url = "https://files.pythonhosted.org/packages/67/3d/f39331c60ef6050d2a861dc1b514fa78f85f792820b68e8c04196ad733d6/coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841", size = 243559, upload-time = "2025-10-15T15:12:35.809Z" }, + { url = "https://files.pythonhosted.org/packages/4b/55/cb7c9df9d0495036ce582a8a2958d50c23cd73f84a23284bc23bd4711a6f/coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf", size = 218266, upload-time = "2025-10-15T15:12:37.429Z" }, + { url = "https://files.pythonhosted.org/packages/68/a8/b79cb275fa7bd0208767f89d57a1b5f6ba830813875738599741b97c2e04/coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969", size = 219169, upload-time = "2025-10-15T15:12:39.25Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/3a/ee1074c15c408ddddddb1db7dd904f6b81bc524e01f5a1c5920e13dbde23/coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847", size = 215912, upload-time = "2025-10-15T15:12:40.665Z" }, + { url = "https://files.pythonhosted.org/packages/70/c4/9f44bebe5cb15f31608597b037d78799cc5f450044465bcd1ae8cb222fe1/coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc", size = 216310, upload-time = "2025-10-15T15:12:42.461Z" }, + { url = "https://files.pythonhosted.org/packages/42/01/5e06077cfef92d8af926bdd86b84fb28bf9bc6ad27343d68be9b501d89f2/coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0", size = 246706, upload-time = "2025-10-15T15:12:44.001Z" }, + { url = "https://files.pythonhosted.org/packages/40/b8/7a3f1f33b35cc4a6c37e759137533119560d06c0cc14753d1a803be0cd4a/coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7", size = 248634, upload-time = "2025-10-15T15:12:45.768Z" }, + { url = "https://files.pythonhosted.org/packages/7a/41/7f987eb33de386bc4c665ab0bf98d15fcf203369d6aacae74f5dd8ec489a/coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623", size = 250741, upload-time = "2025-10-15T15:12:47.222Z" }, + { url = "https://files.pythonhosted.org/packages/23/c1/a4e0ca6a4e83069fb8216b49b30a7352061ca0cb38654bd2dc96b7b3b7da/coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287", size = 246837, upload-time = 
"2025-10-15T15:12:48.904Z" }, + { url = "https://files.pythonhosted.org/packages/5d/03/ced062a17f7c38b4728ff76c3acb40d8465634b20b4833cdb3cc3a74e115/coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552", size = 248429, upload-time = "2025-10-15T15:12:50.73Z" }, + { url = "https://files.pythonhosted.org/packages/97/af/a7c6f194bb8c5a2705ae019036b8fe7f49ea818d638eedb15fdb7bed227c/coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de", size = 246490, upload-time = "2025-10-15T15:12:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/ab/c3/aab4df02b04a8fde79068c3c41ad7a622b0ef2b12e1ed154da986a727c3f/coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601", size = 246208, upload-time = "2025-10-15T15:12:54.586Z" }, + { url = "https://files.pythonhosted.org/packages/30/d8/e282ec19cd658238d60ed404f99ef2e45eed52e81b866ab1518c0d4163cf/coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e", size = 247126, upload-time = "2025-10-15T15:12:56.485Z" }, + { url = "https://files.pythonhosted.org/packages/d1/17/a635fa07fac23adb1a5451ec756216768c2767efaed2e4331710342a3399/coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c", size = 218314, upload-time = "2025-10-15T15:12:58.365Z" }, + { url = "https://files.pythonhosted.org/packages/2a/29/2ac1dfcdd4ab9a70026edc8d715ece9b4be9a1653075c658ee6f271f394d/coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9", size = 219203, upload-time = "2025-10-15T15:12:59.902Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/21/5ce8b3a0133179115af4c041abf2ee652395837cb896614beb8ce8ddcfd9/coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745", size = 217879, upload-time = "2025-10-15T15:13:01.35Z" }, + { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" }, + { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" }, + { url = "https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" }, + { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" }, + { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = "2025-10-15T15:13:14.554Z" }, + { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" }, + { url = "https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" }, + { url = 
"https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" }, + { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" }, + { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" }, + { url = "https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" }, + { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" }, + { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" }, + { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = "2025-10-15T15:13:38.425Z" }, + { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = "2025-10-15T15:13:40.464Z" }, + { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" }, + { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" }, + { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" }, + { url = "https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" }, + { url = "https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" }, + { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" }, + { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = "2025-10-15T15:14:00.409Z" }, + { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = "2025-10-15T15:14:02.188Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" }, + { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" }, + { url 
= "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" }, + { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = "2025-10-15T15:14:13.46Z" }, + { url = "https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" }, + { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" }, + { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" }, + { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" }, + { url = "https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = "2025-10-15T15:14:24.205Z" }, + { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" }, + { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = 
"2025-10-15T15:14:28.42Z" }, + { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" }, + { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = "2025-10-15T15:14:32.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size = 218793, upload-time = "2025-10-15T15:14:34.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" }, + { url = "https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" }, + { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" }, + { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" }, + { url = "https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" }, + { url = "https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" }, + { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = 
"2025-10-15T15:14:52.413Z" }, + { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, upload-time = "2025-10-15T15:14:54.268Z" }, + { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" }, + { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" }, + { url = "https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version >= '3.10' and python_full_version <= '3.11'" }, +] + +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9' and python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037, upload-time = "2024-09-17T19:02:01.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163, upload-time = "2024-09-17T19:02:00.268Z" }, +] + +[[package]] +name = "filelock" +version = "3.19.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/40/bb/0ab3e58d22305b6f5440629d20683af28959bf793d98d11950e305c1c326/filelock-3.19.1.tar.gz", hash = "sha256:66eda1888b0171c998b35be2bcc0f6d75c388a7ce20c3f3f37aa8e96c2dddf58", size = 17687, upload-time = "2025-08-14T16:56:03.016Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" }, +] + +[[package]] +name = "filelock" +version = "3.20.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url 
= "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/8f/ed/0f4cec13a93c02c47ec32d81d11c0c1efbadf4a471e3f3ce7cad366cbbd3/frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817", size = 39930, upload-time = "2024-10-23T09:48:29.903Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/79/29d44c4af36b2b240725dce566b20f63f9b36ef267aaaa64ee7466f4f2f8/frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a", size = 94451, upload-time = "2024-10-23T09:46:20.558Z" }, + { url = "https://files.pythonhosted.org/packages/47/47/0c999aeace6ead8a44441b4f4173e2261b18219e4ad1fe9a479871ca02fc/frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb", size = 54301, upload-time = "2024-10-23T09:46:21.759Z" }, + { url = "https://files.pythonhosted.org/packages/8d/60/107a38c1e54176d12e06e9d4b5d755b677d71d1219217cee063911b1384f/frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec", size = 52213, upload-time = "2024-10-23T09:46:22.993Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/62/594a6829ac5679c25755362a9dc93486a8a45241394564309641425d3ff6/frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5", size = 240946, upload-time = "2024-10-23T09:46:24.661Z" }, + { url = "https://files.pythonhosted.org/packages/7e/75/6c8419d8f92c80dd0ee3f63bdde2702ce6398b0ac8410ff459f9b6f2f9cb/frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76", size = 264608, upload-time = "2024-10-23T09:46:26.017Z" }, + { url = "https://files.pythonhosted.org/packages/88/3e/82a6f0b84bc6fb7e0be240e52863c6d4ab6098cd62e4f5b972cd31e002e8/frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17", size = 261361, upload-time = "2024-10-23T09:46:27.787Z" }, + { url = "https://files.pythonhosted.org/packages/fd/85/14e5f9ccac1b64ff2f10c927b3ffdf88772aea875882406f9ba0cec8ad84/frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba", size = 231649, upload-time = "2024-10-23T09:46:28.992Z" }, + { url = "https://files.pythonhosted.org/packages/ee/59/928322800306f6529d1852323014ee9008551e9bb027cc38d276cbc0b0e7/frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d", size = 241853, upload-time = "2024-10-23T09:46:30.211Z" }, + { url = "https://files.pythonhosted.org/packages/7d/bd/e01fa4f146a6f6c18c5d34cab8abdc4013774a26c4ff851128cd1bd3008e/frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2", size = 243652, upload-time = "2024-10-23T09:46:31.758Z" }, + { url = "https://files.pythonhosted.org/packages/a5/bd/e4771fd18a8ec6757033f0fa903e447aecc3fbba54e3630397b61596acf0/frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f", size = 241734, upload-time = "2024-10-23T09:46:33.044Z" }, + { url = "https://files.pythonhosted.org/packages/21/13/c83821fa5544af4f60c5d3a65d054af3213c26b14d3f5f48e43e5fb48556/frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c", size = 260959, upload-time = "2024-10-23T09:46:34.916Z" }, + { url = "https://files.pythonhosted.org/packages/71/f3/1f91c9a9bf7ed0e8edcf52698d23f3c211d8d00291a53c9f115ceb977ab1/frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab", size = 262706, upload-time = "2024-10-23T09:46:36.159Z" }, + { url = "https://files.pythonhosted.org/packages/4c/22/4a256fdf5d9bcb3ae32622c796ee5ff9451b3a13a68cfe3f68e2c95588ce/frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5", size = 250401, upload-time = "2024-10-23T09:46:37.327Z" }, + { url = "https://files.pythonhosted.org/packages/af/89/c48ebe1f7991bd2be6d5f4ed202d94960c01b3017a03d6954dd5fa9ea1e8/frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb", size = 45498, upload-time = "2024-10-23T09:46:38.552Z" }, + { url = "https://files.pythonhosted.org/packages/28/2f/cc27d5f43e023d21fe5c19538e08894db3d7e081cbf582ad5ed366c24446/frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4", size = 51622, upload-time = "2024-10-23T09:46:39.513Z" }, + { 
url = "https://files.pythonhosted.org/packages/79/43/0bed28bf5eb1c9e4301003b74453b8e7aa85fb293b31dde352aac528dafc/frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30", size = 94987, upload-time = "2024-10-23T09:46:40.487Z" }, + { url = "https://files.pythonhosted.org/packages/bb/bf/b74e38f09a246e8abbe1e90eb65787ed745ccab6eaa58b9c9308e052323d/frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5", size = 54584, upload-time = "2024-10-23T09:46:41.463Z" }, + { url = "https://files.pythonhosted.org/packages/2c/31/ab01375682f14f7613a1ade30149f684c84f9b8823a4391ed950c8285656/frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778", size = 52499, upload-time = "2024-10-23T09:46:42.451Z" }, + { url = "https://files.pythonhosted.org/packages/98/a8/d0ac0b9276e1404f58fec3ab6e90a4f76b778a49373ccaf6a563f100dfbc/frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a", size = 276357, upload-time = "2024-10-23T09:46:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/ad/c9/c7761084fa822f07dac38ac29f841d4587570dd211e2262544aa0b791d21/frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869", size = 287516, upload-time = "2024-10-23T09:46:45.369Z" }, + { url = "https://files.pythonhosted.org/packages/a1/ff/cd7479e703c39df7bdab431798cef89dc75010d8aa0ca2514c5b9321db27/frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d", size = 283131, upload-time = "2024-10-23T09:46:46.654Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/a0/370941beb47d237eca4fbf27e4e91389fd68699e6f4b0ebcc95da463835b/frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45", size = 261320, upload-time = "2024-10-23T09:46:47.825Z" }, + { url = "https://files.pythonhosted.org/packages/b8/5f/c10123e8d64867bc9b4f2f510a32042a306ff5fcd7e2e09e5ae5100ee333/frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d", size = 274877, upload-time = "2024-10-23T09:46:48.989Z" }, + { url = "https://files.pythonhosted.org/packages/fa/79/38c505601ae29d4348f21706c5d89755ceded02a745016ba2f58bd5f1ea6/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3", size = 269592, upload-time = "2024-10-23T09:46:50.235Z" }, + { url = "https://files.pythonhosted.org/packages/19/e2/39f3a53191b8204ba9f0bb574b926b73dd2efba2a2b9d2d730517e8f7622/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a", size = 265934, upload-time = "2024-10-23T09:46:51.829Z" }, + { url = "https://files.pythonhosted.org/packages/d5/c9/3075eb7f7f3a91f1a6b00284af4de0a65a9ae47084930916f5528144c9dd/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9", size = 283859, upload-time = "2024-10-23T09:46:52.947Z" }, + { url = "https://files.pythonhosted.org/packages/05/f5/549f44d314c29408b962fa2b0e69a1a67c59379fb143b92a0a065ffd1f0f/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2", size = 287560, upload-time = "2024-10-23T09:46:54.162Z" }, + { 
url = "https://files.pythonhosted.org/packages/9d/f8/cb09b3c24a3eac02c4c07a9558e11e9e244fb02bf62c85ac2106d1eb0c0b/frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf", size = 277150, upload-time = "2024-10-23T09:46:55.361Z" }, + { url = "https://files.pythonhosted.org/packages/37/48/38c2db3f54d1501e692d6fe058f45b6ad1b358d82cd19436efab80cfc965/frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942", size = 45244, upload-time = "2024-10-23T09:46:56.578Z" }, + { url = "https://files.pythonhosted.org/packages/ca/8c/2ddffeb8b60a4bce3b196c32fcc30d8830d4615e7b492ec2071da801b8ad/frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d", size = 51634, upload-time = "2024-10-23T09:46:57.6Z" }, + { url = "https://files.pythonhosted.org/packages/79/73/fa6d1a96ab7fd6e6d1c3500700963eab46813847f01ef0ccbaa726181dd5/frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21", size = 94026, upload-time = "2024-10-23T09:46:58.601Z" }, + { url = "https://files.pythonhosted.org/packages/ab/04/ea8bf62c8868b8eada363f20ff1b647cf2e93377a7b284d36062d21d81d1/frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d", size = 54150, upload-time = "2024-10-23T09:46:59.608Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9a/8e479b482a6f2070b26bda572c5e6889bb3ba48977e81beea35b5ae13ece/frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e", size = 51927, upload-time = "2024-10-23T09:47:00.625Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/12/2aad87deb08a4e7ccfb33600871bbe8f0e08cb6d8224371387f3303654d7/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a", size = 282647, upload-time = "2024-10-23T09:47:01.992Z" }, + { url = "https://files.pythonhosted.org/packages/77/f2/07f06b05d8a427ea0060a9cef6e63405ea9e0d761846b95ef3fb3be57111/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a", size = 289052, upload-time = "2024-10-23T09:47:04.039Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9f/8bf45a2f1cd4aa401acd271b077989c9267ae8463e7c8b1eb0d3f561b65e/frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee", size = 291719, upload-time = "2024-10-23T09:47:05.58Z" }, + { url = "https://files.pythonhosted.org/packages/41/d1/1f20fd05a6c42d3868709b7604c9f15538a29e4f734c694c6bcfc3d3b935/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6", size = 267433, upload-time = "2024-10-23T09:47:07.807Z" }, + { url = "https://files.pythonhosted.org/packages/af/f2/64b73a9bb86f5a89fb55450e97cd5c1f84a862d4ff90d9fd1a73ab0f64a5/frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e", size = 283591, upload-time = "2024-10-23T09:47:09.645Z" }, + { url = "https://files.pythonhosted.org/packages/29/e2/ffbb1fae55a791fd6c2938dd9ea779509c977435ba3940b9f2e8dc9d5316/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9", size = 273249, upload-time = "2024-10-23T09:47:10.808Z" }, + { url = "https://files.pythonhosted.org/packages/2e/6e/008136a30798bb63618a114b9321b5971172a5abddff44a100c7edc5ad4f/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039", size = 271075, upload-time = "2024-10-23T09:47:11.938Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f0/4e71e54a026b06724cec9b6c54f0b13a4e9e298cc8db0f82ec70e151f5ce/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784", size = 285398, upload-time = "2024-10-23T09:47:14.071Z" }, + { url = "https://files.pythonhosted.org/packages/4d/36/70ec246851478b1c0b59f11ef8ade9c482ff447c1363c2bd5fad45098b12/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631", size = 294445, upload-time = "2024-10-23T09:47:15.318Z" }, + { url = "https://files.pythonhosted.org/packages/37/e0/47f87544055b3349b633a03c4d94b405956cf2437f4ab46d0928b74b7526/frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f", size = 280569, upload-time = "2024-10-23T09:47:17.149Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7c/490133c160fb6b84ed374c266f42800e33b50c3bbab1652764e6e1fc498a/frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8", size = 44721, upload-time = "2024-10-23T09:47:19.012Z" }, + { url = "https://files.pythonhosted.org/packages/b1/56/4e45136ffc6bdbfa68c29ca56ef53783ef4c2fd395f7cbf99a2624aa9aaa/frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f", size = 51329, upload-time = "2024-10-23T09:47:20.177Z" }, + { 
url = "https://files.pythonhosted.org/packages/da/3b/915f0bca8a7ea04483622e84a9bd90033bab54bdf485479556c74fd5eaf5/frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953", size = 91538, upload-time = "2024-10-23T09:47:21.176Z" }, + { url = "https://files.pythonhosted.org/packages/c7/d1/a7c98aad7e44afe5306a2b068434a5830f1470675f0e715abb86eb15f15b/frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0", size = 52849, upload-time = "2024-10-23T09:47:22.439Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/76f23bf9ab15d5f760eb48701909645f686f9c64fbb8982674c241fbef14/frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2", size = 50583, upload-time = "2024-10-23T09:47:23.44Z" }, + { url = "https://files.pythonhosted.org/packages/1f/22/462a3dd093d11df623179d7754a3b3269de3b42de2808cddef50ee0f4f48/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f", size = 265636, upload-time = "2024-10-23T09:47:24.82Z" }, + { url = "https://files.pythonhosted.org/packages/80/cf/e075e407fc2ae7328155a1cd7e22f932773c8073c1fc78016607d19cc3e5/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608", size = 270214, upload-time = "2024-10-23T09:47:26.156Z" }, + { url = "https://files.pythonhosted.org/packages/a1/58/0642d061d5de779f39c50cbb00df49682832923f3d2ebfb0fedf02d05f7f/frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b", size = 273905, upload-time = "2024-10-23T09:47:27.741Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/66/3fe0f5f8f2add5b4ab7aa4e199f767fd3b55da26e3ca4ce2cc36698e50c4/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840", size = 250542, upload-time = "2024-10-23T09:47:28.938Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/260791bde9198c87a465224e0e2bb62c4e716f5d198fc3a1dacc4895dbd1/frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439", size = 267026, upload-time = "2024-10-23T09:47:30.283Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a4/3d24f88c527f08f8d44ade24eaee83b2627793fa62fa07cbb7ff7a2f7d42/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de", size = 257690, upload-time = "2024-10-23T09:47:32.388Z" }, + { url = "https://files.pythonhosted.org/packages/de/9a/d311d660420b2beeff3459b6626f2ab4fb236d07afbdac034a4371fe696e/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641", size = 253893, upload-time = "2024-10-23T09:47:34.274Z" }, + { url = "https://files.pythonhosted.org/packages/c6/23/e491aadc25b56eabd0f18c53bb19f3cdc6de30b2129ee0bc39cd387cd560/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e", size = 267006, upload-time = "2024-10-23T09:47:35.499Z" }, + { url = "https://files.pythonhosted.org/packages/08/c4/ab918ce636a35fb974d13d666dcbe03969592aeca6c3ab3835acff01f79c/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9", size = 276157, upload-time = "2024-10-23T09:47:37.522Z" }, + { 
url = "https://files.pythonhosted.org/packages/c0/29/3b7a0bbbbe5a34833ba26f686aabfe982924adbdcafdc294a7a129c31688/frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03", size = 264642, upload-time = "2024-10-23T09:47:38.75Z" }, + { url = "https://files.pythonhosted.org/packages/ab/42/0595b3dbffc2e82d7fe658c12d5a5bafcd7516c6bf2d1d1feb5387caa9c1/frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c", size = 44914, upload-time = "2024-10-23T09:47:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/17/c4/b7db1206a3fea44bf3b838ca61deb6f74424a8a5db1dd53ecb21da669be6/frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28", size = 51167, upload-time = "2024-10-23T09:47:41.812Z" }, + { url = "https://files.pythonhosted.org/packages/33/b5/00fcbe8e7e7e172829bf4addc8227d8f599a3d5def3a4e9aa2b54b3145aa/frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca", size = 95648, upload-time = "2024-10-23T09:47:43.118Z" }, + { url = "https://files.pythonhosted.org/packages/1e/69/e4a32fc4b2fa8e9cb6bcb1bad9c7eeb4b254bc34da475b23f93264fdc306/frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10", size = 54888, upload-time = "2024-10-23T09:47:44.832Z" }, + { url = "https://files.pythonhosted.org/packages/76/a3/c08322a91e73d1199901a77ce73971cffa06d3c74974270ff97aed6e152a/frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604", size = 52975, upload-time = "2024-10-23T09:47:46.579Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/60/a315321d8ada167b578ff9d2edc147274ead6129523b3a308501b6621b4f/frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3", size = 241912, upload-time = "2024-10-23T09:47:47.687Z" }, + { url = "https://files.pythonhosted.org/packages/bd/d0/1f0980987bca4f94f9e8bae01980b23495ffc2e5049a3da4d9b7d2762bee/frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307", size = 259433, upload-time = "2024-10-23T09:47:49.339Z" }, + { url = "https://files.pythonhosted.org/packages/28/e7/d00600c072eec8f18a606e281afdf0e8606e71a4882104d0438429b02468/frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10", size = 255576, upload-time = "2024-10-23T09:47:50.519Z" }, + { url = "https://files.pythonhosted.org/packages/82/71/993c5f45dba7be347384ddec1ebc1b4d998291884e7690c06aa6ba755211/frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9", size = 233349, upload-time = "2024-10-23T09:47:53.197Z" }, + { url = "https://files.pythonhosted.org/packages/66/30/f9c006223feb2ac87f1826b57f2367b60aacc43092f562dab60d2312562e/frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99", size = 243126, upload-time = "2024-10-23T09:47:54.432Z" }, + { url = "https://files.pythonhosted.org/packages/b5/34/e4219c9343f94b81068d0018cbe37948e66c68003b52bf8a05e9509d09ec/frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c", size = 
241261, upload-time = "2024-10-23T09:47:56.01Z" }, + { url = "https://files.pythonhosted.org/packages/48/96/9141758f6a19f2061a51bb59b9907c92f9bda1ac7b2baaf67a6e352b280f/frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171", size = 240203, upload-time = "2024-10-23T09:47:57.337Z" }, + { url = "https://files.pythonhosted.org/packages/f9/71/0ef5970e68d181571a050958e84c76a061ca52f9c6f50257d9bfdd84c7f7/frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e", size = 267539, upload-time = "2024-10-23T09:47:58.874Z" }, + { url = "https://files.pythonhosted.org/packages/ab/bd/6e7d450c5d993b413591ad9cdab6dcdfa2c6ab2cd835b2b5c1cfeb0323bf/frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf", size = 268518, upload-time = "2024-10-23T09:48:00.771Z" }, + { url = "https://files.pythonhosted.org/packages/cc/3d/5a7c4dfff1ae57ca2cbbe9041521472ecd9446d49e7044a0e9bfd0200fd0/frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e", size = 248114, upload-time = "2024-10-23T09:48:02.625Z" }, + { url = "https://files.pythonhosted.org/packages/f7/41/2342ec4c714349793f1a1e7bd5c4aeec261e24e697fa9a5499350c3a2415/frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723", size = 45648, upload-time = "2024-10-23T09:48:03.895Z" }, + { url = "https://files.pythonhosted.org/packages/0c/90/85bb3547c327f5975078c1be018478d5e8d250a540c828f8f31a35d2a1bd/frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923", size = 51930, upload-time = "2024-10-23T09:48:05.293Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/4d/d94ff0fb0f5313902c132817c62d19cdc5bdcd0c195d392006ef4b779fc6/frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972", size = 95319, upload-time = "2024-10-23T09:48:06.405Z" }, + { url = "https://files.pythonhosted.org/packages/8c/1b/d90e554ca2b483d31cb2296e393f72c25bdc38d64526579e95576bfda587/frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336", size = 54749, upload-time = "2024-10-23T09:48:07.48Z" }, + { url = "https://files.pythonhosted.org/packages/f8/66/7fdecc9ef49f8db2aa4d9da916e4ecf357d867d87aea292efc11e1b2e932/frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f", size = 52718, upload-time = "2024-10-23T09:48:08.725Z" }, + { url = "https://files.pythonhosted.org/packages/08/04/e2fddc92135276e07addbc1cf413acffa0c2d848b3e54cacf684e146df49/frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f", size = 241756, upload-time = "2024-10-23T09:48:09.843Z" }, + { url = "https://files.pythonhosted.org/packages/c6/52/be5ff200815d8a341aee5b16b6b707355e0ca3652953852238eb92b120c2/frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6", size = 267718, upload-time = "2024-10-23T09:48:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/88/be/4bd93a58be57a3722fc544c36debdf9dcc6758f761092e894d78f18b8f20/frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411", size = 263494, upload-time = "2024-10-23T09:48:13.424Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/ba/58348b90193caa096ce9e9befea6ae67f38dabfd3aacb47e46137a6250a8/frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08", size = 232838, upload-time = "2024-10-23T09:48:14.792Z" }, + { url = "https://files.pythonhosted.org/packages/f6/33/9f152105227630246135188901373c4f322cc026565ca6215b063f4c82f4/frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2", size = 242912, upload-time = "2024-10-23T09:48:16.249Z" }, + { url = "https://files.pythonhosted.org/packages/a0/10/3db38fb3ccbafadd80a1b0d6800c987b0e3fe3ef2d117c6ced0246eea17a/frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d", size = 244763, upload-time = "2024-10-23T09:48:17.781Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cd/1df468fdce2f66a4608dffe44c40cdc35eeaa67ef7fd1d813f99a9a37842/frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b", size = 242841, upload-time = "2024-10-23T09:48:19.507Z" }, + { url = "https://files.pythonhosted.org/packages/ee/5f/16097a5ca0bb6b6779c02cc9379c72fe98d56115d4c54d059fb233168fb6/frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b", size = 263407, upload-time = "2024-10-23T09:48:21.467Z" }, + { url = "https://files.pythonhosted.org/packages/0f/f7/58cd220ee1c2248ee65a32f5b4b93689e3fe1764d85537eee9fc392543bc/frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0", size = 265083, upload-time = "2024-10-23T09:48:22.725Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/b8/49768980caabf81ac4a2d156008f7cbd0107e6b36d08a313bb31035d9201/frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c", size = 251564, upload-time = "2024-10-23T09:48:24.272Z" }, + { url = "https://files.pythonhosted.org/packages/cb/83/619327da3b86ef957ee7a0cbf3c166a09ed1e87a3f7f1ff487d7d0284683/frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3", size = 45691, upload-time = "2024-10-23T09:48:26.317Z" }, + { url = "https://files.pythonhosted.org/packages/8b/28/407bc34a745151ed2322c690b6e7d83d7101472e81ed76e1ebdac0b70a78/frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0", size = 51767, upload-time = "2024-10-23T09:48:27.427Z" }, + { url = "https://files.pythonhosted.org/packages/c6/c8/a5be5b7550c10858fcf9b0ea054baccab474da77d37f1e828ce043a3a5d4/frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3", size = 11901, upload-time = "2024-10-23T09:48:28.851Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/4a/557715d5047da48d54e659203b9335be7bfaafda2c3f627b7c47e0b3aaf3/frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011", size = 86230, upload-time = 
"2025-10-06T05:35:23.699Z" }, + { url = "https://files.pythonhosted.org/packages/a2/fb/c85f9fed3ea8fe8740e5b46a59cc141c23b842eca617da8876cfce5f760e/frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565", size = 49621, upload-time = "2025-10-06T05:35:25.341Z" }, + { url = "https://files.pythonhosted.org/packages/63/70/26ca3f06aace16f2352796b08704338d74b6d1a24ca38f2771afbb7ed915/frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad", size = 49889, upload-time = "2025-10-06T05:35:26.797Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ed/c7895fd2fde7f3ee70d248175f9b6cdf792fb741ab92dc59cd9ef3bd241b/frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2", size = 219464, upload-time = "2025-10-06T05:35:28.254Z" }, + { url = "https://files.pythonhosted.org/packages/6b/83/4d587dccbfca74cb8b810472392ad62bfa100bf8108c7223eb4c4fa2f7b3/frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186", size = 221649, upload-time = "2025-10-06T05:35:29.454Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c6/fd3b9cd046ec5fff9dab66831083bc2077006a874a2d3d9247dea93ddf7e/frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e", size = 219188, upload-time = "2025-10-06T05:35:30.951Z" }, + { url = "https://files.pythonhosted.org/packages/ce/80/6693f55eb2e085fc8afb28cf611448fb5b90e98e068fa1d1b8d8e66e5c7d/frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450", size = 231748, upload-time = "2025-10-06T05:35:32.101Z" }, + { url = "https://files.pythonhosted.org/packages/97/d6/e9459f7c5183854abd989ba384fe0cc1a0fb795a83c033f0571ec5933ca4/frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef", size = 236351, upload-time = "2025-10-06T05:35:33.834Z" }, + { url = "https://files.pythonhosted.org/packages/97/92/24e97474b65c0262e9ecd076e826bfd1d3074adcc165a256e42e7b8a7249/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4", size = 218767, upload-time = "2025-10-06T05:35:35.205Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bf/dc394a097508f15abff383c5108cb8ad880d1f64a725ed3b90d5c2fbf0bb/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff", size = 235887, upload-time = "2025-10-06T05:35:36.354Z" }, + { url = "https://files.pythonhosted.org/packages/40/90/25b201b9c015dbc999a5baf475a257010471a1fa8c200c843fd4abbee725/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c", size = 228785, upload-time = "2025-10-06T05:35:37.949Z" }, + { url = "https://files.pythonhosted.org/packages/84/f4/b5bc148df03082f05d2dd30c089e269acdbe251ac9a9cf4e727b2dbb8a3d/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f", size = 230312, upload-time = "2025-10-06T05:35:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/db/4b/87e95b5d15097c302430e647136b7d7ab2398a702390cf4c8601975709e7/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7", size = 217650, upload-time = "2025-10-06T05:35:40.377Z" }, + { url = "https://files.pythonhosted.org/packages/e5/70/78a0315d1fea97120591a83e0acd644da638c872f142fd72a6cebee825f3/frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a", size = 39659, upload-time = "2025-10-06T05:35:41.863Z" }, + { url = "https://files.pythonhosted.org/packages/66/aa/3f04523fb189a00e147e60c5b2205126118f216b0aa908035c45336e27e4/frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6", size = 43837, upload-time = "2025-10-06T05:35:43.205Z" }, + { url = "https://files.pythonhosted.org/packages/39/75/1135feecdd7c336938bd55b4dc3b0dfc46d85b9be12ef2628574b28de776/frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e", size = 39989, upload-time = "2025-10-06T05:35:44.596Z" }, + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = "https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { 
url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = 
"2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 
239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" 
}, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = 
"2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", 
size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + 
{ url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/c2/59/ae5cdac87a00962122ea37bb346d41b66aec05f9ce328fa2b9e216f8967b/frozenlist-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47", size = 86967, upload-time = "2025-10-06T05:37:55.607Z" }, + { url = "https://files.pythonhosted.org/packages/8a/10/17059b2db5a032fd9323c41c39e9d1f5f9d0c8f04d1e4e3e788573086e61/frozenlist-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca", size = 49984, upload-time = "2025-10-06T05:37:57.049Z" }, + { url = "https://files.pythonhosted.org/packages/4b/de/ad9d82ca8e5fa8f0c636e64606553c79e2b859ad253030b62a21fe9986f5/frozenlist-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068", size = 50240, upload-time = "2025-10-06T05:37:58.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/45/3dfb7767c2a67d123650122b62ce13c731b6c745bc14424eea67678b508c/frozenlist-1.8.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95", size = 219472, upload-time = "2025-10-06T05:37:59.239Z" }, + { url = "https://files.pythonhosted.org/packages/0b/bf/5bf23d913a741b960d5c1dac7c1985d8a2a1d015772b2d18ea168b08e7ff/frozenlist-1.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459", size = 221531, upload-time = "2025-10-06T05:38:00.521Z" }, + { url = "https://files.pythonhosted.org/packages/d0/03/27ec393f3b55860859f4b74cdc8c2a4af3dbf3533305e8eacf48a4fd9a54/frozenlist-1.8.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675", size = 219211, upload-time = "2025-10-06T05:38:01.842Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ad/0fd00c404fa73fe9b169429e9a972d5ed807973c40ab6b3cf9365a33d360/frozenlist-1.8.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61", size = 231775, upload-time = "2025-10-06T05:38:03.384Z" }, + { url = "https://files.pythonhosted.org/packages/8a/c3/86962566154cb4d2995358bc8331bfc4ea19d07db1a96f64935a1607f2b6/frozenlist-1.8.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6", size = 236631, upload-time = "2025-10-06T05:38:04.609Z" }, + { url = "https://files.pythonhosted.org/packages/ea/9e/6ffad161dbd83782d2c66dc4d378a9103b31770cb1e67febf43aea42d202/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5", size = 218632, upload-time = "2025-10-06T05:38:05.917Z" }, + { url = "https://files.pythonhosted.org/packages/58/b2/4677eee46e0a97f9b30735e6ad0bf6aba3e497986066eb68807ac85cf60f/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3", size = 235967, upload-time = "2025-10-06T05:38:07.614Z" }, + { url = "https://files.pythonhosted.org/packages/05/f3/86e75f8639c5a93745ca7addbbc9de6af56aebb930d233512b17e46f6493/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1", size = 228799, upload-time = "2025-10-06T05:38:08.845Z" }, + { url = "https://files.pythonhosted.org/packages/30/00/39aad3a7f0d98f5eb1d99a3c311215674ed87061aecee7851974b335c050/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178", size = 230566, upload-time = "2025-10-06T05:38:10.52Z" }, + { url = "https://files.pythonhosted.org/packages/0d/4d/aa144cac44568d137846ddc4d5210fb5d9719eb1d7ec6fa2728a54b5b94a/frozenlist-1.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda", size = 217715, upload-time = "2025-10-06T05:38:11.832Z" }, + { url = "https://files.pythonhosted.org/packages/64/4c/8f665921667509d25a0dd72540513bc86b356c95541686f6442a3283019f/frozenlist-1.8.0-cp39-cp39-win32.whl", hash = "sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087", size = 39933, upload-time = "2025-10-06T05:38:13.061Z" }, + { url = "https://files.pythonhosted.org/packages/79/bd/bcc926f87027fad5e59926ff12d136e1082a115025d33c032d1cd69ab377/frozenlist-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a", size = 44121, upload-time = "2025-10-06T05:38:14.572Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/07/9c2e4eb7584af4b705237b971b89a4155a8e57599c4483a131a39256a9a0/frozenlist-1.8.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103", size = 40312, upload-time = "2025-10-06T05:38:15.699Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "identify" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/29/bb/25024dbcc93516c492b75919e76f389bac754a3e4248682fba32b250c880/identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98", size = 99097, upload-time = "2024-09-14T23:50:32.513Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/0c/4ef72754c050979fdcc06c744715ae70ea37e734816bb6514f79df77a42f/identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", size = 98972, upload-time = "2024-09-14T23:50:30.747Z" }, +] + +[[package]] +name = "identify" +version = "2.6.15" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version == '3.9.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.9.*'", + "python_full_version < '3.9'", +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "isort" +version = "5.13.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/87/f9/c1eb8635a24e87ade2efce21e3ce8cd6b8630bb685ddc9cdaca1349b2eb5/isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", size = 175303, upload-time = "2023-12-13T20:37:26.124Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/b3/8def84f539e7d2289a02f0524b944b15d7c75dab7628bedf1c4f0992029c/isort-5.13.2-py3-none-any.whl", hash = 
"sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6", size = 92310, upload-time = "2023-12-13T20:37:23.244Z" }, +] + +[[package]] +name = "isort" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version == '3.9.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1e/82/fa43935523efdfcce6abbae9da7f372b627b27142c3419fcf13bf5b0c397/isort-6.1.0.tar.gz", hash = "sha256:9b8f96a14cfee0677e78e941ff62f03769a06d412aabb9e2a90487b3b7e8d481", size = 824325, upload-time = "2025-10-01T16:26:45.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/cc/9b681a170efab4868a032631dea1e8446d8ec718a7f657b94d49d1a12643/isort-6.1.0-py3-none-any.whl", hash = "sha256:58d8927ecce74e5087aef019f778d4081a3b6c98f15a80ba35782ca8a2097784", size = 94329, upload-time = "2025-10-01T16:26:43.291Z" }, +] + +[[package]] +name = "isort" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/63/53/4f3c058e3bace40282876f9b553343376ee687f3c35a525dc79dbd450f88/isort-7.0.0.tar.gz", hash = "sha256:5513527951aadb3ac4292a41a16cbc50dd1642432f5e8c20057d414bdafb4187", size = 805049, upload-time = "2025-10-11T13:30:59.107Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/ed/e3705d6d02b4f7aea715a353c8ce193efd0b5db13e204df895d38734c244/isort-7.0.0-py3-none-any.whl", hash = "sha256:1bcabac8bc3c36c7fb7b98a76c8abb18e0f841a3ba81decac7691008592499c1", size = 94672, upload-time = "2025-10-11T13:30:57.665Z" }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash 
= "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.9.*'", + "python_full_version < '3.9'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "mdurl", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "multidict" +version = "6.1.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d6/be/504b89a5e9ca731cd47487e91c469064f8ae5af93b7259758dcfc2b9c848/multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a", size = 64002, upload-time = "2024-09-09T23:49:38.163Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/68/259dee7fd14cf56a17c554125e534f6274c2860159692a414d0b402b9a6d/multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60", size = 48628, upload-time = "2024-09-09T23:47:18.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/79/53ba256069fe5386a4a9e80d4e12857ced9de295baf3e20c68cdda746e04/multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1", size = 29327, upload-time = "2024-09-09T23:47:20.224Z" }, + { url = "https://files.pythonhosted.org/packages/ff/10/71f1379b05b196dae749b5ac062e87273e3f11634f447ebac12a571d90ae/multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53", size = 29689, upload-time = "2024-09-09T23:47:21.667Z" }, + { url = "https://files.pythonhosted.org/packages/71/45/70bac4f87438ded36ad4793793c0095de6572d433d98575a5752629ef549/multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5", size = 126639, upload-time = "2024-09-09T23:47:23.333Z" }, + { url = "https://files.pythonhosted.org/packages/80/cf/17f35b3b9509b4959303c05379c4bfb0d7dd05c3306039fc79cf035bbac0/multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581", size = 134315, upload-time = "2024-09-09T23:47:24.99Z" }, + { url = "https://files.pythonhosted.org/packages/ef/1f/652d70ab5effb33c031510a3503d4d6efc5ec93153562f1ee0acdc895a57/multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56", size = 129471, upload-time = "2024-09-09T23:47:26.305Z" }, + { url = "https://files.pythonhosted.org/packages/a6/64/2dd6c4c681688c0165dea3975a6a4eab4944ea30f35000f8b8af1df3148c/multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429", size = 124585, upload-time = "2024-09-09T23:47:27.958Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/56/e6ee5459894c7e554b57ba88f7257dc3c3d2d379cb15baaa1e265b8c6165/multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748", size = 116957, upload-time = "2024-09-09T23:47:29.376Z" }, + { url = "https://files.pythonhosted.org/packages/36/9e/616ce5e8d375c24b84f14fc263c7ef1d8d5e8ef529dbc0f1df8ce71bb5b8/multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db", size = 128609, upload-time = "2024-09-09T23:47:31.038Z" }, + { url = "https://files.pythonhosted.org/packages/8c/4f/4783e48a38495d000f2124020dc96bacc806a4340345211b1ab6175a6cb4/multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056", size = 123016, upload-time = "2024-09-09T23:47:32.47Z" }, + { url = "https://files.pythonhosted.org/packages/3e/b3/4950551ab8fc39862ba5e9907dc821f896aa829b4524b4deefd3e12945ab/multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76", size = 133542, upload-time = "2024-09-09T23:47:34.103Z" }, + { url = "https://files.pythonhosted.org/packages/96/4d/f0ce6ac9914168a2a71df117935bb1f1781916acdecbb43285e225b484b8/multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160", size = 130163, upload-time = "2024-09-09T23:47:35.716Z" }, + { url = "https://files.pythonhosted.org/packages/be/72/17c9f67e7542a49dd252c5ae50248607dfb780bcc03035907dafefb067e3/multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7", size = 126832, upload-time = "2024-09-09T23:47:37.116Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/9f/72d719e248cbd755c8736c6d14780533a1606ffb3fbb0fbd77da9f0372da/multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0", size = 26402, upload-time = "2024-09-09T23:47:38.863Z" }, + { url = "https://files.pythonhosted.org/packages/04/5a/d88cd5d00a184e1ddffc82aa2e6e915164a6d2641ed3606e766b5d2f275a/multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d", size = 28800, upload-time = "2024-09-09T23:47:40.056Z" }, + { url = "https://files.pythonhosted.org/packages/93/13/df3505a46d0cd08428e4c8169a196131d1b0c4b515c3649829258843dde6/multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6", size = 48570, upload-time = "2024-09-09T23:47:41.36Z" }, + { url = "https://files.pythonhosted.org/packages/f0/e1/a215908bfae1343cdb72f805366592bdd60487b4232d039c437fe8f5013d/multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156", size = 29316, upload-time = "2024-09-09T23:47:42.612Z" }, + { url = "https://files.pythonhosted.org/packages/70/0f/6dc70ddf5d442702ed74f298d69977f904960b82368532c88e854b79f72b/multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb", size = 29640, upload-time = "2024-09-09T23:47:44.028Z" }, + { url = "https://files.pythonhosted.org/packages/d8/6d/9c87b73a13d1cdea30b321ef4b3824449866bd7f7127eceed066ccb9b9ff/multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b", size = 131067, upload-time = "2024-09-09T23:47:45.617Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/1e/1b34154fef373371fd6c65125b3d42ff5f56c7ccc6bfff91b9b3c60ae9e0/multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72", size = 138507, upload-time = "2024-09-09T23:47:47.429Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e0/0bc6b2bac6e461822b5f575eae85da6aae76d0e2a79b6665d6206b8e2e48/multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304", size = 133905, upload-time = "2024-09-09T23:47:48.878Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/73d13b918071ff9b2205fcf773d316e0f8fefb4ec65354bbcf0b10908cc6/multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351", size = 129004, upload-time = "2024-09-09T23:47:50.124Z" }, + { url = "https://files.pythonhosted.org/packages/74/21/23960627b00ed39643302d81bcda44c9444ebcdc04ee5bedd0757513f259/multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb", size = 121308, upload-time = "2024-09-09T23:47:51.97Z" }, + { url = "https://files.pythonhosted.org/packages/8b/5c/cf282263ffce4a596ed0bb2aa1a1dddfe1996d6a62d08842a8d4b33dca13/multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3", size = 132608, upload-time = "2024-09-09T23:47:53.201Z" }, + { url = "https://files.pythonhosted.org/packages/d7/3e/97e778c041c72063f42b290888daff008d3ab1427f5b09b714f5a8eff294/multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399", size = 127029, upload-time = "2024-09-09T23:47:54.435Z" }, + { url 
= "https://files.pythonhosted.org/packages/47/ac/3efb7bfe2f3aefcf8d103e9a7162572f01936155ab2f7ebcc7c255a23212/multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423", size = 137594, upload-time = "2024-09-09T23:47:55.659Z" }, + { url = "https://files.pythonhosted.org/packages/42/9b/6c6e9e8dc4f915fc90a9b7798c44a30773dea2995fdcb619870e705afe2b/multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3", size = 134556, upload-time = "2024-09-09T23:47:56.98Z" }, + { url = "https://files.pythonhosted.org/packages/1d/10/8e881743b26aaf718379a14ac58572a240e8293a1c9d68e1418fb11c0f90/multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753", size = 130993, upload-time = "2024-09-09T23:47:58.163Z" }, + { url = "https://files.pythonhosted.org/packages/45/84/3eb91b4b557442802d058a7579e864b329968c8d0ea57d907e7023c677f2/multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80", size = 26405, upload-time = "2024-09-09T23:47:59.391Z" }, + { url = "https://files.pythonhosted.org/packages/9f/0b/ad879847ecbf6d27e90a6eabb7eff6b62c129eefe617ea45eae7c1f0aead/multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926", size = 28795, upload-time = "2024-09-09T23:48:00.359Z" }, + { url = "https://files.pythonhosted.org/packages/fd/16/92057c74ba3b96d5e211b553895cd6dc7cc4d1e43d9ab8fafc727681ef71/multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa", size = 48713, upload-time = "2024-09-09T23:48:01.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/3d/37d1b8893ae79716179540b89fc6a0ee56b4a65fcc0d63535c6f5d96f217/multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436", size = 29516, upload-time = "2024-09-09T23:48:03.463Z" }, + { url = "https://files.pythonhosted.org/packages/a2/12/adb6b3200c363062f805275b4c1e656be2b3681aada66c80129932ff0bae/multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761", size = 29557, upload-time = "2024-09-09T23:48:04.905Z" }, + { url = "https://files.pythonhosted.org/packages/47/e9/604bb05e6e5bce1e6a5cf80a474e0f072e80d8ac105f1b994a53e0b28c42/multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e", size = 130170, upload-time = "2024-09-09T23:48:06.862Z" }, + { url = "https://files.pythonhosted.org/packages/7e/13/9efa50801785eccbf7086b3c83b71a4fb501a4d43549c2f2f80b8787d69f/multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef", size = 134836, upload-time = "2024-09-09T23:48:08.537Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0f/93808b765192780d117814a6dfcc2e75de6dcc610009ad408b8814dca3ba/multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95", size = 133475, upload-time = "2024-09-09T23:48:09.865Z" }, + { url = "https://files.pythonhosted.org/packages/d3/c8/529101d7176fe7dfe1d99604e48d69c5dfdcadb4f06561f465c8ef12b4df/multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925", size = 131049, upload-time = "2024-09-09T23:48:11.115Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/0c/fc85b439014d5a58063e19c3a158a889deec399d47b5269a0f3b6a2e28bc/multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966", size = 120370, upload-time = "2024-09-09T23:48:12.78Z" }, + { url = "https://files.pythonhosted.org/packages/db/46/d4416eb20176492d2258fbd47b4abe729ff3b6e9c829ea4236f93c865089/multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305", size = 125178, upload-time = "2024-09-09T23:48:14.295Z" }, + { url = "https://files.pythonhosted.org/packages/5b/46/73697ad7ec521df7de5531a32780bbfd908ded0643cbe457f981a701457c/multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2", size = 119567, upload-time = "2024-09-09T23:48:16.284Z" }, + { url = "https://files.pythonhosted.org/packages/cd/ed/51f060e2cb0e7635329fa6ff930aa5cffa17f4c7f5c6c3ddc3500708e2f2/multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2", size = 129822, upload-time = "2024-09-09T23:48:17.835Z" }, + { url = "https://files.pythonhosted.org/packages/df/9e/ee7d1954b1331da3eddea0c4e08d9142da5f14b1321c7301f5014f49d492/multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6", size = 128656, upload-time = "2024-09-09T23:48:19.576Z" }, + { url = "https://files.pythonhosted.org/packages/77/00/8538f11e3356b5d95fa4b024aa566cde7a38aa7a5f08f4912b32a037c5dc/multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3", size = 125360, upload-time = "2024-09-09T23:48:20.957Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/05/5d334c1f2462d43fec2363cd00b1c44c93a78c3925d952e9a71caf662e96/multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133", size = 26382, upload-time = "2024-09-09T23:48:22.351Z" }, + { url = "https://files.pythonhosted.org/packages/a3/bf/f332a13486b1ed0496d624bcc7e8357bb8053823e8cd4b9a18edc1d97e73/multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1", size = 28529, upload-time = "2024-09-09T23:48:23.478Z" }, + { url = "https://files.pythonhosted.org/packages/22/67/1c7c0f39fe069aa4e5d794f323be24bf4d33d62d2a348acdb7991f8f30db/multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008", size = 48771, upload-time = "2024-09-09T23:48:24.594Z" }, + { url = "https://files.pythonhosted.org/packages/3c/25/c186ee7b212bdf0df2519eacfb1981a017bda34392c67542c274651daf23/multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f", size = 29533, upload-time = "2024-09-09T23:48:26.187Z" }, + { url = "https://files.pythonhosted.org/packages/67/5e/04575fd837e0958e324ca035b339cea174554f6f641d3fb2b4f2e7ff44a2/multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28", size = 29595, upload-time = "2024-09-09T23:48:27.305Z" }, + { url = "https://files.pythonhosted.org/packages/d3/b2/e56388f86663810c07cfe4a3c3d87227f3811eeb2d08450b9e5d19d78876/multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b", size = 130094, upload-time = "2024-09-09T23:48:28.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/6c/ee/30ae9b4186a644d284543d55d491fbd4239b015d36b23fea43b4c94f7052/multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c", size = 134876, upload-time = "2024-09-09T23:48:30.098Z" }, + { url = "https://files.pythonhosted.org/packages/84/c7/70461c13ba8ce3c779503c70ec9d0345ae84de04521c1f45a04d5f48943d/multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3", size = 133500, upload-time = "2024-09-09T23:48:31.793Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9f/002af221253f10f99959561123fae676148dd730e2daa2cd053846a58507/multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44", size = 131099, upload-time = "2024-09-09T23:48:33.193Z" }, + { url = "https://files.pythonhosted.org/packages/82/42/d1c7a7301d52af79d88548a97e297f9d99c961ad76bbe6f67442bb77f097/multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2", size = 120403, upload-time = "2024-09-09T23:48:34.942Z" }, + { url = "https://files.pythonhosted.org/packages/68/f3/471985c2c7ac707547553e8f37cff5158030d36bdec4414cb825fbaa5327/multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3", size = 125348, upload-time = "2024-09-09T23:48:36.222Z" }, + { url = "https://files.pythonhosted.org/packages/67/2c/e6df05c77e0e433c214ec1d21ddd203d9a4770a1f2866a8ca40a545869a0/multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa", size = 119673, upload-time = "2024-09-09T23:48:37.588Z" }, + { url 
= "https://files.pythonhosted.org/packages/c5/cd/bc8608fff06239c9fb333f9db7743a1b2eafe98c2666c9a196e867a3a0a4/multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa", size = 129927, upload-time = "2024-09-09T23:48:39.128Z" }, + { url = "https://files.pythonhosted.org/packages/44/8e/281b69b7bc84fc963a44dc6e0bbcc7150e517b91df368a27834299a526ac/multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4", size = 128711, upload-time = "2024-09-09T23:48:40.55Z" }, + { url = "https://files.pythonhosted.org/packages/12/a4/63e7cd38ed29dd9f1881d5119f272c898ca92536cdb53ffe0843197f6c85/multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6", size = 125519, upload-time = "2024-09-09T23:48:42.446Z" }, + { url = "https://files.pythonhosted.org/packages/38/e0/4f5855037a72cd8a7a2f60a3952d9aa45feedb37ae7831642102604e8a37/multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81", size = 26426, upload-time = "2024-09-09T23:48:43.936Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a5/17ee3a4db1e310b7405f5d25834460073a8ccd86198ce044dfaf69eac073/multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774", size = 28531, upload-time = "2024-09-09T23:48:45.122Z" }, + { url = "https://files.pythonhosted.org/packages/3e/6a/af41f3aaf5f00fd86cc7d470a2f5b25299b0c84691163b8757f4a1a205f2/multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392", size = 48597, upload-time = "2024-09-09T23:48:46.391Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/d6/3d4082760ed11b05734f8bf32a0615b99e7d9d2b3730ad698a4d7377c00a/multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a", size = 29338, upload-time = "2024-09-09T23:48:47.891Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7f/5d1ce7f47d44393d429922910afbe88fcd29ee3069babbb47507a4c3a7ea/multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2", size = 29562, upload-time = "2024-09-09T23:48:49.254Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ec/c425257671af9308a9b626e2e21f7f43841616e4551de94eb3c92aca75b2/multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc", size = 130980, upload-time = "2024-09-09T23:48:50.606Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d7/d4220ad2633a89b314593e9b85b5bc9287a7c563c7f9108a4a68d9da5374/multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478", size = 136694, upload-time = "2024-09-09T23:48:52.042Z" }, + { url = "https://files.pythonhosted.org/packages/a1/2a/13e554db5830c8d40185a2e22aa8325516a5de9634c3fb2caf3886a829b3/multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4", size = 131616, upload-time = "2024-09-09T23:48:54.283Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a9/83692e37d8152f104333132105b67100aabfb2e96a87f6bed67f566035a7/multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d", size = 129664, upload-time = "2024-09-09T23:48:55.785Z" }, + { url = 
"https://files.pythonhosted.org/packages/cc/1c/1718cd518fb9da7e8890d9d1611c1af0ea5e60f68ff415d026e38401ed36/multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6", size = 121855, upload-time = "2024-09-09T23:48:57.333Z" }, + { url = "https://files.pythonhosted.org/packages/2b/92/f6ed67514b0e3894198f0eb42dcde22f0851ea35f4561a1e4acf36c7b1be/multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2", size = 127928, upload-time = "2024-09-09T23:48:58.778Z" }, + { url = "https://files.pythonhosted.org/packages/f7/30/c66954115a4dc4dc3c84e02c8ae11bb35a43d79ef93122c3c3a40c4d459b/multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd", size = 122793, upload-time = "2024-09-09T23:49:00.244Z" }, + { url = "https://files.pythonhosted.org/packages/62/c9/d386d01b43871e8e1631eb7b3695f6af071b7ae1ab716caf371100f0eb24/multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6", size = 132762, upload-time = "2024-09-09T23:49:02.188Z" }, + { url = "https://files.pythonhosted.org/packages/69/ff/f70cb0a2f7a358acf48e32139ce3a150ff18c961ee9c714cc8c0dc7e3584/multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492", size = 127872, upload-time = "2024-09-09T23:49:04.389Z" }, + { url = "https://files.pythonhosted.org/packages/89/5b/abea7db3ba4cd07752a9b560f9275a11787cd13f86849b5d99c1ceea921d/multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd", size = 126161, upload-time = "2024-09-09T23:49:06.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/03/acc77a4667cca4462ee974fc39990803e58fa573d5a923d6e82b7ef6da7e/multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167", size = 26338, upload-time = "2024-09-09T23:49:07.782Z" }, + { url = "https://files.pythonhosted.org/packages/90/bf/3d0c1cc9c8163abc24625fae89c0ade1ede9bccb6eceb79edf8cff3cca46/multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef", size = 28736, upload-time = "2024-09-09T23:49:09.126Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c9/9e153a6572b38ac5ff4434113af38acf8d5e9957897cdb1f513b3d6614ed/multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c", size = 48550, upload-time = "2024-09-09T23:49:10.475Z" }, + { url = "https://files.pythonhosted.org/packages/76/f5/79565ddb629eba6c7f704f09a09df085c8dc04643b12506f10f718cee37a/multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1", size = 29298, upload-time = "2024-09-09T23:49:12.119Z" }, + { url = "https://files.pythonhosted.org/packages/60/1b/9851878b704bc98e641a3e0bce49382ae9e05743dac6d97748feb5b7baba/multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c", size = 29641, upload-time = "2024-09-09T23:49:13.714Z" }, + { url = "https://files.pythonhosted.org/packages/89/87/d451d45aab9e422cb0fb2f7720c31a4c1d3012c740483c37f642eba568fb/multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c", size = 126202, upload-time = "2024-09-09T23:49:15.238Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/b4/27cbe9f3e2e469359887653f2e45470272eef7295139916cc21107c6b48c/multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f", size = 133925, upload-time = "2024-09-09T23:49:16.786Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a3/afc841899face8adfd004235ce759a37619f6ec99eafd959650c5ce4df57/multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875", size = 129039, upload-time = "2024-09-09T23:49:18.381Z" }, + { url = "https://files.pythonhosted.org/packages/5e/41/0d0fb18c1ad574f807196f5f3d99164edf9de3e169a58c6dc2d6ed5742b9/multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255", size = 124072, upload-time = "2024-09-09T23:49:20.115Z" }, + { url = "https://files.pythonhosted.org/packages/00/22/defd7a2e71a44e6e5b9a5428f972e5b572e7fe28e404dfa6519bbf057c93/multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30", size = 116532, upload-time = "2024-09-09T23:49:21.685Z" }, + { url = "https://files.pythonhosted.org/packages/91/25/f7545102def0b1d456ab6449388eed2dfd822debba1d65af60194904a23a/multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057", size = 128173, upload-time = "2024-09-09T23:49:23.657Z" }, + { url = "https://files.pythonhosted.org/packages/45/79/3dbe8d35fc99f5ea610813a72ab55f426cb9cf482f860fa8496e5409be11/multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657", size = 122654, upload-time = "2024-09-09T23:49:25.7Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/cb/209e735eeab96e1b160825b5d0b36c56d3862abff828fc43999bb957dcad/multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28", size = 133197, upload-time = "2024-09-09T23:49:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/e4/3a/a13808a7ada62808afccea67837a79d00ad6581440015ef00f726d064c2d/multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972", size = 129754, upload-time = "2024-09-09T23:49:29.508Z" }, + { url = "https://files.pythonhosted.org/packages/77/dd/8540e139eafb240079242da8f8ffdf9d3f4b4ad1aac5a786cd4050923783/multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43", size = 126402, upload-time = "2024-09-09T23:49:31.243Z" }, + { url = "https://files.pythonhosted.org/packages/86/99/e82e1a275d8b1ea16d3a251474262258dbbe41c05cce0c01bceda1fc8ea5/multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada", size = 26421, upload-time = "2024-09-09T23:49:32.648Z" }, + { url = "https://files.pythonhosted.org/packages/86/1c/9fa630272355af7e4446a2c7550c259f11ee422ab2d30ff90a0a71cf3d9e/multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a", size = 28791, upload-time = "2024-09-09T23:49:34.725Z" }, + { url = "https://files.pythonhosted.org/packages/99/b7/b9e70fde2c0f0c9af4cc5277782a89b66d35948ea3369ec9f598358c3ac5/multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506", size = 10051, upload-time = "2024-09-09T23:49:36.506Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + 
"python_full_version == '3.9.*'", +] +dependencies = [ + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/63/7bdd4adc330abcca54c85728db2327130e49e52e8c3ce685cec44e0f2e9f/multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349", size = 77153, upload-time = "2025-10-06T14:48:26.409Z" }, + { url = "https://files.pythonhosted.org/packages/3f/bb/b6c35ff175ed1a3142222b78455ee31be71a8396ed3ab5280fbe3ebe4e85/multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e", size = 44993, upload-time = "2025-10-06T14:48:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/e0/1f/064c77877c5fa6df6d346e68075c0f6998547afe952d6471b4c5f6a7345d/multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3", size = 44607, upload-time = "2025-10-06T14:48:29.581Z" }, + { url = "https://files.pythonhosted.org/packages/04/7a/bf6aa92065dd47f287690000b3d7d332edfccb2277634cadf6a810463c6a/multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046", size = 241847, upload-time = "2025-10-06T14:48:32.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/39/297a8de920f76eda343e4ce05f3b489f0ab3f9504f2576dfb37b7c08ca08/multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32", size = 242616, upload-time = "2025-10-06T14:48:34.054Z" }, + { url = "https://files.pythonhosted.org/packages/39/3a/d0eee2898cfd9d654aea6cb8c4addc2f9756e9a7e09391cfe55541f917f7/multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73", size = 222333, upload-time = "2025-10-06T14:48:35.9Z" }, + { url = "https://files.pythonhosted.org/packages/05/48/3b328851193c7a4240815b71eea165b49248867bbb6153a0aee227a0bb47/multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc", size = 253239, upload-time = "2025-10-06T14:48:37.302Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ca/0706a98c8d126a89245413225ca4a3fefc8435014de309cf8b30acb68841/multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62", size = 251618, upload-time = "2025-10-06T14:48:38.963Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/9c7992f245554d8b173f6f0a048ad24b3e645d883f096857ec2c0822b8bd/multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84", size = 241655, upload-time = "2025-10-06T14:48:40.312Z" }, + { url = "https://files.pythonhosted.org/packages/31/79/26a85991ae67efd1c0b1fc2e0c275b8a6aceeb155a68861f63f87a798f16/multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0", size = 239245, upload-time = "2025-10-06T14:48:41.848Z" }, + { url = "https://files.pythonhosted.org/packages/14/1e/75fa96394478930b79d0302eaf9a6c69f34005a1a5251ac8b9c336486ec9/multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e", size = 233523, upload-time = "2025-10-06T14:48:43.749Z" }, + { url = "https://files.pythonhosted.org/packages/b2/5e/085544cb9f9c4ad2b5d97467c15f856df8d9bac410cffd5c43991a5d878b/multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4", size = 243129, upload-time = "2025-10-06T14:48:45.225Z" }, + { url = "https://files.pythonhosted.org/packages/b9/c3/e9d9e2f20c9474e7a8fcef28f863c5cbd29bb5adce6b70cebe8bdad0039d/multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648", size = 248999, upload-time = "2025-10-06T14:48:46.703Z" }, + { url = "https://files.pythonhosted.org/packages/b5/3f/df171b6efa3239ae33b97b887e42671cd1d94d460614bfb2c30ffdab3b95/multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111", size = 243711, upload-time = "2025-10-06T14:48:48.146Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2f/9b5564888c4e14b9af64c54acf149263721a283aaf4aa0ae89b091d5d8c1/multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36", size = 237504, upload-time = "2025-10-06T14:48:49.447Z" }, + { url = "https://files.pythonhosted.org/packages/6c/3a/0bd6ca0f7d96d790542d591c8c3354c1e1b6bfd2024d4d92dc3d87485ec7/multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85", size = 41422, upload-time = "2025-10-06T14:48:50.789Z" }, 
+ { url = "https://files.pythonhosted.org/packages/00/35/f6a637ea2c75f0d3b7c7d41b1189189acff0d9deeb8b8f35536bb30f5e33/multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7", size = 46050, upload-time = "2025-10-06T14:48:51.938Z" }, + { url = "https://files.pythonhosted.org/packages/e7/b8/f7bf8329b39893d02d9d95cf610c75885d12fc0f402b1c894e1c8e01c916/multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0", size = 43153, upload-time = "2025-10-06T14:48:53.146Z" }, + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time = "2025-10-06T14:48:58.042Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, 
+ { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, + { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, + { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, + { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" }, + { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" }, + { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" }, + { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" }, + { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" }, + { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, + { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, + 
{ url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, + { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, + { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, + { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, + { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" }, + { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" }, + { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" }, + { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" }, + { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" }, + { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" }, + { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = "2025-10-06T14:50:10.714Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, + { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, + { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, + { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, + 
{ url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, + { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, + { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, + { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" }, + { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, + { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" }, + { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" }, + { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" }, + { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" }, + { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" }, + { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = "2025-10-06T14:50:39.574Z" }, + { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, + { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = 
"2025-10-06T14:50:47.154Z" }, + { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, + { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" }, + { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" }, + { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" }, + { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084, upload-time = "2025-10-06T14:50:56.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667, upload-time = "2025-10-06T14:50:57.991Z" }, + { url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590, upload-time = "2025-10-06T14:50:59.589Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112, upload-time = "2025-10-06T14:51:01.183Z" }, + { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194, upload-time = "2025-10-06T14:51:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510, upload-time = "2025-10-06T14:51:04.724Z" }, + { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395, upload-time = "2025-10-06T14:51:06.306Z" }, + { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520, upload-time = "2025-10-06T14:51:08.091Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479, upload-time = "2025-10-06T14:51:10.365Z" }, + { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" }, + { url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" }, + { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" }, + { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" }, 
+ { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" }, + { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" }, + { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" }, + { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" }, + { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324, upload-time = "2025-10-06T14:51:25.822Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877, upload-time = "2025-10-06T14:51:27.604Z" }, + { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824, upload-time = "2025-10-06T14:51:29.664Z" }, + { url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558, upload-time = "2025-10-06T14:51:31.684Z" }, + { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339, upload-time = "2025-10-06T14:51:33.699Z" }, + { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895, upload-time = "2025-10-06T14:51:36.189Z" }, + { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862, upload-time = "2025-10-06T14:51:41.291Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376, upload-time = "2025-10-06T14:51:43.55Z" }, + { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272, upload-time = "2025-10-06T14:51:45.265Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" }, + { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" }, + { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = 
"2025-10-06T14:51:51.883Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" }, + { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, + { url = "https://files.pythonhosted.org/packages/90/d7/4cf84257902265c4250769ac49f4eaab81c182ee9aff8bf59d2714dbb174/multidict-6.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c", size = 77073, upload-time = "2025-10-06T14:51:57.386Z" }, + { url = "https://files.pythonhosted.org/packages/6d/51/194e999630a656e76c2965a1590d12faa5cd528170f2abaa04423e09fe8d/multidict-6.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40", size = 44928, upload-time = "2025-10-06T14:51:58.791Z" }, + { url = "https://files.pythonhosted.org/packages/e5/6b/2a195373c33068c9158e0941d0b46cfcc9c1d894ca2eb137d1128081dff0/multidict-6.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851", size = 44581, upload-time = "2025-10-06T14:52:00.174Z" }, + { url = "https://files.pythonhosted.org/packages/69/7b/7f4f2e644b6978bf011a5fd9a5ebb7c21de3f38523b1f7897d36a1ac1311/multidict-6.7.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687", size = 239901, upload-time = "2025-10-06T14:52:02.416Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/b5/952c72786710a031aa204a9adf7db66d7f97a2c6573889d58b9e60fe6702/multidict-6.7.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5", size = 240534, upload-time = "2025-10-06T14:52:04.105Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ef/109fe1f2471e4c458c74242c7e4a833f2d9fc8a6813cd7ee345b0bad18f9/multidict-6.7.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb", size = 219545, upload-time = "2025-10-06T14:52:06.208Z" }, + { url = "https://files.pythonhosted.org/packages/42/bd/327d91288114967f9fe90dc53de70aa3fec1b9073e46aa32c4828f771a87/multidict-6.7.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6", size = 251187, upload-time = "2025-10-06T14:52:08.049Z" }, + { url = "https://files.pythonhosted.org/packages/f4/13/a8b078ebbaceb7819fd28cd004413c33b98f1b70d542a62e6a00b74fb09f/multidict-6.7.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e", size = 249379, upload-time = "2025-10-06T14:52:09.831Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6d/ab12e1246be4d65d1f55de1e6f6aaa9b8120eddcfdd1d290439c7833d5ce/multidict-6.7.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e", size = 239241, upload-time = "2025-10-06T14:52:11.561Z" }, + { url = "https://files.pythonhosted.org/packages/bb/d7/079a93625208c173b8fa756396814397c0fd9fee61ef87b75a748820b86e/multidict-6.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32", size = 237418, upload-time = "2025-10-06T14:52:13.671Z" }, + { url = "https://files.pythonhosted.org/packages/c9/29/03777c2212274aa9440918d604dc9d6af0e6b4558c611c32c3dcf1a13870/multidict-6.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c", size = 232987, upload-time = "2025-10-06T14:52:15.708Z" }, + { url = "https://files.pythonhosted.org/packages/d9/00/11188b68d85a84e8050ee34724d6ded19ad03975caebe0c8dcb2829b37bf/multidict-6.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84", size = 240985, upload-time = "2025-10-06T14:52:17.317Z" }, + { url = "https://files.pythonhosted.org/packages/df/0c/12eef6aeda21859c6cdf7d75bd5516d83be3efe3d8cc45fd1a3037f5b9dc/multidict-6.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329", size = 246855, upload-time = "2025-10-06T14:52:19.096Z" }, + { url = "https://files.pythonhosted.org/packages/69/f6/076120fd8bb3975f09228e288e08bff6b9f1bfd5166397c7ba284f622ab2/multidict-6.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e", size = 241804, upload-time = "2025-10-06T14:52:21.166Z" }, + { url = "https://files.pythonhosted.org/packages/5f/51/41bb950c81437b88a93e6ddfca1d8763569ae861e638442838c4375f7497/multidict-6.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4", size = 235321, upload-time = "2025-10-06T14:52:23.208Z" }, + { url = "https://files.pythonhosted.org/packages/5a/cf/5bbd31f055199d56c1f6b04bbadad3ccb24e6d5d4db75db774fc6d6674b8/multidict-6.7.0-cp39-cp39-win32.whl", hash = "sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91", size = 41435, upload-time = "2025-10-06T14:52:24.735Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/01/547ffe9c2faec91c26965c152f3fea6cff068b6037401f61d310cc861ff4/multidict-6.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f", size = 46193, upload-time = "2025-10-06T14:52:26.101Z" }, + { url = "https://files.pythonhosted.org/packages/27/77/cfa5461d1d2651d6fc24216c92b4a21d4e385a41c46e0d9f3b070675167b/multidict-6.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546", size = 43118, upload-time = "2025-10-06T14:52:27.876Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, +] + +[[package]] +name = "mypy" +version = "1.14.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "mypy-extensions", marker = "python_full_version < '3.9'" }, + { name = "tomli", marker = "python_full_version < '3.9'" }, + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051, upload-time = "2024-12-30T16:39:07.335Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/7a/87ae2adb31d68402da6da1e5f30c07ea6063e9f09b5e7cfc9dfa44075e74/mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb", size = 11211002, upload-time = "2024-12-30T16:37:22.435Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/23/eada4c38608b444618a132be0d199b280049ded278b24cbb9d3fc59658e4/mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0", size = 10358400, upload-time = "2024-12-30T16:37:53.526Z" }, + { url = "https://files.pythonhosted.org/packages/43/c9/d6785c6f66241c62fd2992b05057f404237deaad1566545e9f144ced07f5/mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d", size = 12095172, upload-time = "2024-12-30T16:37:50.332Z" }, + { url = "https://files.pythonhosted.org/packages/c3/62/daa7e787770c83c52ce2aaf1a111eae5893de9e004743f51bfcad9e487ec/mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b", size = 12828732, upload-time = "2024-12-30T16:37:29.96Z" }, + { url = "https://files.pythonhosted.org/packages/1b/a2/5fb18318a3637f29f16f4e41340b795da14f4751ef4f51c99ff39ab62e52/mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427", size = 13012197, upload-time = "2024-12-30T16:38:05.037Z" }, + { url = "https://files.pythonhosted.org/packages/28/99/e153ce39105d164b5f02c06c35c7ba958aaff50a2babba7d080988b03fe7/mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f", size = 9780836, upload-time = "2024-12-30T16:37:19.726Z" }, + { url = "https://files.pythonhosted.org/packages/da/11/a9422850fd506edbcdc7f6090682ecceaf1f87b9dd847f9df79942da8506/mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c", size = 11120432, upload-time = "2024-12-30T16:37:11.533Z" }, + { url = 
"https://files.pythonhosted.org/packages/b6/9e/47e450fd39078d9c02d620545b2cb37993a8a8bdf7db3652ace2f80521ca/mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1", size = 10279515, upload-time = "2024-12-30T16:37:40.724Z" }, + { url = "https://files.pythonhosted.org/packages/01/b5/6c8d33bd0f851a7692a8bfe4ee75eb82b6983a3cf39e5e32a5d2a723f0c1/mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8", size = 12025791, upload-time = "2024-12-30T16:36:58.73Z" }, + { url = "https://files.pythonhosted.org/packages/f0/4c/e10e2c46ea37cab5c471d0ddaaa9a434dc1d28650078ac1b56c2d7b9b2e4/mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f", size = 12749203, upload-time = "2024-12-30T16:37:03.741Z" }, + { url = "https://files.pythonhosted.org/packages/88/55/beacb0c69beab2153a0f57671ec07861d27d735a0faff135a494cd4f5020/mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1", size = 12885900, upload-time = "2024-12-30T16:37:57.948Z" }, + { url = "https://files.pythonhosted.org/packages/a2/75/8c93ff7f315c4d086a2dfcde02f713004357d70a163eddb6c56a6a5eff40/mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae", size = 9777869, upload-time = "2024-12-30T16:37:33.428Z" }, + { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668, upload-time = "2024-12-30T16:38:02.211Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060, upload-time = "2024-12-30T16:37:46.131Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167, upload-time = "2024-12-30T16:37:43.534Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341, upload-time = "2024-12-30T16:37:36.249Z" }, + { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991, upload-time = "2024-12-30T16:37:06.743Z" }, + { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016, upload-time = "2024-12-30T16:37:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/9e/15/bb6a686901f59222275ab228453de741185f9d54fecbaacec041679496c6/mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255", size = 11252097, upload-time = "2024-12-30T16:37:25.144Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/b3/8b0f74dfd072c802b7fa368829defdf3ee1566ba74c32a2cb2403f68024c/mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34", size = 10239728, upload-time = "2024-12-30T16:38:08.634Z" }, + { url = "https://files.pythonhosted.org/packages/c5/9b/4fd95ab20c52bb5b8c03cc49169be5905d931de17edfe4d9d2986800b52e/mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a", size = 11924965, upload-time = "2024-12-30T16:38:12.132Z" }, + { url = "https://files.pythonhosted.org/packages/56/9d/4a236b9c57f5d8f08ed346914b3f091a62dd7e19336b2b2a0d85485f82ff/mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9", size = 12867660, upload-time = "2024-12-30T16:38:17.342Z" }, + { url = "https://files.pythonhosted.org/packages/40/88/a61a5497e2f68d9027de2bb139c7bb9abaeb1be1584649fa9d807f80a338/mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd", size = 12969198, upload-time = "2024-12-30T16:38:32.839Z" }, + { url = "https://files.pythonhosted.org/packages/54/da/3d6fc5d92d324701b0c23fb413c853892bfe0e1dbe06c9138037d459756b/mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107", size = 9885276, upload-time = "2024-12-30T16:38:20.828Z" }, + { url = "https://files.pythonhosted.org/packages/39/02/1817328c1372be57c16148ce7d2bfcfa4a796bedaed897381b1aad9b267c/mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31", size = 11143050, upload-time = "2024-12-30T16:38:29.743Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/07/99db9a95ece5e58eee1dd87ca456a7e7b5ced6798fd78182c59c35a7587b/mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6", size = 10321087, upload-time = "2024-12-30T16:38:14.739Z" }, + { url = "https://files.pythonhosted.org/packages/9a/eb/85ea6086227b84bce79b3baf7f465b4732e0785830726ce4a51528173b71/mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319", size = 12066766, upload-time = "2024-12-30T16:38:47.038Z" }, + { url = "https://files.pythonhosted.org/packages/4b/bb/f01bebf76811475d66359c259eabe40766d2f8ac8b8250d4e224bb6df379/mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac", size = 12787111, upload-time = "2024-12-30T16:39:02.444Z" }, + { url = "https://files.pythonhosted.org/packages/2f/c9/84837ff891edcb6dcc3c27d85ea52aab0c4a34740ff5f0ccc0eb87c56139/mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b", size = 12974331, upload-time = "2024-12-30T16:38:23.849Z" }, + { url = "https://files.pythonhosted.org/packages/84/5f/901e18464e6a13f8949b4909535be3fa7f823291b8ab4e4b36cfe57d6769/mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837", size = 9763210, upload-time = "2024-12-30T16:38:36.299Z" }, + { url = "https://files.pythonhosted.org/packages/ca/1f/186d133ae2514633f8558e78cd658070ba686c0e9275c5a5c24a1e1f0d67/mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35", size = 11200493, upload-time = "2024-12-30T16:38:26.935Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/fc/4842485d034e38a4646cccd1369f6b1ccd7bc86989c52770d75d719a9941/mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc", size = 10357702, upload-time = "2024-12-30T16:38:50.623Z" }, + { url = "https://files.pythonhosted.org/packages/b4/e6/457b83f2d701e23869cfec013a48a12638f75b9d37612a9ddf99072c1051/mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9", size = 12091104, upload-time = "2024-12-30T16:38:53.735Z" }, + { url = "https://files.pythonhosted.org/packages/f1/bf/76a569158db678fee59f4fd30b8e7a0d75bcbaeef49edd882a0d63af6d66/mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb", size = 12830167, upload-time = "2024-12-30T16:38:56.437Z" }, + { url = "https://files.pythonhosted.org/packages/43/bc/0bc6b694b3103de9fed61867f1c8bd33336b913d16831431e7cb48ef1c92/mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60", size = 13013834, upload-time = "2024-12-30T16:38:59.204Z" }, + { url = "https://files.pythonhosted.org/packages/b0/79/5f5ec47849b6df1e6943d5fd8e6632fbfc04b4fd4acfa5a5a9535d11b4e2/mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c", size = 9781231, upload-time = "2024-12-30T16:39:05.124Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905, upload-time = "2024-12-30T16:38:42.021Z" }, +] + +[[package]] +name = "mypy" +version = "1.18.2" +source = { registry = 
"https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "mypy-extensions", marker = "python_full_version >= '3.9'" }, + { name = "pathspec", marker = "python_full_version >= '3.9'" }, + { name = "tomli", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/6f/657961a0743cff32e6c0611b63ff1c1970a0b482ace35b069203bf705187/mypy-1.18.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eab0cf6294dafe397c261a75f96dc2c31bffe3b944faa24db5def4e2b0f77c", size = 12807973, upload-time = "2025-09-19T00:10:35.282Z" }, + { url = "https://files.pythonhosted.org/packages/10/e9/420822d4f661f13ca8900f5fa239b40ee3be8b62b32f3357df9a3045a08b/mypy-1.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a780ca61fc239e4865968ebc5240bb3bf610ef59ac398de9a7421b54e4a207e", size = 11896527, upload-time = "2025-09-19T00:10:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/aa/73/a05b2bbaa7005f4642fcfe40fb73f2b4fb6bb44229bd585b5878e9a87ef8/mypy-1.18.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:448acd386266989ef11662ce3c8011fd2a7b632e0ec7d61a98edd8e27472225b", size = 12507004, upload-time = "2025-09-19T00:11:05.411Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/01/f6e4b9f0d031c11ccbd6f17da26564f3a0f3c4155af344006434b0a05a9d/mypy-1.18.2-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f9e171c465ad3901dc652643ee4bffa8e9fef4d7d0eece23b428908c77a76a66", size = 13245947, upload-time = "2025-09-19T00:10:46.923Z" }, + { url = "https://files.pythonhosted.org/packages/d7/97/19727e7499bfa1ae0773d06afd30ac66a58ed7437d940c70548634b24185/mypy-1.18.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:592ec214750bc00741af1f80cbf96b5013d81486b7bb24cb052382c19e40b428", size = 13499217, upload-time = "2025-09-19T00:09:39.472Z" }, + { url = "https://files.pythonhosted.org/packages/9f/4f/90dc8c15c1441bf31cf0f9918bb077e452618708199e530f4cbd5cede6ff/mypy-1.18.2-cp310-cp310-win_amd64.whl", hash = "sha256:7fb95f97199ea11769ebe3638c29b550b5221e997c63b14ef93d2e971606ebed", size = 9766753, upload-time = "2025-09-19T00:10:49.161Z" }, + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/07/06/dfdd2bc60c66611dd8335f463818514733bc763e4760dee289dcc33df709/mypy-1.18.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:33eca32dd124b29400c31d7cf784e795b050ace0e1f91b8dc035672725617e34", size = 12908273, upload-time = "2025-09-19T00:10:58.321Z" }, + { url = "https://files.pythonhosted.org/packages/81/14/6a9de6d13a122d5608e1a04130724caf9170333ac5a924e10f670687d3eb/mypy-1.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3c47adf30d65e89b2dcd2fa32f3aeb5e94ca970d2c15fcb25e297871c8e4764", size = 11920910, upload-time = "2025-09-19T00:10:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a9/b29de53e42f18e8cc547e38daa9dfa132ffdc64f7250e353f5c8cdd44bee/mypy-1.18.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d6c838e831a062f5f29d11c9057c6009f60cb294fea33a98422688181fe2893", size = 12465585, upload-time = "2025-09-19T00:10:33.005Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/ae/6c3d2c7c61ff21f2bee938c917616c92ebf852f015fb55917fd6e2811db2/mypy-1.18.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01199871b6110a2ce984bde85acd481232d17413868c9807e95c1b0739a58914", size = 13348562, upload-time = "2025-09-19T00:10:11.51Z" }, + { url = "https://files.pythonhosted.org/packages/4d/31/aec68ab3b4aebdf8f36d191b0685d99faa899ab990753ca0fee60fb99511/mypy-1.18.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a2afc0fa0b0e91b4599ddfe0f91e2c26c2b5a5ab263737e998d6817874c5f7c8", size = 13533296, upload-time = "2025-09-19T00:10:06.568Z" }, + { url = "https://files.pythonhosted.org/packages/9f/83/abcb3ad9478fca3ebeb6a5358bb0b22c95ea42b43b7789c7fb1297ca44f4/mypy-1.18.2-cp312-cp312-win_amd64.whl", hash = "sha256:d8068d0afe682c7c4897c0f7ce84ea77f6de953262b12d07038f4d296d547074", size = 9828828, upload-time = "2025-09-19T00:10:28.203Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/7f462e6fbba87a72bc8097b93f6842499c428a6ff0c81dd46948d175afe8/mypy-1.18.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:07b8b0f580ca6d289e69209ec9d3911b4a26e5abfde32228a288eb79df129fcc", size = 12898728, upload-time = "2025-09-19T00:10:01.33Z" }, + { url = "https://files.pythonhosted.org/packages/99/5b/61ed4efb64f1871b41fd0b82d29a64640f3516078f6c7905b68ab1ad8b13/mypy-1.18.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ed4482847168439651d3feee5833ccedbf6657e964572706a2adb1f7fa4dfe2e", size = 11910758, upload-time = "2025-09-19T00:10:42.607Z" }, + { url = "https://files.pythonhosted.org/packages/3c/46/d297d4b683cc89a6e4108c4250a6a6b717f5fa96e1a30a7944a6da44da35/mypy-1.18.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3ad2afadd1e9fea5cf99a45a822346971ede8685cc581ed9cd4d42eaf940986", size = 12475342, upload-time = "2025-09-19T00:11:00.371Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/45/4798f4d00df13eae3bfdf726c9244bcb495ab5bd588c0eed93a2f2dd67f3/mypy-1.18.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a431a6f1ef14cf8c144c6b14793a23ec4eae3db28277c358136e79d7d062f62d", size = 13338709, upload-time = "2025-09-19T00:11:03.358Z" }, + { url = "https://files.pythonhosted.org/packages/d7/09/479f7358d9625172521a87a9271ddd2441e1dab16a09708f056e97007207/mypy-1.18.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7ab28cc197f1dd77a67e1c6f35cd1f8e8b73ed2217e4fc005f9e6a504e46e7ba", size = 13529806, upload-time = "2025-09-19T00:10:26.073Z" }, + { url = "https://files.pythonhosted.org/packages/71/cf/ac0f2c7e9d0ea3c75cd99dff7aec1c9df4a1376537cb90e4c882267ee7e9/mypy-1.18.2-cp313-cp313-win_amd64.whl", hash = "sha256:0e2785a84b34a72ba55fb5daf079a1003a34c05b22238da94fcae2bbe46f3544", size = 9833262, upload-time = "2025-09-19T00:10:40.035Z" }, + { url = "https://files.pythonhosted.org/packages/5a/0c/7d5300883da16f0063ae53996358758b2a2df2a09c72a5061fa79a1f5006/mypy-1.18.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:62f0e1e988ad41c2a110edde6c398383a889d95b36b3e60bcf155f5164c4fdce", size = 12893775, upload-time = "2025-09-19T00:10:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/50/df/2cffbf25737bdb236f60c973edf62e3e7b4ee1c25b6878629e88e2cde967/mypy-1.18.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8795a039bab805ff0c1dfdb8cd3344642c2b99b8e439d057aba30850b8d3423d", size = 11936852, upload-time = "2025-09-19T00:10:51.631Z" }, + { url = "https://files.pythonhosted.org/packages/be/50/34059de13dd269227fb4a03be1faee6e2a4b04a2051c82ac0a0b5a773c9a/mypy-1.18.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6ca1e64b24a700ab5ce10133f7ccd956a04715463d30498e64ea8715236f9c9c", size = 12480242, upload-time = "2025-09-19T00:11:07.955Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/11/040983fad5132d85914c874a2836252bbc57832065548885b5bb5b0d4359/mypy-1.18.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d924eef3795cc89fecf6bedc6ed32b33ac13e8321344f6ddbf8ee89f706c05cb", size = 13326683, upload-time = "2025-09-19T00:09:55.572Z" }, + { url = "https://files.pythonhosted.org/packages/e9/ba/89b2901dd77414dd7a8c8729985832a5735053be15b744c18e4586e506ef/mypy-1.18.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20c02215a080e3a2be3aa50506c67242df1c151eaba0dcbc1e4e557922a26075", size = 13514749, upload-time = "2025-09-19T00:10:44.827Z" }, + { url = "https://files.pythonhosted.org/packages/25/bc/cc98767cffd6b2928ba680f3e5bc969c4152bf7c2d83f92f5a504b92b0eb/mypy-1.18.2-cp314-cp314-win_amd64.whl", hash = "sha256:749b5f83198f1ca64345603118a6f01a4e99ad4bf9d103ddc5a3200cc4614adf", size = 9982959, upload-time = "2025-09-19T00:10:37.344Z" }, + { url = "https://files.pythonhosted.org/packages/3f/a6/490ff491d8ecddf8ab91762d4f67635040202f76a44171420bcbe38ceee5/mypy-1.18.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:25a9c8fb67b00599f839cf472713f54249a62efd53a54b565eb61956a7e3296b", size = 12807230, upload-time = "2025-09-19T00:09:49.471Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2e/60076fc829645d167ece9e80db9e8375648d210dab44cc98beb5b322a826/mypy-1.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2b9c7e284ee20e7598d6f42e13ca40b4928e6957ed6813d1ab6348aa3f47133", size = 11895666, upload-time = "2025-09-19T00:10:53.678Z" }, + { url = "https://files.pythonhosted.org/packages/97/4a/1e2880a2a5dda4dc8d9ecd1a7e7606bc0b0e14813637eeda40c38624e037/mypy-1.18.2-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d6985ed057513e344e43a26cc1cd815c7a94602fb6a3130a34798625bc2f07b6", size = 12499608, upload-time = "2025-09-19T00:09:36.204Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/81/a117f1b73a3015b076b20246b1f341c34a578ebd9662848c6b80ad5c4138/mypy-1.18.2-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22f27105f1525ec024b5c630c0b9f36d5c1cc4d447d61fe51ff4bd60633f47ac", size = 13244551, upload-time = "2025-09-19T00:10:17.531Z" }, + { url = "https://files.pythonhosted.org/packages/9b/61/b9f48e1714ce87c7bf0358eb93f60663740ebb08f9ea886ffc670cea7933/mypy-1.18.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:030c52d0ea8144e721e49b1f68391e39553d7451f0c3f8a7565b59e19fcb608b", size = 13491552, upload-time = "2025-09-19T00:10:13.753Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/b2c0af3b684fa80d1b27501a8bdd3d2daa467ea3992a8aa612f5ca17c2db/mypy-1.18.2-cp39-cp39-win_amd64.whl", hash = "sha256:aa5e07ac1a60a253445797e42b8b2963c9675563a94f11291ab40718b016a7a0", size = 9765635, upload-time = "2025-09-19T00:10:30.993Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "nodeenv" 
+version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = 
"sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302, upload-time = "2024-09-17T19:06:50.688Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439, upload-time = "2024-09-17T19:06:49.212Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.4.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/23/e8/21db9c9987b0e728855bd57bff6984f67952bea55d6f75e055c46b5383e8/platformdirs-4.4.0.tar.gz", hash = "sha256:ca753cf4d81dc309bc67b0ea38fd15dc97bc30ce419a7f58d13eb3bf14c4febf", size = 21634, upload-time = "2025-08-26T14:32:04.268Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/4b/2028861e724d3bd36227adfa20d3fd24c3fc6d52032f4a93c133be5d17ce/platformdirs-4.4.0-py3-none-any.whl", hash = "sha256:abd01743f24e5287cd7a5db3752faf1a2d65353f38ec26d98e25a6db65958c85", size = 18654, upload-time = "2025-08-26T14:32:02.735Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = 
"https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955, upload-time = "2024-04-20T21:34:42.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556, upload-time = "2024-04-20T21:34:40.434Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = 
"sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pre-commit" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "cfgv", marker = "python_full_version < '3.9'" }, + { name = "identify", version = "2.6.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "nodeenv", marker = "python_full_version < '3.9'" }, + { name = "pyyaml", marker = "python_full_version < '3.9'" }, + { name = "virtualenv", marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/b3/4ae08d21eb097162f5aad37f4585f8069a86402ed7f5362cc9ae097f9572/pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32", size = 177079, upload-time = "2023-10-13T15:57:48.334Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/75/526915fedf462e05eeb1c75ceaf7e3f9cde7b5ce6f62740fe5f7f19a0050/pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660", size = 203698, upload-time = "2023-10-13T15:57:46.378Z" }, +] + +[[package]] +name = "pre-commit" +version = "4.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "cfgv", marker = "python_full_version >= '3.9'" }, + { name = "identify", version = "2.6.15", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "nodeenv", marker = "python_full_version >= '3.9'" }, + { name = "pyyaml", marker = "python_full_version >= '3.9'" }, + { name = "virtualenv", marker = "python_full_version >= '3.9'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/ff/29/7cf5bbc236333876e4b41f56e06857a87937ce4bf91e117a6991a2dbb02a/pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16", size = 193792, upload-time = "2025-08-09T18:56:14.651Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" }, +] + +[[package]] +name = "propcache" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/a9/4d/5e5a60b78dbc1d464f8a7bbaeb30957257afdc8512cbb9dfd5659304f5cd/propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70", size = 40951, upload-time = "2024-10-07T12:56:36.896Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/08/1963dfb932b8d74d5b09098507b37e9b96c835ba89ab8aad35aa330f4ff3/propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58", size = 80712, upload-time = "2024-10-07T12:54:02.193Z" }, + { url = "https://files.pythonhosted.org/packages/e6/59/49072aba9bf8a8ed958e576182d46f038e595b17ff7408bc7e8807e721e1/propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b", size = 46301, upload-time = "2024-10-07T12:54:03.576Z" }, + { url = "https://files.pythonhosted.org/packages/33/a2/6b1978c2e0d80a678e2c483f45e5443c15fe5d32c483902e92a073314ef1/propcache-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110", size = 45581, upload-time = "2024-10-07T12:54:05.415Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/95/55acc9adff8f997c7572f23d41993042290dfb29e404cdadb07039a4386f/propcache-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2", size = 208659, upload-time = "2024-10-07T12:54:06.742Z" }, + { url = "https://files.pythonhosted.org/packages/bd/2c/ef7371ff715e6cd19ea03fdd5637ecefbaa0752fee5b0f2fe8ea8407ee01/propcache-0.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a", size = 222613, upload-time = "2024-10-07T12:54:08.204Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1c/fef251f79fd4971a413fa4b1ae369ee07727b4cc2c71e2d90dfcde664fbb/propcache-0.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577", size = 221067, upload-time = "2024-10-07T12:54:10.449Z" }, + { url = "https://files.pythonhosted.org/packages/8d/e7/22e76ae6fc5a1708bdce92bdb49de5ebe89a173db87e4ef597d6bbe9145a/propcache-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850", size = 208920, upload-time = "2024-10-07T12:54:11.903Z" }, + { url = "https://files.pythonhosted.org/packages/04/3e/f10aa562781bcd8a1e0b37683a23bef32bdbe501d9cc7e76969becaac30d/propcache-0.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61", size = 200050, upload-time = "2024-10-07T12:54:13.292Z" }, + { url = "https://files.pythonhosted.org/packages/d0/98/8ac69f638358c5f2a0043809c917802f96f86026e86726b65006830f3dc6/propcache-0.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37", size = 202346, upload-time = 
"2024-10-07T12:54:14.644Z" }, + { url = "https://files.pythonhosted.org/packages/ee/78/4acfc5544a5075d8e660af4d4e468d60c418bba93203d1363848444511ad/propcache-0.2.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48", size = 199750, upload-time = "2024-10-07T12:54:16.286Z" }, + { url = "https://files.pythonhosted.org/packages/a2/8f/90ada38448ca2e9cf25adc2fe05d08358bda1b9446f54a606ea38f41798b/propcache-0.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630", size = 201279, upload-time = "2024-10-07T12:54:17.752Z" }, + { url = "https://files.pythonhosted.org/packages/08/31/0e299f650f73903da851f50f576ef09bfffc8e1519e6a2f1e5ed2d19c591/propcache-0.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394", size = 211035, upload-time = "2024-10-07T12:54:19.109Z" }, + { url = "https://files.pythonhosted.org/packages/85/3e/e356cc6b09064bff1c06d0b2413593e7c925726f0139bc7acef8a21e87a8/propcache-0.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b", size = 215565, upload-time = "2024-10-07T12:54:20.578Z" }, + { url = "https://files.pythonhosted.org/packages/8b/54/4ef7236cd657e53098bd05aa59cbc3cbf7018fba37b40eaed112c3921e51/propcache-0.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336", size = 207604, upload-time = "2024-10-07T12:54:22.588Z" }, + { url = "https://files.pythonhosted.org/packages/1f/27/d01d7799c068443ee64002f0655d82fb067496897bf74b632e28ee6a32cf/propcache-0.2.0-cp310-cp310-win32.whl", hash = "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad", size = 40526, upload-time = "2024-10-07T12:54:23.867Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/44/6c2add5eeafb7f31ff0d25fbc005d930bea040a1364cf0f5768750ddf4d1/propcache-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99", size = 44958, upload-time = "2024-10-07T12:54:24.983Z" }, + { url = "https://files.pythonhosted.org/packages/e0/1c/71eec730e12aec6511e702ad0cd73c2872eccb7cad39de8ba3ba9de693ef/propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354", size = 80811, upload-time = "2024-10-07T12:54:26.165Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/7e94009f9a4934c48a371632197406a8860b9f08e3f7f7d922ab69e57a41/propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de", size = 46365, upload-time = "2024-10-07T12:54:28.034Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1d/c700d16d1d6903aeab28372fe9999762f074b80b96a0ccc953175b858743/propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87", size = 45602, upload-time = "2024-10-07T12:54:29.148Z" }, + { url = "https://files.pythonhosted.org/packages/2e/5e/4a3e96380805bf742712e39a4534689f4cddf5fa2d3a93f22e9fd8001b23/propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016", size = 236161, upload-time = "2024-10-07T12:54:31.557Z" }, + { url = "https://files.pythonhosted.org/packages/a5/85/90132481183d1436dff6e29f4fa81b891afb6cb89a7306f32ac500a25932/propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb", size = 244938, upload-time = "2024-10-07T12:54:33.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/89/c893533cb45c79c970834274e2d0f6d64383ec740be631b6a0a1d2b4ddc0/propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2", size = 243576, upload-time = "2024-10-07T12:54:34.497Z" }, + { url = "https://files.pythonhosted.org/packages/8c/56/98c2054c8526331a05f205bf45cbb2cda4e58e56df70e76d6a509e5d6ec6/propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4", size = 236011, upload-time = "2024-10-07T12:54:35.903Z" }, + { url = "https://files.pythonhosted.org/packages/2d/0c/8b8b9f8a6e1abd869c0fa79b907228e7abb966919047d294ef5df0d136cf/propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504", size = 224834, upload-time = "2024-10-07T12:54:37.238Z" }, + { url = "https://files.pythonhosted.org/packages/18/bb/397d05a7298b7711b90e13108db697732325cafdcd8484c894885c1bf109/propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178", size = 224946, upload-time = "2024-10-07T12:54:38.72Z" }, + { url = "https://files.pythonhosted.org/packages/25/19/4fc08dac19297ac58135c03770b42377be211622fd0147f015f78d47cd31/propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d", size = 217280, upload-time = "2024-10-07T12:54:40.089Z" }, + { url = "https://files.pythonhosted.org/packages/7e/76/c79276a43df2096ce2aba07ce47576832b1174c0c480fe6b04bd70120e59/propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2", size = 220088, upload-time = "2024-10-07T12:54:41.726Z" }, + { url = 
"https://files.pythonhosted.org/packages/c3/9a/8a8cf428a91b1336b883f09c8b884e1734c87f724d74b917129a24fe2093/propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db", size = 233008, upload-time = "2024-10-07T12:54:43.742Z" }, + { url = "https://files.pythonhosted.org/packages/25/7b/768a8969abd447d5f0f3333df85c6a5d94982a1bc9a89c53c154bf7a8b11/propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b", size = 237719, upload-time = "2024-10-07T12:54:45.065Z" }, + { url = "https://files.pythonhosted.org/packages/ed/0d/e5d68ccc7976ef8b57d80613ac07bbaf0614d43f4750cf953f0168ef114f/propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b", size = 227729, upload-time = "2024-10-07T12:54:46.405Z" }, + { url = "https://files.pythonhosted.org/packages/05/64/17eb2796e2d1c3d0c431dc5f40078d7282f4645af0bb4da9097fbb628c6c/propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1", size = 40473, upload-time = "2024-10-07T12:54:47.694Z" }, + { url = "https://files.pythonhosted.org/packages/83/c5/e89fc428ccdc897ade08cd7605f174c69390147526627a7650fb883e0cd0/propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71", size = 44921, upload-time = "2024-10-07T12:54:48.935Z" }, + { url = "https://files.pythonhosted.org/packages/7c/46/a41ca1097769fc548fc9216ec4c1471b772cc39720eb47ed7e38ef0006a9/propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2", size = 80800, upload-time = "2024-10-07T12:54:50.409Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/4f/93df46aab9cc473498ff56be39b5f6ee1e33529223d7a4d8c0a6101a9ba2/propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7", size = 46443, upload-time = "2024-10-07T12:54:51.634Z" }, + { url = "https://files.pythonhosted.org/packages/0b/17/308acc6aee65d0f9a8375e36c4807ac6605d1f38074b1581bd4042b9fb37/propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8", size = 45676, upload-time = "2024-10-07T12:54:53.454Z" }, + { url = "https://files.pythonhosted.org/packages/65/44/626599d2854d6c1d4530b9a05e7ff2ee22b790358334b475ed7c89f7d625/propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793", size = 246191, upload-time = "2024-10-07T12:54:55.438Z" }, + { url = "https://files.pythonhosted.org/packages/f2/df/5d996d7cb18df076debae7d76ac3da085c0575a9f2be6b1f707fe227b54c/propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09", size = 251791, upload-time = "2024-10-07T12:54:57.441Z" }, + { url = "https://files.pythonhosted.org/packages/2e/6d/9f91e5dde8b1f662f6dd4dff36098ed22a1ef4e08e1316f05f4758f1576c/propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89", size = 253434, upload-time = "2024-10-07T12:54:58.857Z" }, + { url = "https://files.pythonhosted.org/packages/3c/e9/1b54b7e26f50b3e0497cd13d3483d781d284452c2c50dd2a615a92a087a3/propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e", size = 248150, upload-time = "2024-10-07T12:55:00.19Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/ef/a35bf191c8038fe3ce9a414b907371c81d102384eda5dbafe6f4dce0cf9b/propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9", size = 233568, upload-time = "2024-10-07T12:55:01.723Z" }, + { url = "https://files.pythonhosted.org/packages/97/d9/d00bb9277a9165a5e6d60f2142cd1a38a750045c9c12e47ae087f686d781/propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4", size = 229874, upload-time = "2024-10-07T12:55:03.962Z" }, + { url = "https://files.pythonhosted.org/packages/8e/78/c123cf22469bdc4b18efb78893e69c70a8b16de88e6160b69ca6bdd88b5d/propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c", size = 225857, upload-time = "2024-10-07T12:55:06.439Z" }, + { url = "https://files.pythonhosted.org/packages/31/1b/fd6b2f1f36d028820d35475be78859d8c89c8f091ad30e377ac49fd66359/propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887", size = 227604, upload-time = "2024-10-07T12:55:08.254Z" }, + { url = "https://files.pythonhosted.org/packages/99/36/b07be976edf77a07233ba712e53262937625af02154353171716894a86a6/propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57", size = 238430, upload-time = "2024-10-07T12:55:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/0d/64/5822f496c9010e3966e934a011ac08cac8734561842bc7c1f65586e0683c/propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23", size = 244814, upload-time = "2024-10-07T12:55:11.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/bd/8657918a35d50b18a9e4d78a5df7b6c82a637a311ab20851eef4326305c1/propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348", size = 235922, upload-time = "2024-10-07T12:55:12.508Z" }, + { url = "https://files.pythonhosted.org/packages/a8/6f/ec0095e1647b4727db945213a9f395b1103c442ef65e54c62e92a72a3f75/propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5", size = 40177, upload-time = "2024-10-07T12:55:13.814Z" }, + { url = "https://files.pythonhosted.org/packages/20/a2/bd0896fdc4f4c1db46d9bc361c8c79a9bf08ccc08ba054a98e38e7ba1557/propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3", size = 44446, upload-time = "2024-10-07T12:55:14.972Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a7/5f37b69197d4f558bfef5b4bceaff7c43cc9b51adf5bd75e9081d7ea80e4/propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7", size = 78120, upload-time = "2024-10-07T12:55:16.179Z" }, + { url = "https://files.pythonhosted.org/packages/c8/cd/48ab2b30a6b353ecb95a244915f85756d74f815862eb2ecc7a518d565b48/propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763", size = 45127, upload-time = "2024-10-07T12:55:18.275Z" }, + { url = "https://files.pythonhosted.org/packages/a5/ba/0a1ef94a3412aab057bd996ed5f0ac7458be5bf469e85c70fa9ceb43290b/propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d", size = 44419, upload-time = "2024-10-07T12:55:19.487Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/6c/ca70bee4f22fa99eacd04f4d2f1699be9d13538ccf22b3169a61c60a27fa/propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a", size = 229611, upload-time = "2024-10-07T12:55:21.377Z" }, + { url = "https://files.pythonhosted.org/packages/19/70/47b872a263e8511ca33718d96a10c17d3c853aefadeb86dc26e8421184b9/propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b", size = 234005, upload-time = "2024-10-07T12:55:22.898Z" }, + { url = "https://files.pythonhosted.org/packages/4f/be/3b0ab8c84a22e4a3224719099c1229ddfdd8a6a1558cf75cb55ee1e35c25/propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb", size = 237270, upload-time = "2024-10-07T12:55:24.354Z" }, + { url = "https://files.pythonhosted.org/packages/04/d8/f071bb000d4b8f851d312c3c75701e586b3f643fe14a2e3409b1b9ab3936/propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf", size = 231877, upload-time = "2024-10-07T12:55:25.774Z" }, + { url = "https://files.pythonhosted.org/packages/93/e7/57a035a1359e542bbb0a7df95aad6b9871ebee6dce2840cb157a415bd1f3/propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2", size = 217848, upload-time = "2024-10-07T12:55:27.148Z" }, + { url = "https://files.pythonhosted.org/packages/f0/93/d1dea40f112ec183398fb6c42fde340edd7bab202411c4aa1a8289f461b6/propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f", size = 216987, upload-time = 
"2024-10-07T12:55:29.294Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/877340871251145d3522c2b5d25c16a1690ad655fbab7bb9ece6b117e39f/propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136", size = 212451, upload-time = "2024-10-07T12:55:30.643Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bb/a91b72efeeb42906ef58ccf0cdb87947b54d7475fee3c93425d732f16a61/propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325", size = 212879, upload-time = "2024-10-07T12:55:32.024Z" }, + { url = "https://files.pythonhosted.org/packages/9b/7f/ee7fea8faac57b3ec5d91ff47470c6c5d40d7f15d0b1fccac806348fa59e/propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44", size = 222288, upload-time = "2024-10-07T12:55:33.401Z" }, + { url = "https://files.pythonhosted.org/packages/ff/d7/acd67901c43d2e6b20a7a973d9d5fd543c6e277af29b1eb0e1f7bd7ca7d2/propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83", size = 228257, upload-time = "2024-10-07T12:55:35.381Z" }, + { url = "https://files.pythonhosted.org/packages/8d/6f/6272ecc7a8daad1d0754cfc6c8846076a8cb13f810005c79b15ce0ef0cf2/propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544", size = 221075, upload-time = "2024-10-07T12:55:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bd/c7a6a719a6b3dd8b3aeadb3675b5783983529e4a3185946aa444d3e078f6/propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032", size = 39654, upload-time = "2024-10-07T12:55:38.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/e7/0eef39eff84fa3e001b44de0bd41c7c0e3432e7648ffd3d64955910f002d/propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e", size = 43705, upload-time = "2024-10-07T12:55:39.921Z" }, + { url = "https://files.pythonhosted.org/packages/b4/94/2c3d64420fd58ed462e2b416386d48e72dec027cf7bb572066cf3866e939/propcache-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861", size = 82315, upload-time = "2024-10-07T12:55:41.166Z" }, + { url = "https://files.pythonhosted.org/packages/73/b7/9e2a17d9a126f2012b22ddc5d0979c28ca75104e24945214790c1d787015/propcache-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6", size = 47188, upload-time = "2024-10-07T12:55:42.316Z" }, + { url = "https://files.pythonhosted.org/packages/80/ef/18af27caaae5589c08bb5a461cfa136b83b7e7983be604f2140d91f92b97/propcache-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063", size = 46314, upload-time = "2024-10-07T12:55:43.544Z" }, + { url = "https://files.pythonhosted.org/packages/fa/df/8dbd3e472baf73251c0fbb571a3f0a4e3a40c52a1c8c2a6c46ab08736ff9/propcache-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f", size = 212874, upload-time = "2024-10-07T12:55:44.823Z" }, + { url = "https://files.pythonhosted.org/packages/7c/57/5d4d783ac594bd56434679b8643673ae12de1ce758116fd8912a7f2313ec/propcache-0.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90", size = 224578, upload-time = "2024-10-07T12:55:46.253Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/27/072be8ad434c9a3aa1b561f527984ea0ed4ac072fd18dfaaa2aa2d6e6a2b/propcache-0.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68", size = 222636, upload-time = "2024-10-07T12:55:47.608Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f1/69a30ff0928d07f50bdc6f0147fd9a08e80904fd3fdb711785e518de1021/propcache-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9", size = 213573, upload-time = "2024-10-07T12:55:49.82Z" }, + { url = "https://files.pythonhosted.org/packages/a8/2e/c16716ae113fe0a3219978df3665a6fea049d81d50bd28c4ae72a4c77567/propcache-0.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89", size = 205438, upload-time = "2024-10-07T12:55:51.231Z" }, + { url = "https://files.pythonhosted.org/packages/e1/df/80e2c5cd5ed56a7bfb1aa58cedb79617a152ae43de7c0a7e800944a6b2e2/propcache-0.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04", size = 202352, upload-time = "2024-10-07T12:55:52.596Z" }, + { url = "https://files.pythonhosted.org/packages/0f/4e/79f665fa04839f30ffb2903211c718b9660fbb938ac7a4df79525af5aeb3/propcache-0.2.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162", size = 200476, upload-time = "2024-10-07T12:55:54.016Z" }, + { url = "https://files.pythonhosted.org/packages/a9/39/b9ea7b011521dd7cfd2f89bb6b8b304f3c789ea6285445bc145bebc83094/propcache-0.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563", size = 201581, upload-time = "2024-10-07T12:55:56.246Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/81/e8e96c97aa0b675a14e37b12ca9c9713b15cfacf0869e64bf3ab389fabf1/propcache-0.2.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418", size = 225628, upload-time = "2024-10-07T12:55:57.686Z" }, + { url = "https://files.pythonhosted.org/packages/eb/99/15f998c502c214f6c7f51462937605d514a8943a9a6c1fa10f40d2710976/propcache-0.2.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7", size = 229270, upload-time = "2024-10-07T12:55:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/ff/3a/a9f1a0c0e5b994b8f1a1c71bea56bb3e9eeec821cb4dd61e14051c4ba00b/propcache-0.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed", size = 207771, upload-time = "2024-10-07T12:56:00.393Z" }, + { url = "https://files.pythonhosted.org/packages/ff/3e/6103906a66d6713f32880cf6a5ba84a1406b4d66e1b9389bb9b8e1789f9e/propcache-0.2.0-cp38-cp38-win32.whl", hash = "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d", size = 41015, upload-time = "2024-10-07T12:56:01.953Z" }, + { url = "https://files.pythonhosted.org/packages/37/23/a30214b4c1f2bea24cc1197ef48d67824fbc41d5cf5472b17c37fef6002c/propcache-0.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5", size = 45749, upload-time = "2024-10-07T12:56:03.095Z" }, + { url = "https://files.pythonhosted.org/packages/38/05/797e6738c9f44ab5039e3ff329540c934eabbe8ad7e63c305c75844bc86f/propcache-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6", size = 81903, upload-time = "2024-10-07T12:56:04.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/84/8d5edb9a73e1a56b24dd8f2adb6aac223109ff0e8002313d52e5518258ba/propcache-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638", size = 46960, upload-time = "2024-10-07T12:56:06.38Z" }, + { url = "https://files.pythonhosted.org/packages/e7/77/388697bedda984af0d12d68e536b98129b167282da3401965c8450de510e/propcache-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957", size = 46133, upload-time = "2024-10-07T12:56:07.606Z" }, + { url = "https://files.pythonhosted.org/packages/e2/dc/60d444610bc5b1d7a758534f58362b1bcee736a785473f8a39c91f05aad1/propcache-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1", size = 211105, upload-time = "2024-10-07T12:56:08.826Z" }, + { url = "https://files.pythonhosted.org/packages/bc/c6/40eb0dd1de6f8e84f454615ab61f68eb4a58f9d63d6f6eaf04300ac0cc17/propcache-0.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562", size = 226613, upload-time = "2024-10-07T12:56:11.184Z" }, + { url = "https://files.pythonhosted.org/packages/de/b6/e078b5e9de58e20db12135eb6a206b4b43cb26c6b62ee0fe36ac40763a64/propcache-0.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d", size = 225587, upload-time = "2024-10-07T12:56:15.294Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4e/97059dd24494d1c93d1efb98bb24825e1930265b41858dd59c15cb37a975/propcache-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12", size = 211826, upload-time = "2024-10-07T12:56:16.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/23/4dbf726602a989d2280fe130a9b9dd71faa8d3bb8cd23d3261ff3c23f692/propcache-0.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8", size = 203140, upload-time = "2024-10-07T12:56:18.368Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ce/f3bff82c885dbd9ae9e43f134d5b02516c3daa52d46f7a50e4f52ef9121f/propcache-0.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8", size = 208841, upload-time = "2024-10-07T12:56:19.859Z" }, + { url = "https://files.pythonhosted.org/packages/29/d7/19a4d3b4c7e95d08f216da97035d0b103d0c90411c6f739d47088d2da1f0/propcache-0.2.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb", size = 203315, upload-time = "2024-10-07T12:56:21.256Z" }, + { url = "https://files.pythonhosted.org/packages/db/87/5748212a18beb8d4ab46315c55ade8960d1e2cdc190764985b2d229dd3f4/propcache-0.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea", size = 204724, upload-time = "2024-10-07T12:56:23.644Z" }, + { url = "https://files.pythonhosted.org/packages/84/2a/c3d2f989fc571a5bad0fabcd970669ccb08c8f9b07b037ecddbdab16a040/propcache-0.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6", size = 215514, upload-time = "2024-10-07T12:56:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4c44c133b08bc5f776afcb8f0833889c2636b8a83e07ea1d9096c1e401b0/propcache-0.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d", size = 220063, upload-time = "2024-10-07T12:56:28.497Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/25/280d0a3bdaee68db74c0acd9a472e59e64b516735b59cffd3a326ff9058a/propcache-0.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798", size = 211620, upload-time = "2024-10-07T12:56:29.891Z" }, + { url = "https://files.pythonhosted.org/packages/28/8c/266898981b7883c1563c35954f9ce9ced06019fdcc487a9520150c48dc91/propcache-0.2.0-cp39-cp39-win32.whl", hash = "sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9", size = 41049, upload-time = "2024-10-07T12:56:31.246Z" }, + { url = "https://files.pythonhosted.org/packages/af/53/a3e5b937f58e757a940716b88105ec4c211c42790c1ea17052b46dc16f16/propcache-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df", size = 45587, upload-time = "2024-10-07T12:56:33.416Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b6/e6d98278f2d49b22b4d033c9f792eda783b9ab2094b041f013fc69bcde87/propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036", size = 11603, upload-time = "2024-10-07T12:56:35.137Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/0e/934b541323035566a9af292dba85a195f7b78179114f2c6ebb24551118a9/propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db", size = 79534, upload-time = 
"2025-10-08T19:46:02.083Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6b/db0d03d96726d995dc7171286c6ba9d8d14251f37433890f88368951a44e/propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8", size = 45526, upload-time = "2025-10-08T19:46:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/e4/c3/82728404aea669e1600f304f2609cde9e665c18df5a11cdd57ed73c1dceb/propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925", size = 47263, upload-time = "2025-10-08T19:46:05.405Z" }, + { url = "https://files.pythonhosted.org/packages/df/1b/39313ddad2bf9187a1432654c38249bab4562ef535ef07f5eb6eb04d0b1b/propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21", size = 201012, upload-time = "2025-10-08T19:46:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/5b/01/f1d0b57d136f294a142acf97f4ed58c8e5b974c21e543000968357115011/propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5", size = 209491, upload-time = "2025-10-08T19:46:08.909Z" }, + { url = "https://files.pythonhosted.org/packages/a1/c8/038d909c61c5bb039070b3fb02ad5cccdb1dde0d714792e251cdb17c9c05/propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db", size = 215319, upload-time = "2025-10-08T19:46:10.7Z" }, + { url = "https://files.pythonhosted.org/packages/08/57/8c87e93142b2c1fa2408e45695205a7ba05fb5db458c0bf5c06ba0e09ea6/propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7", size = 196856, upload-time = "2025-10-08T19:46:12.003Z" }, + { url = "https://files.pythonhosted.org/packages/42/df/5615fec76aa561987a534759b3686008a288e73107faa49a8ae5795a9f7a/propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4", size = 193241, upload-time = "2025-10-08T19:46:13.495Z" }, + { url = "https://files.pythonhosted.org/packages/d5/21/62949eb3a7a54afe8327011c90aca7e03547787a88fb8bd9726806482fea/propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60", size = 190552, upload-time = "2025-10-08T19:46:14.938Z" }, + { url = "https://files.pythonhosted.org/packages/30/ee/ab4d727dd70806e5b4de96a798ae7ac6e4d42516f030ee60522474b6b332/propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f", size = 200113, upload-time = "2025-10-08T19:46:16.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0b/38b46208e6711b016aa8966a3ac793eee0d05c7159d8342aa27fc0bc365e/propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900", size = 200778, upload-time = "2025-10-08T19:46:18.023Z" }, + { url = "https://files.pythonhosted.org/packages/cf/81/5abec54355ed344476bee711e9f04815d4b00a311ab0535599204eecc257/propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c", size = 193047, upload-time = "2025-10-08T19:46:19.449Z" }, + { url = "https://files.pythonhosted.org/packages/ec/b6/1f237c04e32063cb034acd5f6ef34ef3a394f75502e72703545631ab1ef6/propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb", size = 38093, upload-time = "2025-10-08T19:46:20.643Z" 
}, + { url = "https://files.pythonhosted.org/packages/a6/67/354aac4e0603a15f76439caf0427781bcd6797f370377f75a642133bc954/propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37", size = 41638, upload-time = "2025-10-08T19:46:21.935Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e1/74e55b9fd1a4c209ff1a9a824bf6c8b3d1fc5a1ac3eabe23462637466785/propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581", size = 38229, upload-time = "2025-10-08T19:46:23.368Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = 
"2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 
221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = 
"2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 
263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = 
"https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = 
"2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size 
= 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/9b/01/0ebaec9003f5d619a7475165961f8e3083cf8644d704b60395df3601632d/propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff", size = 80277, upload-time = "2025-10-08T19:48:36.647Z" }, + { url = "https://files.pythonhosted.org/packages/34/58/04af97ac586b4ef6b9026c3fd36ee7798b737a832f5d3440a4280dcebd3a/propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb", size = 45865, upload-time = "2025-10-08T19:48:37.859Z" }, + { url = "https://files.pythonhosted.org/packages/7c/19/b65d98ae21384518b291d9939e24a8aeac4fdb5101b732576f8f7540e834/propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac", size = 47636, upload-time = "2025-10-08T19:48:39.038Z" }, + { url = "https://files.pythonhosted.org/packages/b3/0f/317048c6d91c356c7154dca5af019e6effeb7ee15fa6a6db327cc19e12b4/propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888", size = 201126, upload-time = "2025-10-08T19:48:40.774Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/69/0b2a7a5a6ee83292b4b997dbd80549d8ce7d40b6397c1646c0d9495f5a85/propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc", size = 209837, upload-time = "2025-10-08T19:48:42.167Z" }, + { url = "https://files.pythonhosted.org/packages/a5/92/c699ac495a6698df6e497fc2de27af4b6ace10d8e76528357ce153722e45/propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a", size = 215578, upload-time = "2025-10-08T19:48:43.56Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ee/14de81c5eb02c0ee4f500b4e39c4e1bd0677c06e72379e6ab18923c773fc/propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88", size = 197187, upload-time = "2025-10-08T19:48:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/1d/94/48dce9aaa6d8dd5a0859bad75158ec522546d4ac23f8e2f05fac469477dd/propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00", size = 193478, upload-time = "2025-10-08T19:48:47.743Z" }, + { url = "https://files.pythonhosted.org/packages/60/b5/0516b563e801e1ace212afde869a0596a0d7115eec0b12d296d75633fb29/propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0", size = 190650, upload-time = "2025-10-08T19:48:49.373Z" }, + { url = "https://files.pythonhosted.org/packages/24/89/e0f7d4a5978cd56f8cd67735f74052f257dc471ec901694e430f0d1572fe/propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e", size = 200251, upload-time = "2025-10-08T19:48:51.4Z" }, + { 
url = "https://files.pythonhosted.org/packages/06/7d/a1fac863d473876ed4406c914f2e14aa82d2f10dd207c9e16fc383cc5a24/propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781", size = 200919, upload-time = "2025-10-08T19:48:53.227Z" }, + { url = "https://files.pythonhosted.org/packages/c3/4e/f86a256ff24944cf5743e4e6c6994e3526f6acfcfb55e21694c2424f758c/propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183", size = 193211, upload-time = "2025-10-08T19:48:55.027Z" }, + { url = "https://files.pythonhosted.org/packages/6e/3f/3fbad5f4356b068f1b047d300a6ff2c66614d7030f078cd50be3fec04228/propcache-0.4.1-cp39-cp39-win32.whl", hash = "sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19", size = 38314, upload-time = "2025-10-08T19:48:56.792Z" }, + { url = "https://files.pythonhosted.org/packages/a4/45/d78d136c3a3d215677abb886785aae744da2c3005bcb99e58640c56529b1/propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f", size = 41912, upload-time = "2025-10-08T19:48:57.995Z" }, + { url = "https://files.pythonhosted.org/packages/fc/2a/b0632941f25139f4e58450b307242951f7c2717a5704977c6d5323a800af/propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938", size = 38450, upload-time = "2025-10-08T19:48:59.349Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "py" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796, upload-time = "2021-11-04T17:17:01.377Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708, upload-time = "2021-11-04T17:17:00.152Z" }, +] + +[[package]] +name = "py-trees" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "pydot", marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/57/03dc256d51e1daf80be6f098e82154676bf90003e5db7e17b130b3300611/py_trees-2.2.3.tar.gz", hash = "sha256:8a2024ca6cb966c3c571d91e2d60c90cc562c4d237a0af8ab7a7c27c2cff0795", size = 87603, upload-time = "2023-02-08T05:50:46.38Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a3/1dcb3bbc475373018e8a684d1289ea6892e5d485ef6ec1c8e0eb431c33e3/py_trees-2.2.3-py3-none-any.whl", hash = "sha256:0c667e15d45157d92b0cd23f0cf8f8ca6a41bd9d2f3a159599cbf88287d98566", size = 113023, upload-time = "2023-02-08T05:50:43.806Z" }, +] + +[[package]] +name = "py-trees" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "py", marker = "python_full_version >= '3.9'" }, + { name = "pydot", marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4f/b6/1ab21ed427e2508135186de187bba5bba674b77746f0b130b33cbe68e621/py_trees-2.3.0.tar.gz", hash = "sha256:ff36cf3f228589d2efaaaa26eaebb08e1f9f7baa2b06398bc721e953aa45531f", size = 
84068, upload-time = "2025-01-13T23:27:16.856Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/1c/9f32299572bce7c1d46ca98425d82f3fe6723a526f05a01b39d491cbfdf6/py_trees-2.3.0-py3-none-any.whl", hash = "sha256:02831face639b15edd5830df76dd59f358358177384543cf17a5e019370bffc2", size = 112918, upload-time = "2025-01-13T23:27:15.54Z" }, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "annotated-types", marker = "python_full_version < '3.9'" }, + { name = "pydantic-core", version = "2.27.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681, upload-time = "2025-01-24T01:42:12.693Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696, upload-time = "2025-01-24T01:42:10.371Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "annotated-types", marker = "python_full_version >= '3.9'" }, + { name = "pydantic-core", version = "2.41.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "typing-extensions", version = "4.15.0", source = { 
registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "typing-inspection", marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443, upload-time = "2024-12-18T11:31:54.917Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938, upload-time = "2024-12-18T11:27:14.406Z" }, + { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684, 
upload-time = "2024-12-18T11:27:16.489Z" }, + { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169, upload-time = "2024-12-18T11:27:22.16Z" }, + { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227, upload-time = "2024-12-18T11:27:25.097Z" }, + { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695, upload-time = "2024-12-18T11:27:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662, upload-time = "2024-12-18T11:27:30.798Z" }, + { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370, upload-time = "2024-12-18T11:27:33.692Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813, upload-time = "2024-12-18T11:27:37.111Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287, upload-time = "2024-12-18T11:27:40.566Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414, upload-time = "2024-12-18T11:27:43.757Z" }, + { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301, upload-time = "2024-12-18T11:27:47.36Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685, upload-time = "2024-12-18T11:27:50.508Z" }, + { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876, upload-time = "2024-12-18T11:27:53.54Z" }, + { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421, upload-time = 
"2024-12-18T11:27:55.409Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998, upload-time = "2024-12-18T11:27:57.252Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167, upload-time = "2024-12-18T11:27:59.146Z" }, + { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071, upload-time = "2024-12-18T11:28:02.625Z" }, + { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244, upload-time = "2024-12-18T11:28:04.442Z" }, + { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470, upload-time = "2024-12-18T11:28:07.679Z" }, + { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", 
size = 1992291, upload-time = "2024-12-18T11:28:10.297Z" }, + { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613, upload-time = "2024-12-18T11:28:13.362Z" }, + { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355, upload-time = "2024-12-18T11:28:16.587Z" }, + { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661, upload-time = "2024-12-18T11:28:18.407Z" }, + { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261, upload-time = "2024-12-18T11:28:21.471Z" }, + { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361, upload-time = "2024-12-18T11:28:23.53Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484, upload-time = "2024-12-18T11:28:25.391Z" }, + { url = 
"https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102, upload-time = "2024-12-18T11:28:28.593Z" }, + { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127, upload-time = "2024-12-18T11:28:30.346Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340, upload-time = "2024-12-18T11:28:32.521Z" }, + { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900, upload-time = "2024-12-18T11:28:34.507Z" }, + { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177, upload-time = "2024-12-18T11:28:36.488Z" }, + { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046, upload-time = "2024-12-18T11:28:39.409Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386, upload-time = "2024-12-18T11:28:41.221Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060, upload-time = "2024-12-18T11:28:44.709Z" }, + { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870, upload-time = "2024-12-18T11:28:46.839Z" }, + { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822, upload-time = "2024-12-18T11:28:48.896Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364, upload-time = "2024-12-18T11:28:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303, upload-time = "2024-12-18T11:28:54.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064, upload-time = "2024-12-18T11:28:56.074Z" }, + { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046, upload-time = "2024-12-18T11:28:58.107Z" }, + { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092, upload-time = "2024-12-18T11:29:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709, upload-time = "2024-12-18T11:29:03.193Z" }, + { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273, upload-time = "2024-12-18T11:29:05.306Z" }, + { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027, upload-time = "2024-12-18T11:29:07.294Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888, upload-time = "2024-12-18T11:29:09.249Z" }, + { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738, upload-time = "2024-12-18T11:29:11.23Z" }, + { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138, upload-time = "2024-12-18T11:29:16.396Z" }, + { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025, upload-time = "2024-12-18T11:29:20.25Z" }, + { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633, upload-time = "2024-12-18T11:29:23.877Z" }, + { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404, upload-time = 
"2024-12-18T11:29:25.872Z" }, + { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130, upload-time = "2024-12-18T11:29:29.252Z" }, + { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946, upload-time = "2024-12-18T11:29:31.338Z" }, + { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387, upload-time = "2024-12-18T11:29:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453, upload-time = "2024-12-18T11:29:35.533Z" }, + { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186, upload-time = "2024-12-18T11:29:37.649Z" }, + { url = "https://files.pythonhosted.org/packages/43/53/13e9917fc69c0a4aea06fd63ed6a8d6cda9cf140ca9584d49c1650b0ef5e/pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506", size = 1899595, upload-time = "2024-12-18T11:29:40.887Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/20/26c549249769ed84877f862f7bb93f89a6ee08b4bee1ed8781616b7fbb5e/pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320", size = 1775010, upload-time = "2024-12-18T11:29:44.823Z" }, + { url = "https://files.pythonhosted.org/packages/35/eb/8234e05452d92d2b102ffa1b56d801c3567e628fdc63f02080fdfc68fd5e/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145", size = 1830727, upload-time = "2024-12-18T11:29:46.904Z" }, + { url = "https://files.pythonhosted.org/packages/8f/df/59f915c8b929d5f61e5a46accf748a87110ba145156f9326d1a7d28912b2/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1", size = 1868393, upload-time = "2024-12-18T11:29:49.098Z" }, + { url = "https://files.pythonhosted.org/packages/d5/52/81cf4071dca654d485c277c581db368b0c95b2b883f4d7b736ab54f72ddf/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228", size = 2040300, upload-time = "2024-12-18T11:29:51.43Z" }, + { url = "https://files.pythonhosted.org/packages/9c/00/05197ce1614f5c08d7a06e1d39d5d8e704dc81971b2719af134b844e2eaf/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046", size = 2738785, upload-time = "2024-12-18T11:29:55.001Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a3/5f19bc495793546825ab160e530330c2afcee2281c02b5ffafd0b32ac05e/pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5", size = 1996493, upload-time = 
"2024-12-18T11:29:57.13Z" }, + { url = "https://files.pythonhosted.org/packages/ed/e8/e0102c2ec153dc3eed88aea03990e1b06cfbca532916b8a48173245afe60/pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a", size = 1998544, upload-time = "2024-12-18T11:30:00.681Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a3/4be70845b555bd80aaee9f9812a7cf3df81550bce6dadb3cfee9c5d8421d/pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d", size = 2007449, upload-time = "2024-12-18T11:30:02.985Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9f/b779ed2480ba355c054e6d7ea77792467631d674b13d8257085a4bc7dcda/pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9", size = 2129460, upload-time = "2024-12-18T11:30:06.55Z" }, + { url = "https://files.pythonhosted.org/packages/a0/f0/a6ab0681f6e95260c7fbf552874af7302f2ea37b459f9b7f00698f875492/pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da", size = 2159609, upload-time = "2024-12-18T11:30:09.428Z" }, + { url = "https://files.pythonhosted.org/packages/8a/2b/e1059506795104349712fbca647b18b3f4a7fd541c099e6259717441e1e0/pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b", size = 1819886, upload-time = "2024-12-18T11:30:11.777Z" }, + { url = "https://files.pythonhosted.org/packages/aa/6d/df49c17f024dfc58db0bacc7b03610058018dd2ea2eaf748ccbada4c3d06/pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad", size = 1980773, upload-time = "2024-12-18T11:30:14.828Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/97/3aef1ddb65c5ccd6eda9050036c956ff6ecbfe66cb7eb40f280f121a5bb0/pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993", size = 1896475, upload-time = "2024-12-18T11:30:18.316Z" }, + { url = "https://files.pythonhosted.org/packages/ad/d3/5668da70e373c9904ed2f372cb52c0b996426f302e0dee2e65634c92007d/pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308", size = 1772279, upload-time = "2024-12-18T11:30:20.547Z" }, + { url = "https://files.pythonhosted.org/packages/8a/9e/e44b8cb0edf04a2f0a1f6425a65ee089c1d6f9c4c2dcab0209127b6fdfc2/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4", size = 1829112, upload-time = "2024-12-18T11:30:23.255Z" }, + { url = "https://files.pythonhosted.org/packages/1c/90/1160d7ac700102effe11616e8119e268770f2a2aa5afb935f3ee6832987d/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf", size = 1866780, upload-time = "2024-12-18T11:30:25.742Z" }, + { url = "https://files.pythonhosted.org/packages/ee/33/13983426df09a36d22c15980008f8d9c77674fc319351813b5a2739b70f3/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76", size = 2037943, upload-time = "2024-12-18T11:30:28.036Z" }, + { url = "https://files.pythonhosted.org/packages/01/d7/ced164e376f6747e9158c89988c293cd524ab8d215ae4e185e9929655d5c/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118", size = 2740492, upload-time = "2024-12-18T11:30:30.412Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/1f/3dc6e769d5b7461040778816aab2b00422427bcaa4b56cc89e9c653b2605/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630", size = 1995714, upload-time = "2024-12-18T11:30:34.358Z" }, + { url = "https://files.pythonhosted.org/packages/07/d7/a0bd09bc39283530b3f7c27033a814ef254ba3bd0b5cfd040b7abf1fe5da/pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54", size = 1997163, upload-time = "2024-12-18T11:30:37.979Z" }, + { url = "https://files.pythonhosted.org/packages/2d/bb/2db4ad1762e1c5699d9b857eeb41959191980de6feb054e70f93085e1bcd/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f", size = 2005217, upload-time = "2024-12-18T11:30:40.367Z" }, + { url = "https://files.pythonhosted.org/packages/53/5f/23a5a3e7b8403f8dd8fc8a6f8b49f6b55c7d715b77dcf1f8ae919eeb5628/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362", size = 2127899, upload-time = "2024-12-18T11:30:42.737Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ae/aa38bb8dd3d89c2f1d8362dd890ee8f3b967330821d03bbe08fa01ce3766/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96", size = 2155726, upload-time = "2024-12-18T11:30:45.279Z" }, + { url = "https://files.pythonhosted.org/packages/98/61/4f784608cc9e98f70839187117ce840480f768fed5d386f924074bf6213c/pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e", size = 1817219, upload-time = "2024-12-18T11:30:47.718Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/82/bb16a68e4a1a858bb3768c2c8f1ff8d8978014e16598f001ea29a25bf1d1/pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67", size = 1985382, upload-time = "2024-12-18T11:30:51.871Z" }, + { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159, upload-time = "2024-12-18T11:30:54.382Z" }, + { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331, upload-time = "2024-12-18T11:30:58.178Z" }, + { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467, upload-time = "2024-12-18T11:31:00.6Z" }, + { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797, upload-time = "2024-12-18T11:31:07.243Z" }, + { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839, upload-time = "2024-12-18T11:31:09.775Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861, upload-time = "2024-12-18T11:31:13.469Z" }, + { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582, upload-time = "2024-12-18T11:31:17.423Z" }, + { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985, upload-time = "2024-12-18T11:31:19.901Z" }, + { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715, upload-time = "2024-12-18T11:31:22.821Z" }, + { url = "https://files.pythonhosted.org/packages/29/0e/dcaea00c9dbd0348b723cae82b0e0c122e0fa2b43fa933e1622fd237a3ee/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656", size = 1891733, upload-time = "2024-12-18T11:31:26.876Z" }, + { url = "https://files.pythonhosted.org/packages/86/d3/e797bba8860ce650272bda6383a9d8cad1d1c9a75a640c9d0e848076f85e/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278", size = 1768375, upload-time = "2024-12-18T11:31:29.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/f7/f847b15fb14978ca2b30262548f5fc4872b2724e90f116393eb69008299d/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb", size = 1822307, upload-time = "2024-12-18T11:31:33.123Z" }, + { url = "https://files.pythonhosted.org/packages/9c/63/ed80ec8255b587b2f108e514dc03eed1546cd00f0af281e699797f373f38/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd", size = 1979971, upload-time = "2024-12-18T11:31:35.755Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6d/6d18308a45454a0de0e975d70171cadaf454bc7a0bf86b9c7688e313f0bb/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc", size = 1987616, upload-time = "2024-12-18T11:31:38.534Z" }, + { url = "https://files.pythonhosted.org/packages/82/8a/05f8780f2c1081b800a7ca54c1971e291c2d07d1a50fb23c7e4aef4ed403/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b", size = 1998943, upload-time = "2024-12-18T11:31:41.853Z" }, + { url = "https://files.pythonhosted.org/packages/5e/3e/fe5b6613d9e4c0038434396b46c5303f5ade871166900b357ada4766c5b7/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b", size = 2116654, upload-time = "2024-12-18T11:31:44.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/ad/28869f58938fad8cc84739c4e592989730bfb69b7c90a8fff138dff18e1e/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2", size = 2152292, upload-time = "2024-12-18T11:31:48.613Z" 
}, + { url = "https://files.pythonhosted.org/packages/a1/0c/c5c5cd3689c32ed1fe8c5d234b079c12c281c051759770c05b8bed6412b5/pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35", size = 2004961, upload-time = "2024-12-18T11:31:52.446Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.4" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/3d/9b8ca77b0f76fcdbf8bc6b72474e264283f461284ca84ac3fde570c6c49a/pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e", size = 2111197, upload-time = "2025-10-14T10:19:43.303Z" }, + { url = "https://files.pythonhosted.org/packages/59/92/b7b0fe6ed4781642232755cb7e56a86e2041e1292f16d9ae410a0ccee5ac/pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b", size = 1917909, upload-time = "2025-10-14T10:19:45.194Z" }, + { url = "https://files.pythonhosted.org/packages/52/8c/3eb872009274ffa4fb6a9585114e161aa1a0915af2896e2d441642929fe4/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd", size = 1969905, upload-time = "2025-10-14T10:19:46.567Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/21/35adf4a753bcfaea22d925214a0c5b880792e3244731b3f3e6fec0d124f7/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945", size = 2051938, upload-time = "2025-10-14T10:19:48.237Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/cdf7d126825e36d6e3f1eccf257da8954452934ede275a8f390eac775e89/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706", size = 2250710, upload-time = "2025-10-14T10:19:49.619Z" }, + { url = "https://files.pythonhosted.org/packages/2e/1c/af1e6fd5ea596327308f9c8d1654e1285cc3d8de0d584a3c9d7705bf8a7c/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba", size = 2367445, upload-time = "2025-10-14T10:19:51.269Z" }, + { url = "https://files.pythonhosted.org/packages/d3/81/8cece29a6ef1b3a92f956ea6da6250d5b2d2e7e4d513dd3b4f0c7a83dfea/pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b", size = 2072875, upload-time = "2025-10-14T10:19:52.671Z" }, + { url = "https://files.pythonhosted.org/packages/e3/37/a6a579f5fc2cd4d5521284a0ab6a426cc6463a7b3897aeb95b12f1ba607b/pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d", size = 2191329, upload-time = "2025-10-14T10:19:54.214Z" }, + { url = "https://files.pythonhosted.org/packages/ae/03/505020dc5c54ec75ecba9f41119fd1e48f9e41e4629942494c4a8734ded1/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700", size = 2151658, upload-time = 
"2025-10-14T10:19:55.843Z" }, + { url = "https://files.pythonhosted.org/packages/cb/5d/2c0d09fb53aa03bbd2a214d89ebfa6304be7df9ed86ee3dc7770257f41ee/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6", size = 2316777, upload-time = "2025-10-14T10:19:57.607Z" }, + { url = "https://files.pythonhosted.org/packages/ea/4b/c2c9c8f5e1f9c864b57d08539d9d3db160e00491c9f5ee90e1bfd905e644/pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9", size = 2320705, upload-time = "2025-10-14T10:19:59.016Z" }, + { url = "https://files.pythonhosted.org/packages/28/c3/a74c1c37f49c0a02c89c7340fafc0ba816b29bd495d1a31ce1bdeacc6085/pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57", size = 1975464, upload-time = "2025-10-14T10:20:00.581Z" }, + { url = "https://files.pythonhosted.org/packages/d6/23/5dd5c1324ba80303368f7569e2e2e1a721c7d9eb16acb7eb7b7f85cb1be2/pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc", size = 2024497, upload-time = "2025-10-14T10:20:03.018Z" }, + { url = "https://files.pythonhosted.org/packages/62/4c/f6cbfa1e8efacd00b846764e8484fe173d25b8dab881e277a619177f3384/pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80", size = 2109062, upload-time = "2025-10-14T10:20:04.486Z" }, + { url = "https://files.pythonhosted.org/packages/21/f8/40b72d3868896bfcd410e1bd7e516e762d326201c48e5b4a06446f6cf9e8/pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae", size = 1916301, upload-time = "2025-10-14T10:20:06.857Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/4d/d203dce8bee7faeca791671c88519969d98d3b4e8f225da5b96dad226fc8/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827", size = 1968728, upload-time = "2025-10-14T10:20:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/65/f5/6a66187775df87c24d526985b3a5d78d861580ca466fbd9d4d0e792fcf6c/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f", size = 2050238, upload-time = "2025-10-14T10:20:09.766Z" }, + { url = "https://files.pythonhosted.org/packages/5e/b9/78336345de97298cf53236b2f271912ce11f32c1e59de25a374ce12f9cce/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def", size = 2249424, upload-time = "2025-10-14T10:20:11.732Z" }, + { url = "https://files.pythonhosted.org/packages/99/bb/a4584888b70ee594c3d374a71af5075a68654d6c780369df269118af7402/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2", size = 2366047, upload-time = "2025-10-14T10:20:13.647Z" }, + { url = "https://files.pythonhosted.org/packages/5f/8d/17fc5de9d6418e4d2ae8c675f905cdafdc59d3bf3bf9c946b7ab796a992a/pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8", size = 2071163, upload-time = "2025-10-14T10:20:15.307Z" }, + { url = "https://files.pythonhosted.org/packages/54/e7/03d2c5c0b8ed37a4617430db68ec5e7dbba66358b629cd69e11b4d564367/pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265", size = 2190585, 
upload-time = "2025-10-14T10:20:17.3Z" }, + { url = "https://files.pythonhosted.org/packages/be/fc/15d1c9fe5ad9266a5897d9b932b7f53d7e5cfc800573917a2c5d6eea56ec/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c", size = 2150109, upload-time = "2025-10-14T10:20:19.143Z" }, + { url = "https://files.pythonhosted.org/packages/26/ef/e735dd008808226c83ba56972566138665b71477ad580fa5a21f0851df48/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a", size = 2315078, upload-time = "2025-10-14T10:20:20.742Z" }, + { url = "https://files.pythonhosted.org/packages/90/00/806efdcf35ff2ac0f938362350cd9827b8afb116cc814b6b75cf23738c7c/pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e", size = 2318737, upload-time = "2025-10-14T10:20:22.306Z" }, + { url = "https://files.pythonhosted.org/packages/41/7e/6ac90673fe6cb36621a2283552897838c020db343fa86e513d3f563b196f/pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03", size = 1974160, upload-time = "2025-10-14T10:20:23.817Z" }, + { url = "https://files.pythonhosted.org/packages/e0/9d/7c5e24ee585c1f8b6356e1d11d40ab807ffde44d2db3b7dfd6d20b09720e/pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e", size = 2021883, upload-time = "2025-10-14T10:20:25.48Z" }, + { url = "https://files.pythonhosted.org/packages/33/90/5c172357460fc28b2871eb4a0fb3843b136b429c6fa827e4b588877bf115/pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db", size = 1968026, upload-time = "2025-10-14T10:20:27.039Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { 
url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, + { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, + { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, + { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, + { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, + { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, + { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, + { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, + { url 
= "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, + { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, + { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, + { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, + { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, + { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, + { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, + { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, + { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, + { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, + { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = 
"2025-10-14T10:21:50.63Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, + { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, + { url = 
"https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, + { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, + { url = "https://files.pythonhosted.org/packages/2c/36/f86d582be5fb47d4014506cd9ddd10a3979b6d0f2d237aa6ad3e7033b3ea/pydantic_core-2.41.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:646e76293345954acea6966149683047b7b2ace793011922208c8e9da12b0062", size = 2112444, upload-time = "2025-10-14T10:22:16.165Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e5/63c521dc2dd106ba6b5941c080617ea9db252f8a7d5625231e9d761bc28c/pydantic_core-2.41.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc8e85a63085a137d286e2791037f5fdfff0aabb8b899483ca9c496dd5797338", size = 1938218, upload-time = "2025-10-14T10:22:19.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/56/c84b638a3e6e9f5a612b9f5abdad73182520423de43669d639ed4f14b011/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c622c8f859a17c156492783902d8370ac7e121a611bd6fe92cc71acf9ee8d", size = 1971449, upload-time = "2025-10-14T10:22:21.567Z" }, + { url = "https://files.pythonhosted.org/packages/99/c6/e974aade34fc7a0248fdfd0a373d62693502a407c596ab3470165e38183c/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1e2906efb1031a532600679b424ef1d95d9f9fb507f813951f23320903adbd7", size = 2054023, upload-time = "2025-10-14T10:22:24.229Z" }, + { url = "https://files.pythonhosted.org/packages/4f/91/2507dda801f50980a38d1353c313e8f51349a42b008e63a4e45bf4620562/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04e2f7f8916ad3ddd417a7abdd295276a0bf216993d9318a5d61cc058209166", size = 2251614, upload-time = "2025-10-14T10:22:26.498Z" }, + { url = "https://files.pythonhosted.org/packages/b2/ad/05d886bc96938f4d31bed24e8d3fc3496d9aea7e77bcff6e4b93127c6de7/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df649916b81822543d1c8e0e1d079235f68acdc7d270c911e8425045a8cfc57e", size = 2378807, upload-time = "2025-10-14T10:22:28.733Z" }, + { url = "https://files.pythonhosted.org/packages/6a/0a/d26e1bb9a80b9fc12cc30d9288193fbc9e60a799e55843804ee37bd38a9c/pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c529f862fdba70558061bb936fe00ddbaaa0c647fd26e4a4356ef1d6561891", size = 2076891, upload-time = "2025-10-14T10:22:30.853Z" }, + { url = "https://files.pythonhosted.org/packages/d9/66/af014e3a294d9933ebfecf11a5d858709014bd2315fa9616195374dd82f0/pydantic_core-2.41.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc3b4c5a1fd3a311563ed866c2c9b62da06cb6398bee186484ce95c820db71cb", size = 2192179, upload-time = 
"2025-10-14T10:22:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3e/79783f97024037d0ea6e1b3ebcd761463a925199e04ce2625727e9f27d06/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e0fc40d84448f941df9b3334c4b78fe42f36e3bf631ad54c3047a0cdddc2514", size = 2153067, upload-time = "2025-10-14T10:22:35.792Z" }, + { url = "https://files.pythonhosted.org/packages/b3/97/ea83b0f87d9e742405fb687d5682e7a26334eef2c82a2de06bfbdc305fab/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:44e7625332683b6c1c8b980461475cde9595eff94447500e80716db89b0da005", size = 2319048, upload-time = "2025-10-14T10:22:38.144Z" }, + { url = "https://files.pythonhosted.org/packages/64/4a/36d8c966a0b086362ac10a7ee75978ed15c5f2dfdfc02a1578d19d3802fb/pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:170ee6835f6c71081d031ef1c3b4dc4a12b9efa6a9540f93f95b82f3c7571ae8", size = 2321830, upload-time = "2025-10-14T10:22:40.337Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6e/d80cc4909dde5f6842861288aa1a7181e7afbfc50940c862ed2848df15bd/pydantic_core-2.41.4-cp39-cp39-win32.whl", hash = "sha256:3adf61415efa6ce977041ba9745183c0e1f637ca849773afa93833e04b163feb", size = 1976706, upload-time = "2025-10-14T10:22:42.61Z" }, + { url = "https://files.pythonhosted.org/packages/29/ee/5bda8d960d4a8b24a7eeb8a856efa9c865a7a6cab714ed387b29507dc278/pydantic_core-2.41.4-cp39-cp39-win_amd64.whl", hash = "sha256:a238dd3feee263eeaeb7dc44aea4ba1364682c4f9f9467e6af5596ba322c2332", size = 2027640, upload-time = "2025-10-14T10:22:44.907Z" }, + { url = "https://files.pythonhosted.org/packages/b0/12/5ba58daa7f453454464f92b3ca7b9d7c657d8641c48e370c3ebc9a82dd78/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b", size = 2122139, upload-time = "2025-10-14T10:22:47.288Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/fb/6860126a77725c3108baecd10fd3d75fec25191d6381b6eb2ac660228eac/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42", size = 1936674, upload-time = "2025-10-14T10:22:49.555Z" }, + { url = "https://files.pythonhosted.org/packages/de/be/57dcaa3ed595d81f8757e2b44a38240ac5d37628bce25fb20d02c7018776/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee", size = 1956398, upload-time = "2025-10-14T10:22:52.19Z" }, + { url = "https://files.pythonhosted.org/packages/2f/1d/679a344fadb9695f1a6a294d739fbd21d71fa023286daeea8c0ed49e7c2b/pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c", size = 2138674, upload-time = "2025-10-14T10:22:54.499Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, + { url = "https://files.pythonhosted.org/packages/5d/d4/912e976a2dd0b49f31c98a060ca90b353f3b73ee3ea2fd0030412f6ac5ec/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00", size = 2106739, upload-time = "2025-10-14T10:23:06.934Z" }, + { url = "https://files.pythonhosted.org/packages/71/f0/66ec5a626c81eba326072d6ee2b127f8c139543f1bf609b4842978d37833/pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9", size = 1932549, upload-time = "2025-10-14T10:23:09.24Z" }, + { url = "https://files.pythonhosted.org/packages/c4/af/625626278ca801ea0a658c2dcf290dc9f21bb383098e99e7c6a029fccfc0/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2", size = 2135093, upload-time = "2025-10-14T10:23:11.626Z" }, + { url = "https://files.pythonhosted.org/packages/20/f6/2fba049f54e0f4975fef66be654c597a1d005320fa141863699180c7697d/pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258", size = 2187971, upload-time = "2025-10-14T10:23:14.437Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/80/65ab839a2dfcd3b949202f9d920c34f9de5a537c3646662bdf2f7d999680/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347", size = 2147939, upload-time = "2025-10-14T10:23:16.831Z" }, + { url = "https://files.pythonhosted.org/packages/44/58/627565d3d182ce6dfda18b8e1c841eede3629d59c9d7cbc1e12a03aeb328/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa", size = 2311400, upload-time = "2025-10-14T10:23:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/24/06/8a84711162ad5a5f19a88cead37cca81b4b1f294f46260ef7334ae4f24d3/pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a", size = 2316840, upload-time = "2025-10-14T10:23:21.738Z" }, + { url = "https://files.pythonhosted.org/packages/aa/8b/b7bb512a4682a2f7fbfae152a755d37351743900226d29bd953aaf870eaa/pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d", size = 2149135, upload-time = "2025-10-14T10:23:24.379Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7d/138e902ed6399b866f7cfe4435d22445e16fff888a1c00560d9dc79a780f/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5", size = 2104721, upload-time = "2025-10-14T10:23:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/47/13/0525623cf94627f7b53b4c2034c81edc8491cbfc7c28d5447fa318791479/pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2", size = 1931608, upload-time = "2025-10-14T10:23:29.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/f9/744bc98137d6ef0a233f808bfc9b18cf94624bf30836a18d3b05d08bf418/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd", size = 2132986, upload-time = "2025-10-14T10:23:32.057Z" }, + { url = "https://files.pythonhosted.org/packages/17/c8/629e88920171173f6049386cc71f893dff03209a9ef32b4d2f7e7c264bcf/pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c", size = 2187516, upload-time = "2025-10-14T10:23:34.871Z" }, + { url = "https://files.pythonhosted.org/packages/2e/0f/4f2734688d98488782218ca61bcc118329bf5de05bb7fe3adc7dd79b0b86/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405", size = 2146146, upload-time = "2025-10-14T10:23:37.342Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f2/ab385dbd94a052c62224b99cf99002eee99dbec40e10006c78575aead256/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8", size = 2311296, upload-time = "2025-10-14T10:23:40.145Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/e4f12afe1beeb9823bba5375f8f258df0cc61b056b0195fb1cf9f62a1a58/pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308", size = 2315386, upload-time = "2025-10-14T10:23:42.624Z" }, + { url = "https://files.pythonhosted.org/packages/48/f7/925f65d930802e3ea2eb4d5afa4cb8730c8dc0d2cb89a59dc4ed2fcb2d74/pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f", size = 2147775, upload-time = "2025-10-14T10:23:45.406Z" }, +] + +[[package]] 
+name = "pydot" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyparsing", version = "3.1.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "pyparsing", version = "3.2.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/35/b17cb89ff865484c6a20ef46bf9d95a5f07328292578de0b295f4a6beec2/pydot-4.0.1.tar.gz", hash = "sha256:c2148f681c4a33e08bf0e26a9e5f8e4099a82e0e2a068098f32ce86577364ad5", size = 162594, upload-time = "2025-06-17T20:09:56.454Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/32/a7125fb28c4261a627f999d5fb4afff25b523800faed2c30979949d6facd/pydot-4.0.1-py3-none-any.whl", hash = "sha256:869c0efadd2708c0be1f916eb669f3d664ca684bc57ffb7ecc08e70d5e93fee6", size = 37087, upload-time = "2025-06-17T20:09:55.25Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.1.4" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/83/08/13f3bce01b2061f2bbd582c9df82723de943784cf719a35ac886c652043a/pyparsing-3.1.4.tar.gz", hash = 
"sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032", size = 900231, upload-time = "2024-08-25T15:00:47.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/0c/0e3c05b1c87bb6a1c76d281b0f35e78d2d80ac91b5f8f524cebf77f51049/pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c", size = 104100, upload-time = "2024-08-25T15:00:45.361Z" }, +] + +[[package]] +name = "pyparsing" +version = "3.2.5" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz", hash = "sha256:2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", size = 1099274, upload-time = "2025-09-21T04:11:06.277Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl", hash = "sha256:e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", size = 113890, upload-time = "2025-09-21T04:11:04.117Z" }, +] + +[[package]] +name = "pytest" +version = "8.3.5" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version < '3.9' and sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.9'" }, + { name = "iniconfig", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "packaging", marker = "python_full_version < '3.9'" }, + { name = "pluggy", version = "1.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "tomli", marker = "python_full_version < '3.9'" }, +] +sdist = { 
url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891, upload-time = "2025-03-02T12:54:54.503Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634, upload-time = "2025-03-02T12:54:52.069Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "colorama", marker = "python_full_version >= '3.9' and sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, + { name = "iniconfig", version = "2.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "iniconfig", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "packaging", marker = "python_full_version >= '3.9'" }, + { name = "pluggy", version = "1.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "pygments", marker = "python_full_version >= '3.9'" }, + { name = "tomli", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "0.24.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "pytest", version = "8.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/c6cf50ce320cf8611df7a1254d86233b3df7cc07f9b5f5cbcb82e08aa534/pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276", size = 49855, upload-time = "2024-08-22T08:03:18.145Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/31/6607dab48616902f76885dfcf62c08d929796fc3b2d2318faf9fd54dbed9/pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b", size = 18024, upload-time = "2024-08-22T08:03:15.536Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, + { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9' and python_full_version < '3.13'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "coverage", version = "7.6.1", source = { registry = "https://pypi.org/simple" }, extra = ["toml"], marker = "python_full_version < '3.9'" }, + { name = "pytest", version = "8.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/67/00efc8d11b630c56f15f4ad9c7f9223f1e5ec275aaae3fa9118c6a223ad2/pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857", size = 63042, upload-time = "2024-03-24T20:16:34.856Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/3a/af5b4fa5961d9a1e6237b530eb87dd04aea6eb83da09d2a4073d81b54ccf/pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652", size = 21990, upload-time = "2024-03-24T20:16:32.444Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "coverage", version = "7.10.7", source = { registry = "https://pypi.org/simple" }, extra = ["toml"], 
marker = "python_full_version == '3.9.*'" }, + { name = "coverage", version = "7.11.0", source = { registry = "https://pypi.org/simple" }, extra = ["toml"], marker = "python_full_version >= '3.10'" }, + { name = "pluggy", version = "1.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "pytokens" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d4/c2/dbadcdddb412a267585459142bfd7cc241e6276db69339353ae6e241ab2b/pytokens-0.2.0.tar.gz", hash = "sha256:532d6421364e5869ea57a9523bf385f02586d4662acbcc0342afd69511b4dd43", size = 15368, upload-time = "2025-10-15T08:02:42.738Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/5a/c269ea6b348b6f2c32686635df89f32dbe05df1088dd4579302a6f8f99af/pytokens-0.2.0-py3-none-any.whl", hash = "sha256:74d4b318c67f4295c13782ddd9abcb7e297ec5630ad060eb90abf7ebbefe59f8", size = 12038, upload-time = "2025-10-15T08:02:41.694Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/a2/09f67a3589cb4320fb5ce90d3fd4c9752636b8b6ad8f34b54d76c5a54693/PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f", size = 186824, upload-time = "2025-09-29T20:27:35.918Z" }, + { url = "https://files.pythonhosted.org/packages/02/72/d972384252432d57f248767556ac083793292a4adf4e2d85dfe785ec2659/PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4", size = 795069, upload-time = "2025-09-29T20:27:38.15Z" }, + { url = "https://files.pythonhosted.org/packages/a7/3b/6c58ac0fa7c4e1b35e48024eb03d00817438310447f93ef4431673c24138/PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3", size = 862585, upload-time = "2025-09-29T20:27:39.715Z" }, + { url 
= "https://files.pythonhosted.org/packages/25/a2/b725b61ac76a75583ae7104b3209f75ea44b13cfd026aa535ece22b7f22e/PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6", size = 806018, upload-time = "2025-09-29T20:27:41.444Z" }, + { url = "https://files.pythonhosted.org/packages/6f/b0/b2227677b2d1036d84f5ee95eb948e7af53d59fe3e4328784e4d290607e0/PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369", size = 802822, upload-time = "2025-09-29T20:27:42.885Z" }, + { url = "https://files.pythonhosted.org/packages/99/a5/718a8ea22521e06ef19f91945766a892c5ceb1855df6adbde67d997ea7ed/PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295", size = 143744, upload-time = "2025-09-29T20:27:44.487Z" }, + { url = "https://files.pythonhosted.org/packages/76/b2/2b69cee94c9eb215216fc05778675c393e3aa541131dc910df8e52c83776/PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b", size = 160082, upload-time = "2025-09-29T20:27:46.049Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, + { url = 
"https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/62/67fc8e68a75f738c9200422bf65693fb79a4cd0dc5b23310e5202e978090/pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da", size = 184450, upload-time = "2025-09-25T21:33:00.618Z" }, + { url = "https://files.pythonhosted.org/packages/ae/92/861f152ce87c452b11b9d0977952259aa7df792d71c1053365cc7b09cc08/pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917", size = 174319, upload-time = "2025-09-25T21:33:02.086Z" }, + { url = "https://files.pythonhosted.org/packages/d0/cd/f0cfc8c74f8a030017a2b9c771b7f47e5dd702c3e28e5b2071374bda2948/pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9", size = 737631, upload-time = "2025-09-25T21:33:03.25Z" }, + { url = "https://files.pythonhosted.org/packages/ef/b2/18f2bd28cd2055a79a46c9b0895c0b3d987ce40ee471cecf58a1a0199805/pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5", size = 836795, upload-time = "2025-09-25T21:33:05.014Z" }, + { url = "https://files.pythonhosted.org/packages/73/b9/793686b2d54b531203c160ef12bec60228a0109c79bae6c1277961026770/pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a", size = 750767, upload-time = "2025-09-25T21:33:06.398Z" }, + { url = "https://files.pythonhosted.org/packages/a9/86/a137b39a611def2ed78b0e66ce2fe13ee701a07c07aebe55c340ed2a050e/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926", size = 727982, upload-time = "2025-09-25T21:33:08.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/dd/62/71c27c94f457cf4418ef8ccc71735324c549f7e3ea9d34aba50874563561/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7", size = 755677, upload-time = "2025-09-25T21:33:09.876Z" }, + { url = "https://files.pythonhosted.org/packages/29/3d/6f5e0d58bd924fb0d06c3a6bad00effbdae2de5adb5cda5648006ffbd8d3/pyyaml-6.0.3-cp39-cp39-win32.whl", hash = "sha256:1ebe39cb5fc479422b83de611d14e2c0d3bb2a18bbcb01f229ab3cfbd8fee7a0", size = 142592, upload-time = "2025-09-25T21:33:10.983Z" }, + { url = "https://files.pythonhosted.org/packages/f0/0c/25113e0b5e103d7f1490c0e947e303fe4a696c10b501dea7a9f49d4e876c/pyyaml-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:2e71d11abed7344e42a8849600193d15b6def118602c4c176f748e4583246007", size = 158777, upload-time = "2025-09-25T21:33:15.55Z" }, +] + +[[package]] +name = "redis-release-cli" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "aiohttp", version = "3.10.11", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "aiohttp", version = "3.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "boto3", version = "1.37.38", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "boto3", version = "1.40.62", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "py-trees", version = "2.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "py-trees", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "pydantic", version = "2.10.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "pydantic", version = "2.12.3", source 
= { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "pyyaml" }, + { name = "requests", version = "2.32.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "requests", version = "2.32.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "rich" }, + { name = "typer" }, +] + +[package.optional-dependencies] +dev = [ + { name = "black", version = "24.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "black", version = "25.9.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "isort", version = "5.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "isort", version = "6.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "isort", version = "7.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "mypy", version = "1.14.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "mypy", version = "1.18.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "pre-commit", version = "3.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "pre-commit", version = "4.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "pytest", version = "8.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "pytest-asyncio", version = "0.24.0", 
source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "pytest-asyncio", version = "1.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "pytest-cov", version = "5.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "pytest-cov", version = "7.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiohttp", specifier = ">=3.8.0" }, + { name = "black", marker = "extra == 'dev'", specifier = ">=23.0.0" }, + { name = "boto3", specifier = ">=1.26.0" }, + { name = "isort", marker = "extra == 'dev'", specifier = ">=5.12.0" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.0.0" }, + { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=3.0.0" }, + { name = "py-trees", specifier = ">=2.2,<3.0" }, + { name = "pydantic", specifier = ">=2.0.0" }, + { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" }, + { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" }, + { name = "pytest-cov", marker = "extra == 'dev'", specifier = ">=4.0.0" }, + { name = "pyyaml", specifier = ">=6.0.3" }, + { name = "requests", specifier = ">=2.28.0" }, + { name = "rich", specifier = ">=13.0.0" }, + { name = "typer", extras = ["all"], specifier = ">=0.9.0" }, +] +provides-extras = ["dev"] + +[[package]] +name = "requests" +version = "2.32.4" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "certifi", marker = "python_full_version < '3.9'" }, + { name = "charset-normalizer", marker = "python_full_version < '3.9'" }, + { name = "idna", marker = "python_full_version < '3.9'" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "certifi", marker = "python_full_version >= '3.9'" }, + { name = "charset-normalizer", marker = "python_full_version >= '3.9'" }, + { name = "idna", marker = "python_full_version >= '3.9'" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "urllib3", version = "2.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rich" +version = "14.2.0" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py", version = "3.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "markdown-it-py", version = "4.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" }, +] + +[[package]] +name = "s3transfer" +version = "0.11.5" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "botocore", version = "1.37.38", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/2b/5c9562795c2eb2b5f63536961754760c25bf0f34af93d36aa28dea2fb303/s3transfer-0.11.5.tar.gz", hash = "sha256:8c8aad92784779ab8688a61aefff3e28e9ebdce43142808eaa3f0b0f402f68b7", size = 149107, upload-time = "2025-04-17T19:23:19.051Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/39/13402e323666d17850eca87e4cd6ecfcf9fd7809cac9efdcce10272fc29d/s3transfer-0.11.5-py3-none-any.whl", hash = "sha256:757af0f2ac150d3c75bc4177a32355c3862a98d20447b69a0161812992fe0bd4", size = 84782, upload-time = "2025-04-17T19:23:17.516Z" }, +] + +[[package]] +name = "s3transfer" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } 
+resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +dependencies = [ + { name = "botocore", version = "1.40.62", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/74/8d69dcb7a9efe8baa2046891735e5dfe433ad558ae23d9e3c14c633d1d58/s3transfer-0.14.0.tar.gz", hash = "sha256:eff12264e7c8b4985074ccce27a3b38a485bb7f7422cc8046fee9be4983e4125", size = 151547, upload-time = "2025-09-09T19:23:31.089Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/f0/ae7ca09223a81a1d890b2557186ea015f6e0502e9b8cb8e1813f1d8cfa4e/s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:ea3b790c7077558ed1f02a3072fb3cb992bbbd253392f4b6e9e8976941c7d456", size = 85712, upload-time = "2025-09-09T19:23:30.041Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = 
"2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b7/40f36368fcabc518bb11c8f06379a0fd631985046c038aca08c6d6a43c6e/tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac", size = 154891, upload-time = "2025-10-08T22:01:09.082Z" }, + { url = "https://files.pythonhosted.org/packages/f9/3f/d9dd692199e3b3aab2e4e4dd948abd0f790d9ded8cd10cbaae276a898434/tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22", size = 148796, upload-time = "2025-10-08T22:01:10.266Z" }, + { url = 
"https://files.pythonhosted.org/packages/60/83/59bff4996c2cf9f9387a0f5a3394629c7efa5ef16142076a23a90f1955fa/tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f", size = 242121, upload-time = "2025-10-08T22:01:11.332Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/7c5119ff39de8693d6baab6c0b6dcb556d192c165596e9fc231ea1052041/tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52", size = 250070, upload-time = "2025-10-08T22:01:12.498Z" }, + { url = "https://files.pythonhosted.org/packages/45/12/ad5126d3a278f27e6701abde51d342aa78d06e27ce2bb596a01f7709a5a2/tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8", size = 245859, upload-time = "2025-10-08T22:01:13.551Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a1/4d6865da6a71c603cfe6ad0e6556c73c76548557a8d658f9e3b142df245f/tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6", size = 250296, upload-time = "2025-10-08T22:01:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/a0/b7/a7a7042715d55c9ba6e8b196d65d2cb662578b4d8cd17d882d45322b0d78/tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876", size = 97124, upload-time = "2025-10-08T22:01:15.629Z" }, + { url = "https://files.pythonhosted.org/packages/06/1e/f22f100db15a68b520664eb3328fb0ae4e90530887928558112c8d1f4515/tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878", size = 107698, upload-time = "2025-10-08T22:01:16.51Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/48/06ee6eabe4fdd9ecd48bf488f4ac783844fd777f547b8d1b61c11939974e/tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b", size = 154819, upload-time = "2025-10-08T22:01:17.964Z" }, + { url = "https://files.pythonhosted.org/packages/f1/01/88793757d54d8937015c75dcdfb673c65471945f6be98e6a0410fba167ed/tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae", size = 148766, upload-time = "2025-10-08T22:01:18.959Z" }, + { url = "https://files.pythonhosted.org/packages/42/17/5e2c956f0144b812e7e107f94f1cc54af734eb17b5191c0bbfb72de5e93e/tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b", size = 240771, upload-time = "2025-10-08T22:01:20.106Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f4/0fbd014909748706c01d16824eadb0307115f9562a15cbb012cd9b3512c5/tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf", size = 248586, upload-time = "2025-10-08T22:01:21.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/77/fed85e114bde5e81ecf9bc5da0cc69f2914b38f4708c80ae67d0c10180c5/tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f", size = 244792, upload-time = "2025-10-08T22:01:22.417Z" }, + { url = "https://files.pythonhosted.org/packages/55/92/afed3d497f7c186dc71e6ee6d4fcb0acfa5f7d0a1a2878f8beae379ae0cc/tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05", size = 248909, upload-time = "2025-10-08T22:01:23.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/f8/84/ef50c51b5a9472e7265ce1ffc7f24cd4023d289e109f669bdb1553f6a7c2/tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606", size = 96946, upload-time = "2025-10-08T22:01:24.893Z" }, + { url = "https://files.pythonhosted.org/packages/b2/b7/718cd1da0884f281f95ccfa3a6cc572d30053cba64603f79d431d3c9b61b/tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999", size = 107705, upload-time = "2025-10-08T22:01:26.153Z" }, + { url = "https://files.pythonhosted.org/packages/19/94/aeafa14a52e16163008060506fcb6aa1949d13548d13752171a755c65611/tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e", size = 154244, upload-time = "2025-10-08T22:01:27.06Z" }, + { url = "https://files.pythonhosted.org/packages/db/e4/1e58409aa78eefa47ccd19779fc6f36787edbe7d4cd330eeeedb33a4515b/tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3", size = 148637, upload-time = "2025-10-08T22:01:28.059Z" }, + { url = "https://files.pythonhosted.org/packages/26/b6/d1eccb62f665e44359226811064596dd6a366ea1f985839c566cd61525ae/tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc", size = 241925, upload-time = "2025-10-08T22:01:29.066Z" }, + { url = "https://files.pythonhosted.org/packages/70/91/7cdab9a03e6d3d2bb11beae108da5bdc1c34bdeb06e21163482544ddcc90/tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0", size = 249045, upload-time = "2025-10-08T22:01:31.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/1b/8c26874ed1f6e4f1fcfeb868db8a794cbe9f227299402db58cfcc858766c/tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879", size = 245835, upload-time = "2025-10-08T22:01:32.989Z" }, + { url = "https://files.pythonhosted.org/packages/fd/42/8e3c6a9a4b1a1360c1a2a39f0b972cef2cc9ebd56025168c4137192a9321/tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005", size = 253109, upload-time = "2025-10-08T22:01:34.052Z" }, + { url = "https://files.pythonhosted.org/packages/22/0c/b4da635000a71b5f80130937eeac12e686eefb376b8dee113b4a582bba42/tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463", size = 97930, upload-time = "2025-10-08T22:01:35.082Z" }, + { url = "https://files.pythonhosted.org/packages/b9/74/cb1abc870a418ae99cd5c9547d6bce30701a954e0e721821df483ef7223c/tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8", size = 107964, upload-time = "2025-10-08T22:01:36.057Z" }, + { url = "https://files.pythonhosted.org/packages/54/78/5c46fff6432a712af9f792944f4fcd7067d8823157949f4e40c56b8b3c83/tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77", size = 163065, upload-time = "2025-10-08T22:01:37.27Z" }, + { url = "https://files.pythonhosted.org/packages/39/67/f85d9bd23182f45eca8939cd2bc7050e1f90c41f4a2ecbbd5963a1d1c486/tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf", size = 159088, upload-time = "2025-10-08T22:01:38.235Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/5a/4b546a0405b9cc0659b399f12b6adb750757baf04250b148d3c5059fc4eb/tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530", size = 268193, upload-time = "2025-10-08T22:01:39.712Z" }, + { url = "https://files.pythonhosted.org/packages/42/4f/2c12a72ae22cf7b59a7fe75b3465b7aba40ea9145d026ba41cb382075b0e/tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b", size = 275488, upload-time = "2025-10-08T22:01:40.773Z" }, + { url = "https://files.pythonhosted.org/packages/92/04/a038d65dbe160c3aa5a624e93ad98111090f6804027d474ba9c37c8ae186/tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67", size = 272669, upload-time = "2025-10-08T22:01:41.824Z" }, + { url = "https://files.pythonhosted.org/packages/be/2f/8b7c60a9d1612a7cbc39ffcca4f21a73bf368a80fc25bccf8253e2563267/tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f", size = 279709, upload-time = "2025-10-08T22:01:43.177Z" }, + { url = "https://files.pythonhosted.org/packages/7e/46/cc36c679f09f27ded940281c38607716c86cf8ba4a518d524e349c8b4874/tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0", size = 107563, upload-time = "2025-10-08T22:01:44.233Z" }, + { url = "https://files.pythonhosted.org/packages/84/ff/426ca8683cf7b753614480484f6437f568fd2fda2edbdf57a2d3d8b27a0b/tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba", size = 119756, upload-time = "2025-10-08T22:01:45.234Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "typer" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click", version = "8.1.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "click", version = "8.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8f/28/7c85c8032b91dbe79725b6f17d2fffc595dff06a35c7a30a37bef73a1ab4/typer-0.20.0.tar.gz", hash = "sha256:1aaf6494031793e4876fb0bacfa6a912b551cf43c1e63c800df8b1a866720c37", size = 106492, upload-time = "2025-10-20T17:03:49.445Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/64/7713ffe4b5983314e9d436a90d5bd4f63b6054e2aca783a3cfc44cb95bbf/typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a", size = 47028, upload-time = "2025-10-20T17:03:47.617Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = 
"sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967, upload-time = "2025-04-10T14:19:05.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806, upload-time = "2025-04-10T14:19:03.967Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "1.26.20" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.9.*'", + "python_full_version < '3.9'", +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380, upload-time = "2024-08-29T15:43:11.37Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225, upload-time = "2024-08-29T15:43:08.921Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "virtualenv" +version = "20.35.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"distlib" }, + { name = "filelock", version = "3.16.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "filelock", version = "3.19.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "filelock", version = "3.20.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "platformdirs", version = "4.3.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.9.*'" }, + { name = "platformdirs", version = "4.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "typing-extensions", version = "4.13.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "typing-extensions", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9' and python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/28/e6f1a6f655d620846bd9df527390ecc26b3805a0c5989048c210e22c5ca9/virtualenv-20.35.4.tar.gz", hash = "sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c", size = 6028799, upload-time = "2025-10-29T06:57:40.511Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/0c/c05523fa3181fdf0c9c52a6ba91a23fbf3246cc095f26f6516f9c60e6771/virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b", size = 6005095, upload-time = "2025-10-29T06:57:37.598Z" }, +] + +[[package]] +name = "yarl" +version = "1.15.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.9'", +] +dependencies = [ + { name = "idna", marker = 
"python_full_version < '3.9'" }, + { name = "multidict", version = "6.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, + { name = "propcache", version = "0.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/e1/d5427a061819c9f885f58bb0467d02a523f1aec19f9e5f9c82ce950d90d3/yarl-1.15.2.tar.gz", hash = "sha256:a39c36f4218a5bb668b4f06874d676d35a035ee668e6e7e3538835c703634b84", size = 169318, upload-time = "2024-10-13T18:48:04.311Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/f8/6b1bbc6f597d8937ad8661c042aa6bdbbe46a3a6e38e2c04214b9c82e804/yarl-1.15.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e4ee8b8639070ff246ad3649294336b06db37a94bdea0d09ea491603e0be73b8", size = 136479, upload-time = "2024-10-13T18:44:32.077Z" }, + { url = "https://files.pythonhosted.org/packages/61/e0/973c0d16b1cb710d318b55bd5d019a1ecd161d28670b07d8d9df9a83f51f/yarl-1.15.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a7cf963a357c5f00cb55b1955df8bbe68d2f2f65de065160a1c26b85a1e44172", size = 88671, upload-time = "2024-10-13T18:44:35.334Z" }, + { url = "https://files.pythonhosted.org/packages/16/df/241cfa1cf33b96da2c8773b76fe3ee58e04cb09ecfe794986ec436ae97dc/yarl-1.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:43ebdcc120e2ca679dba01a779333a8ea76b50547b55e812b8b92818d604662c", size = 86578, upload-time = "2024-10-13T18:44:37.58Z" }, + { url = "https://files.pythonhosted.org/packages/02/a4/ee2941d1f93600d921954a0850e20581159772304e7de49f60588e9128a2/yarl-1.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3433da95b51a75692dcf6cc8117a31410447c75a9a8187888f02ad45c0a86c50", size = 307212, upload-time = "2024-10-13T18:44:39.932Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/64/2e6561af430b092b21c7a867ae3079f62e1532d3e51fee765fd7a74cef6c/yarl-1.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38d0124fa992dbacd0c48b1b755d3ee0a9f924f427f95b0ef376556a24debf01", size = 321589, upload-time = "2024-10-13T18:44:42.527Z" }, + { url = "https://files.pythonhosted.org/packages/f8/af/056ab318a7117fa70f6ab502ff880e47af973948d1d123aff397cd68499c/yarl-1.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ded1b1803151dd0f20a8945508786d57c2f97a50289b16f2629f85433e546d47", size = 319443, upload-time = "2024-10-13T18:44:45.03Z" }, + { url = "https://files.pythonhosted.org/packages/99/d1/051b0bc2c90c9a2618bab10a9a9a61a96ddb28c7c54161a5c97f9e625205/yarl-1.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ace4cad790f3bf872c082366c9edd7f8f8f77afe3992b134cfc810332206884f", size = 310324, upload-time = "2024-10-13T18:44:47.675Z" }, + { url = "https://files.pythonhosted.org/packages/23/1b/16df55016f9ac18457afda165031086bce240d8bcf494501fb1164368617/yarl-1.15.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c77494a2f2282d9bbbbcab7c227a4d1b4bb829875c96251f66fb5f3bae4fb053", size = 300428, upload-time = "2024-10-13T18:44:49.431Z" }, + { url = "https://files.pythonhosted.org/packages/83/a5/5188d1c575139a8dfd90d463d56f831a018f41f833cdf39da6bd8a72ee08/yarl-1.15.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b7f227ca6db5a9fda0a2b935a2ea34a7267589ffc63c8045f0e4edb8d8dcf956", size = 307079, upload-time = "2024-10-13T18:44:51.96Z" }, + { url = "https://files.pythonhosted.org/packages/ba/4e/2497f8f2b34d1a261bebdbe00066242eacc9a7dccd4f02ddf0995014290a/yarl-1.15.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:31561a5b4d8dbef1559b3600b045607cf804bae040f64b5f5bca77da38084a8a", size = 305835, upload-time = "2024-10-13T18:44:53.83Z" }, + { url = 
"https://files.pythonhosted.org/packages/91/db/40a347e1f8086e287a53c72dc333198816885bc770e3ecafcf5eaeb59311/yarl-1.15.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3e52474256a7db9dcf3c5f4ca0b300fdea6c21cca0148c8891d03a025649d935", size = 311033, upload-time = "2024-10-13T18:44:56.464Z" }, + { url = "https://files.pythonhosted.org/packages/2f/a6/1500e1e694616c25eed6bf8c1aacc0943f124696d2421a07ae5e9ee101a5/yarl-1.15.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0e1af74a9529a1137c67c887ed9cde62cff53aa4d84a3adbec329f9ec47a3936", size = 326317, upload-time = "2024-10-13T18:44:59.015Z" }, + { url = "https://files.pythonhosted.org/packages/37/db/868d4b59cc76932ce880cc9946cd0ae4ab111a718494a94cb50dd5b67d82/yarl-1.15.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:15c87339490100c63472a76d87fe7097a0835c705eb5ae79fd96e343473629ed", size = 324196, upload-time = "2024-10-13T18:45:00.772Z" }, + { url = "https://files.pythonhosted.org/packages/bd/41/b6c917c2fde2601ee0b45c82a0c502dc93e746dea469d3a6d1d0a24749e8/yarl-1.15.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:74abb8709ea54cc483c4fb57fb17bb66f8e0f04438cff6ded322074dbd17c7ec", size = 317023, upload-time = "2024-10-13T18:45:03.427Z" }, + { url = "https://files.pythonhosted.org/packages/b0/85/2cde6b656fd83c474f19606af3f7a3e94add8988760c87a101ee603e7b8f/yarl-1.15.2-cp310-cp310-win32.whl", hash = "sha256:ffd591e22b22f9cb48e472529db6a47203c41c2c5911ff0a52e85723196c0d75", size = 78136, upload-time = "2024-10-13T18:45:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/ef/3c/4414901b0588427870002b21d790bd1fad142a9a992a22e5037506d0ed9d/yarl-1.15.2-cp310-cp310-win_amd64.whl", hash = "sha256:1695497bb2a02a6de60064c9f077a4ae9c25c73624e0d43e3aa9d16d983073c2", size = 84231, upload-time = "2024-10-13T18:45:07.622Z" }, + { url = "https://files.pythonhosted.org/packages/4a/59/3ae125c97a2a8571ea16fdf59fcbd288bc169e0005d1af9946a90ea831d9/yarl-1.15.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:9fcda20b2de7042cc35cf911702fa3d8311bd40055a14446c1e62403684afdc5", size = 136492, upload-time = "2024-10-13T18:45:09.962Z" }, + { url = "https://files.pythonhosted.org/packages/f9/2b/efa58f36b582db45b94c15e87803b775eb8a4ca0db558121a272e67f3564/yarl-1.15.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0545de8c688fbbf3088f9e8b801157923be4bf8e7b03e97c2ecd4dfa39e48e0e", size = 88614, upload-time = "2024-10-13T18:45:12.329Z" }, + { url = "https://files.pythonhosted.org/packages/82/69/eb73c0453a2ff53194df485dc7427d54e6cb8d1180fcef53251a8e24d069/yarl-1.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbda058a9a68bec347962595f50546a8a4a34fd7b0654a7b9697917dc2bf810d", size = 86607, upload-time = "2024-10-13T18:45:13.88Z" }, + { url = "https://files.pythonhosted.org/packages/48/4e/89beaee3a4da0d1c6af1176d738cff415ff2ad3737785ee25382409fe3e3/yarl-1.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ac2bc069f4a458634c26b101c2341b18da85cb96afe0015990507efec2e417", size = 334077, upload-time = "2024-10-13T18:45:16.217Z" }, + { url = "https://files.pythonhosted.org/packages/da/e8/8fcaa7552093f94c3f327783e2171da0eaa71db0c267510898a575066b0f/yarl-1.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd126498171f752dd85737ab1544329a4520c53eed3997f9b08aefbafb1cc53b", size = 347365, upload-time = "2024-10-13T18:45:18.812Z" }, + { url = "https://files.pythonhosted.org/packages/be/fa/dc2002f82a89feab13a783d3e6b915a3a2e0e83314d9e3f6d845ee31bfcc/yarl-1.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3db817b4e95eb05c362e3b45dafe7144b18603e1211f4a5b36eb9522ecc62bcf", size = 344823, upload-time = "2024-10-13T18:45:20.644Z" }, + { url = "https://files.pythonhosted.org/packages/ae/c8/c4a00fe7f2aa6970c2651df332a14c88f8baaedb2e32d6c3b8c8a003ea74/yarl-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:076b1ed2ac819933895b1a000904f62d615fe4533a5cf3e052ff9a1da560575c", size = 337132, upload-time = "2024-10-13T18:45:22.487Z" }, + { url = "https://files.pythonhosted.org/packages/07/bf/84125f85f44bf2af03f3cf64e87214b42cd59dcc8a04960d610a9825f4d4/yarl-1.15.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f8cfd847e6b9ecf9f2f2531c8427035f291ec286c0a4944b0a9fce58c6446046", size = 326258, upload-time = "2024-10-13T18:45:25.049Z" }, + { url = "https://files.pythonhosted.org/packages/00/19/73ad8122b2fa73fe22e32c24b82a6c053cf6c73e2f649b73f7ef97bee8d0/yarl-1.15.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:32b66be100ac5739065496c74c4b7f3015cef792c3174982809274d7e51b3e04", size = 336212, upload-time = "2024-10-13T18:45:26.808Z" }, + { url = "https://files.pythonhosted.org/packages/39/1d/2fa4337d11f6587e9b7565f84eba549f2921494bc8b10bfe811079acaa70/yarl-1.15.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:34a2d76a1984cac04ff8b1bfc939ec9dc0914821264d4a9c8fd0ed6aa8d4cfd2", size = 330397, upload-time = "2024-10-13T18:45:29.112Z" }, + { url = "https://files.pythonhosted.org/packages/39/ab/dce75e06806bcb4305966471ead03ce639d8230f4f52c32bd614d820c044/yarl-1.15.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0afad2cd484908f472c8fe2e8ef499facee54a0a6978be0e0cff67b1254fd747", size = 334985, upload-time = "2024-10-13T18:45:31.709Z" }, + { url = "https://files.pythonhosted.org/packages/c1/98/3f679149347a5e34c952bf8f71a387bc96b3488fae81399a49f8b1a01134/yarl-1.15.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c68e820879ff39992c7f148113b46efcd6ec765a4865581f2902b3c43a5f4bbb", size = 356033, upload-time = "2024-10-13T18:45:34.325Z" }, + { url = "https://files.pythonhosted.org/packages/f7/8c/96546061c19852d0a4b1b07084a58c2e8911db6bcf7838972cff542e09fb/yarl-1.15.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:98f68df80ec6ca3015186b2677c208c096d646ef37bbf8b49764ab4a38183931", size = 
357710, upload-time = "2024-10-13T18:45:36.216Z" }, + { url = "https://files.pythonhosted.org/packages/01/45/ade6fb3daf689816ebaddb3175c962731edf300425c3254c559b6d0dcc27/yarl-1.15.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c56ec1eacd0a5d35b8a29f468659c47f4fe61b2cab948ca756c39b7617f0aa5", size = 345532, upload-time = "2024-10-13T18:45:38.123Z" }, + { url = "https://files.pythonhosted.org/packages/e7/d7/8de800d3aecda0e64c43e8fc844f7effc8731a6099fa0c055738a2247504/yarl-1.15.2-cp311-cp311-win32.whl", hash = "sha256:eedc3f247ee7b3808ea07205f3e7d7879bc19ad3e6222195cd5fbf9988853e4d", size = 78250, upload-time = "2024-10-13T18:45:39.908Z" }, + { url = "https://files.pythonhosted.org/packages/3a/6c/69058bbcfb0164f221aa30e0cd1a250f6babb01221e27c95058c51c498ca/yarl-1.15.2-cp311-cp311-win_amd64.whl", hash = "sha256:0ccaa1bc98751fbfcf53dc8dfdb90d96e98838010fc254180dd6707a6e8bb179", size = 84492, upload-time = "2024-10-13T18:45:42.286Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d1/17ff90e7e5b1a0b4ddad847f9ec6a214b87905e3a59d01bff9207ce2253b/yarl-1.15.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:82d5161e8cb8f36ec778fd7ac4d740415d84030f5b9ef8fe4da54784a1f46c94", size = 136721, upload-time = "2024-10-13T18:45:43.876Z" }, + { url = "https://files.pythonhosted.org/packages/44/50/a64ca0577aeb9507f4b672f9c833d46cf8f1e042ce2e80c11753b936457d/yarl-1.15.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fa2bea05ff0a8fb4d8124498e00e02398f06d23cdadd0fe027d84a3f7afde31e", size = 88954, upload-time = "2024-10-13T18:45:46.305Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0a/a30d0b02046d4088c1fd32d85d025bd70ceb55f441213dee14d503694f41/yarl-1.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99e12d2bf587b44deb74e0d6170fec37adb489964dbca656ec41a7cd8f2ff178", size = 86692, upload-time = "2024-10-13T18:45:47.992Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/0b/7613decb8baa26cba840d7ea2074bd3c5e27684cbcb6d06e7840d6c5226c/yarl-1.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:243fbbbf003754fe41b5bdf10ce1e7f80bcc70732b5b54222c124d6b4c2ab31c", size = 325762, upload-time = "2024-10-13T18:45:49.69Z" }, + { url = "https://files.pythonhosted.org/packages/97/f5/b8c389a58d1eb08f89341fc1bbcc23a0341f7372185a0a0704dbdadba53a/yarl-1.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:856b7f1a7b98a8c31823285786bd566cf06226ac4f38b3ef462f593c608a9bd6", size = 335037, upload-time = "2024-10-13T18:45:51.932Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f9/d89b93a7bb8b66e01bf722dcc6fec15e11946e649e71414fd532b05c4d5d/yarl-1.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:553dad9af802a9ad1a6525e7528152a015b85fb8dbf764ebfc755c695f488367", size = 334221, upload-time = "2024-10-13T18:45:54.548Z" }, + { url = "https://files.pythonhosted.org/packages/10/77/1db077601998e0831a540a690dcb0f450c31f64c492e993e2eaadfbc7d31/yarl-1.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30c3ff305f6e06650a761c4393666f77384f1cc6c5c0251965d6bfa5fbc88f7f", size = 330167, upload-time = "2024-10-13T18:45:56.675Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c2/e5b7121662fd758656784fffcff2e411c593ec46dc9ec68e0859a2ffaee3/yarl-1.15.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:353665775be69bbfc6d54c8d134bfc533e332149faeddd631b0bc79df0897f46", size = 317472, upload-time = "2024-10-13T18:45:58.815Z" }, + { url = "https://files.pythonhosted.org/packages/c6/f3/41e366c17e50782651b192ba06a71d53500cc351547816bf1928fb043c4f/yarl-1.15.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f4fe99ce44128c71233d0d72152db31ca119711dfc5f2c82385ad611d8d7f897", size = 330896, upload-time = "2024-10-13T18:46:01.126Z" }, + { 
url = "https://files.pythonhosted.org/packages/79/a2/d72e501bc1e33e68a5a31f584fe4556ab71a50a27bfd607d023f097cc9bb/yarl-1.15.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9c1e3ff4b89cdd2e1a24c214f141e848b9e0451f08d7d4963cb4108d4d798f1f", size = 328787, upload-time = "2024-10-13T18:46:02.991Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ba/890f7e1ea17f3c247748548eee876528ceb939e44566fa7d53baee57e5aa/yarl-1.15.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:711bdfae4e699a6d4f371137cbe9e740dc958530cb920eb6f43ff9551e17cfbc", size = 332631, upload-time = "2024-10-13T18:46:04.939Z" }, + { url = "https://files.pythonhosted.org/packages/48/c7/27b34206fd5dfe76b2caa08bf22f9212b2d665d5bb2df8a6dd3af498dcf4/yarl-1.15.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4388c72174868884f76affcdd3656544c426407e0043c89b684d22fb265e04a5", size = 344023, upload-time = "2024-10-13T18:46:06.809Z" }, + { url = "https://files.pythonhosted.org/packages/88/e7/730b130f4f02bd8b00479baf9a57fdea1dc927436ed1d6ba08fa5c36c68e/yarl-1.15.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f0e1844ad47c7bd5d6fa784f1d4accc5f4168b48999303a868fe0f8597bde715", size = 352290, upload-time = "2024-10-13T18:46:08.676Z" }, + { url = "https://files.pythonhosted.org/packages/84/9b/e8dda28f91a0af67098cddd455e6b540d3f682dda4c0de224215a57dee4a/yarl-1.15.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a5cafb02cf097a82d74403f7e0b6b9df3ffbfe8edf9415ea816314711764a27b", size = 343742, upload-time = "2024-10-13T18:46:10.583Z" }, + { url = "https://files.pythonhosted.org/packages/66/47/b1c6bb85f2b66decbe189e27fcc956ab74670a068655df30ef9a2e15c379/yarl-1.15.2-cp312-cp312-win32.whl", hash = "sha256:156ececdf636143f508770bf8a3a0498de64da5abd890c7dbb42ca9e3b6c05b8", size = 78051, upload-time = "2024-10-13T18:46:12.671Z" }, + { url = "https://files.pythonhosted.org/packages/7d/9e/1a897e5248ec53e96e9f15b3e6928efd5e75d322c6cf666f55c1c063e5c9/yarl-1.15.2-cp312-cp312-win_amd64.whl", 
hash = "sha256:435aca062444a7f0c884861d2e3ea79883bd1cd19d0a381928b69ae1b85bc51d", size = 84313, upload-time = "2024-10-13T18:46:15.237Z" }, + { url = "https://files.pythonhosted.org/packages/46/ab/be3229898d7eb1149e6ba7fe44f873cf054d275a00b326f2a858c9ff7175/yarl-1.15.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:416f2e3beaeae81e2f7a45dc711258be5bdc79c940a9a270b266c0bec038fb84", size = 135006, upload-time = "2024-10-13T18:46:16.909Z" }, + { url = "https://files.pythonhosted.org/packages/10/10/b91c186b1b0e63951f80481b3e6879bb9f7179d471fe7c4440c9e900e2a3/yarl-1.15.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:173563f3696124372831007e3d4b9821746964a95968628f7075d9231ac6bb33", size = 88121, upload-time = "2024-10-13T18:46:18.702Z" }, + { url = "https://files.pythonhosted.org/packages/bf/1d/4ceaccf836b9591abfde775e84249b847ac4c6c14ee2dd8d15b5b3cede44/yarl-1.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ce2e0f6123a60bd1a7f5ae3b2c49b240c12c132847f17aa990b841a417598a2", size = 85967, upload-time = "2024-10-13T18:46:20.354Z" }, + { url = "https://files.pythonhosted.org/packages/93/bd/c924f22bdb2c5d0ca03a9e64ecc5e041aace138c2a91afff7e2f01edc3a1/yarl-1.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaea112aed589131f73d50d570a6864728bd7c0c66ef6c9154ed7b59f24da611", size = 325615, upload-time = "2024-10-13T18:46:22.057Z" }, + { url = "https://files.pythonhosted.org/packages/59/a5/6226accd5c01cafd57af0d249c7cf9dd12569cd9c78fbd93e8198e7a9d84/yarl-1.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4ca3b9f370f218cc2a0309542cab8d0acdfd66667e7c37d04d617012485f904", size = 334945, upload-time = "2024-10-13T18:46:24.184Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c1/cc6ccdd2bcd0ff7291602d5831754595260f8d2754642dfd34fef1791059/yarl-1.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:23ec1d3c31882b2a8a69c801ef58ebf7bae2553211ebbddf04235be275a38548", size = 336701, upload-time = "2024-10-13T18:46:27.038Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ff/39a767ee249444e4b26ea998a526838238f8994c8f274befc1f94dacfb43/yarl-1.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75119badf45f7183e10e348edff5a76a94dc19ba9287d94001ff05e81475967b", size = 330977, upload-time = "2024-10-13T18:46:28.921Z" }, + { url = "https://files.pythonhosted.org/packages/dd/ba/b1fed73f9d39e3e7be8f6786be5a2ab4399c21504c9168c3cadf6e441c2e/yarl-1.15.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78e6fdc976ec966b99e4daa3812fac0274cc28cd2b24b0d92462e2e5ef90d368", size = 317402, upload-time = "2024-10-13T18:46:30.86Z" }, + { url = "https://files.pythonhosted.org/packages/82/e8/03e3ebb7f558374f29c04868b20ca484d7997f80a0a191490790a8c28058/yarl-1.15.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8657d3f37f781d987037f9cc20bbc8b40425fa14380c87da0cb8dfce7c92d0fb", size = 331776, upload-time = "2024-10-13T18:46:33.037Z" }, + { url = "https://files.pythonhosted.org/packages/1f/83/90b0f4fd1ecf2602ba4ac50ad0bbc463122208f52dd13f152bbc0d8417dd/yarl-1.15.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:93bed8a8084544c6efe8856c362af08a23e959340c87a95687fdbe9c9f280c8b", size = 331585, upload-time = "2024-10-13T18:46:35.275Z" }, + { url = "https://files.pythonhosted.org/packages/c7/f6/1ed7e7f270ae5f9f1174c1f8597b29658f552fee101c26de8b2eb4ca147a/yarl-1.15.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:69d5856d526802cbda768d3e6246cd0d77450fa2a4bc2ea0ea14f0d972c2894b", size = 336395, upload-time = "2024-10-13T18:46:38.003Z" }, + { url = "https://files.pythonhosted.org/packages/e0/3a/4354ed8812909d9ec54a92716a53259b09e6b664209231f2ec5e75f4820d/yarl-1.15.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ccad2800dfdff34392448c4bf834be124f10a5bc102f254521d931c1c53c455a", size = 342810, upload-time = "2024-10-13T18:46:39.952Z" }, + { url = "https://files.pythonhosted.org/packages/de/cc/39e55e16b1415a87f6d300064965d6cfb2ac8571e11339ccb7dada2444d9/yarl-1.15.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:a880372e2e5dbb9258a4e8ff43f13888039abb9dd6d515f28611c54361bc5644", size = 351441, upload-time = "2024-10-13T18:46:41.867Z" }, + { url = "https://files.pythonhosted.org/packages/fb/19/5cd4757079dc9d9f3de3e3831719b695f709a8ce029e70b33350c9d082a7/yarl-1.15.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c998d0558805860503bc3a595994895ca0f7835e00668dadc673bbf7f5fbfcbe", size = 345875, upload-time = "2024-10-13T18:46:43.824Z" }, + { url = "https://files.pythonhosted.org/packages/83/a0/ef09b54634f73417f1ea4a746456a4372c1b044f07b26e16fa241bd2d94e/yarl-1.15.2-cp313-cp313-win32.whl", hash = "sha256:533a28754e7f7439f217550a497bb026c54072dbe16402b183fdbca2431935a9", size = 302609, upload-time = "2024-10-13T18:46:45.828Z" }, + { url = "https://files.pythonhosted.org/packages/20/9f/f39c37c17929d3975da84c737b96b606b68c495cc4ee86408f10523a1635/yarl-1.15.2-cp313-cp313-win_amd64.whl", hash = "sha256:5838f2b79dc8f96fdc44077c9e4e2e33d7089b10788464609df788eb97d03aad", size = 308252, upload-time = "2024-10-13T18:46:48.042Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1f/544439ce6b7a498327d57ff40f0cd4f24bf4b1c1daf76c8c962dca022e71/yarl-1.15.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fbbb63bed5fcd70cd3dd23a087cd78e4675fb5a2963b8af53f945cbbca79ae16", size = 138555, upload-time = "2024-10-13T18:46:50.448Z" }, + { url = "https://files.pythonhosted.org/packages/e8/b7/d6f33e7a42832f1e8476d0aabe089be0586a9110b5dfc2cef93444dc7c21/yarl-1.15.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e2e93b88ecc8f74074012e18d679fb2e9c746f2a56f79cd5e2b1afcf2a8a786b", size = 89844, upload-time = "2024-10-13T18:46:52.297Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/34/ede8d8ed7350b4b21e33fc4eff71e08de31da697034969b41190132d421f/yarl-1.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af8ff8d7dc07ce873f643de6dfbcd45dc3db2c87462e5c387267197f59e6d776", size = 87671, upload-time = "2024-10-13T18:46:54.104Z" }, + { url = "https://files.pythonhosted.org/packages/fa/51/6d71e92bc54b5788b18f3dc29806f9ce37e12b7c610e8073357717f34b78/yarl-1.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66f629632220a4e7858b58e4857927dd01a850a4cef2fb4044c8662787165cf7", size = 314558, upload-time = "2024-10-13T18:46:55.885Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/f9ffe503b4ef77cd77c9eefd37717c092e26f2c2dbbdd45700f864831292/yarl-1.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:833547179c31f9bec39b49601d282d6f0ea1633620701288934c5f66d88c3e50", size = 327622, upload-time = "2024-10-13T18:46:58.173Z" }, + { url = "https://files.pythonhosted.org/packages/8b/38/8eb602eeb153de0189d572dce4ed81b9b14f71de7c027d330b601b4fdcdc/yarl-1.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2aa738e0282be54eede1e3f36b81f1e46aee7ec7602aa563e81e0e8d7b67963f", size = 324447, upload-time = "2024-10-13T18:47:00.263Z" }, + { url = "https://files.pythonhosted.org/packages/c2/1e/1c78c695a4c7b957b5665e46a89ea35df48511dbed301a05c0a8beed0cc3/yarl-1.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a13a07532e8e1c4a5a3afff0ca4553da23409fad65def1b71186fb867eeae8d", size = 319009, upload-time = "2024-10-13T18:47:02.417Z" }, + { url = "https://files.pythonhosted.org/packages/06/a0/7ea93de4ca1991e7f92a8901dcd1585165f547d342f7c6f36f1ea58b75de/yarl-1.15.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c45817e3e6972109d1a2c65091504a537e257bc3c885b4e78a95baa96df6a3f8", size = 307760, upload-time = "2024-10-13T18:47:04.553Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/b4/ceaa1f35cfb37fe06af3f7404438abf9a1262dc5df74dba37c90b0615e06/yarl-1.15.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:670eb11325ed3a6209339974b276811867defe52f4188fe18dc49855774fa9cf", size = 315038, upload-time = "2024-10-13T18:47:06.482Z" }, + { url = "https://files.pythonhosted.org/packages/da/45/a2ca2b547c56550eefc39e45d61e4b42ae6dbb3e913810b5a0eb53e86412/yarl-1.15.2-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:d417a4f6943112fae3924bae2af7112562285848d9bcee737fc4ff7cbd450e6c", size = 312898, upload-time = "2024-10-13T18:47:09.291Z" }, + { url = "https://files.pythonhosted.org/packages/ea/e0/f692ba36dedc5b0b22084bba558a7ede053841e247b7dd2adbb9d40450be/yarl-1.15.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:bc8936d06cd53fddd4892677d65e98af514c8d78c79864f418bbf78a4a2edde4", size = 319370, upload-time = "2024-10-13T18:47:11.647Z" }, + { url = "https://files.pythonhosted.org/packages/b1/3f/0e382caf39958be6ae61d4bb0c82a68a3c45a494fc8cdc6f55c29757970e/yarl-1.15.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:954dde77c404084c2544e572f342aef384240b3e434e06cecc71597e95fd1ce7", size = 332429, upload-time = "2024-10-13T18:47:13.88Z" }, + { url = "https://files.pythonhosted.org/packages/21/6b/c824a4a1c45d67b15b431d4ab83b63462bfcbc710065902e10fa5c2ffd9e/yarl-1.15.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5bc0df728e4def5e15a754521e8882ba5a5121bd6b5a3a0ff7efda5d6558ab3d", size = 333143, upload-time = "2024-10-13T18:47:16.141Z" }, + { url = "https://files.pythonhosted.org/packages/20/76/8af2a1d93fe95b04e284b5d55daaad33aae6e2f6254a1bcdb40e2752af6c/yarl-1.15.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b71862a652f50babab4a43a487f157d26b464b1dedbcc0afda02fd64f3809d04", size = 326687, upload-time = "2024-10-13T18:47:18.179Z" }, + { url = "https://files.pythonhosted.org/packages/1c/53/490830773f907ef8a311cc5d82e5830f75f7692c1adacbdb731d3f1246fd/yarl-1.15.2-cp38-cp38-win32.whl", hash = 
"sha256:63eab904f8630aed5a68f2d0aeab565dcfc595dc1bf0b91b71d9ddd43dea3aea", size = 78705, upload-time = "2024-10-13T18:47:20.876Z" }, + { url = "https://files.pythonhosted.org/packages/9c/9d/d944e897abf37f50f4fa2d8d6f5fd0ed9413bc8327d3b4cc25ba9694e1ba/yarl-1.15.2-cp38-cp38-win_amd64.whl", hash = "sha256:2cf441c4b6e538ba0d2591574f95d3fdd33f1efafa864faa077d9636ecc0c4e9", size = 84998, upload-time = "2024-10-13T18:47:23.301Z" }, + { url = "https://files.pythonhosted.org/packages/91/1c/1c9d08c29b10499348eedc038cf61b6d96d5ba0e0d69438975845939ed3c/yarl-1.15.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a32d58f4b521bb98b2c0aa9da407f8bd57ca81f34362bcb090e4a79e9924fefc", size = 138011, upload-time = "2024-10-13T18:47:25.002Z" }, + { url = "https://files.pythonhosted.org/packages/d4/33/2d4a1418bae6d7883c1fcc493be7b6d6fe015919835adc9e8eeba472e9f7/yarl-1.15.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:766dcc00b943c089349d4060b935c76281f6be225e39994c2ccec3a2a36ad627", size = 89618, upload-time = "2024-10-13T18:47:27.587Z" }, + { url = "https://files.pythonhosted.org/packages/78/2e/0024c674a376cfdc722a167a8f308f5779aca615cb7a28d67fbeabf3f697/yarl-1.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bed1b5dbf90bad3bfc19439258c97873eab453c71d8b6869c136346acfe497e7", size = 87347, upload-time = "2024-10-13T18:47:29.671Z" }, + { url = "https://files.pythonhosted.org/packages/c5/08/a01874dabd4ddf475c5c2adc86f7ac329f83a361ee513a97841720ab7b24/yarl-1.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed20a4bdc635f36cb19e630bfc644181dd075839b6fc84cac51c0f381ac472e2", size = 310438, upload-time = "2024-10-13T18:47:31.577Z" }, + { url = "https://files.pythonhosted.org/packages/09/95/691bc6de2c1b0e9c8bbaa5f8f38118d16896ba1a069a09d1fb073d41a093/yarl-1.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d538df442c0d9665664ab6dd5fccd0110fa3b364914f9c85b3ef9b7b2e157980", size = 325384, upload-time = 
"2024-10-13T18:47:33.587Z" }, + { url = "https://files.pythonhosted.org/packages/95/fd/fee11eb3337f48c62d39c5676e6a0e4e318e318900a901b609a3c45394df/yarl-1.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c6cf1d92edf936ceedc7afa61b07e9d78a27b15244aa46bbcd534c7458ee1b", size = 321820, upload-time = "2024-10-13T18:47:35.633Z" }, + { url = "https://files.pythonhosted.org/packages/7a/ad/4a2c9bbebaefdce4a69899132f4bf086abbddb738dc6e794a31193bc0854/yarl-1.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce44217ad99ffad8027d2fde0269ae368c86db66ea0571c62a000798d69401fb", size = 314150, upload-time = "2024-10-13T18:47:37.693Z" }, + { url = "https://files.pythonhosted.org/packages/38/7d/552c37bc6c4ae8ea900e44b6c05cb16d50dca72d3782ccd66f53e27e353f/yarl-1.15.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47a6000a7e833ebfe5886b56a31cb2ff12120b1efd4578a6fcc38df16cc77bd", size = 304202, upload-time = "2024-10-13T18:47:40.411Z" }, + { url = "https://files.pythonhosted.org/packages/2e/f8/c22a158f3337f49775775ecef43fc097a98b20cdce37425b68b9c45a6f94/yarl-1.15.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e52f77a0cd246086afde8815039f3e16f8d2be51786c0a39b57104c563c5cbb0", size = 310311, upload-time = "2024-10-13T18:47:43.236Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e4/ebce06afa25c2a6c8e6c9a5915cbbc7940a37f3ec38e950e8f346ca908da/yarl-1.15.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:f9ca0e6ce7774dc7830dc0cc4bb6b3eec769db667f230e7c770a628c1aa5681b", size = 310645, upload-time = "2024-10-13T18:47:45.24Z" }, + { url = "https://files.pythonhosted.org/packages/0a/34/5504cc8fbd1be959ec0a1e9e9f471fd438c37cb877b0178ce09085b36b51/yarl-1.15.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:136f9db0f53c0206db38b8cd0c985c78ded5fd596c9a86ce5c0b92afb91c3a19", size = 313328, upload-time = "2024-10-13T18:47:47.546Z" }, + { url = 
"https://files.pythonhosted.org/packages/cf/e4/fb3f91a539c6505e347d7d75bc675d291228960ffd6481ced76a15412924/yarl-1.15.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:173866d9f7409c0fb514cf6e78952e65816600cb888c68b37b41147349fe0057", size = 330135, upload-time = "2024-10-13T18:47:50.279Z" }, + { url = "https://files.pythonhosted.org/packages/e1/08/a0b27db813f0159e1c8a45f48852afded501de2f527e7613c4dcf436ecf7/yarl-1.15.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:6e840553c9c494a35e449a987ca2c4f8372668ee954a03a9a9685075228e5036", size = 327155, upload-time = "2024-10-13T18:47:52.337Z" }, + { url = "https://files.pythonhosted.org/packages/97/4e/b3414dded12d0e2b52eb1964c21a8d8b68495b320004807de770f7b6b53a/yarl-1.15.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:458c0c65802d816a6b955cf3603186de79e8fdb46d4f19abaec4ef0a906f50a7", size = 320810, upload-time = "2024-10-13T18:47:55.067Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ca/e5149c55d1c9dcf3d5b48acd7c71ca8622fd2f61322d0386fe63ba106774/yarl-1.15.2-cp39-cp39-win32.whl", hash = "sha256:5b48388ded01f6f2429a8c55012bdbd1c2a0c3735b3e73e221649e524c34a58d", size = 78686, upload-time = "2024-10-13T18:47:57Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/f56a80a1abaf65dbf138b821357b51b6cc061756bb7d93f08797950b3881/yarl-1.15.2-cp39-cp39-win_amd64.whl", hash = "sha256:81dadafb3aa124f86dc267a2168f71bbd2bfb163663661ab0038f6e4b8edb810", size = 84818, upload-time = "2024-10-13T18:47:58.76Z" }, + { url = "https://files.pythonhosted.org/packages/46/cf/a28c494decc9c8776b0d7b729c68d26fdafefcedd8d2eab5d9cd767376b2/yarl-1.15.2-py3-none-any.whl", hash = "sha256:0d3105efab7c5c091609abacad33afff33bdff0035bece164c98bcf5a85ef90a", size = 38891, upload-time = "2024-10-13T18:48:00.883Z" }, +] + +[[package]] +name = "yarl" +version = "1.22.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version == '3.9.*'", +] 
+dependencies = [ + { name = "idna", marker = "python_full_version >= '3.9'" }, + { name = "multidict", version = "6.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, + { name = "propcache", version = "0.4.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.9'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/43/a2204825342f37c337f5edb6637040fa14e365b2fcc2346960201d457579/yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e", size = 140517, upload-time = "2025-10-06T14:08:42.494Z" }, + { url = "https://files.pythonhosted.org/packages/44/6f/674f3e6f02266428c56f704cd2501c22f78e8b2eeb23f153117cc86fb28a/yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f", size = 93495, upload-time = "2025-10-06T14:08:46.2Z" }, + { url = "https://files.pythonhosted.org/packages/b8/12/5b274d8a0f30c07b91b2f02cba69152600b47830fcfb465c108880fcee9c/yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf", size = 94400, upload-time = "2025-10-06T14:08:47.855Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7f/df1b6949b1fa1aa9ff6de6e2631876ad4b73c4437822026e85d8acb56bb1/yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a", size = 347545, upload-time = "2025-10-06T14:08:49.683Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/09/f92ed93bd6cd77872ab6c3462df45ca45cd058d8f1d0c9b4f54c1704429f/yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c", size = 319598, upload-time = "2025-10-06T14:08:51.215Z" }, + { url = "https://files.pythonhosted.org/packages/c3/97/ac3f3feae7d522cf7ccec3d340bb0b2b61c56cb9767923df62a135092c6b/yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147", size = 363893, upload-time = "2025-10-06T14:08:53.144Z" }, + { url = "https://files.pythonhosted.org/packages/06/49/f3219097403b9c84a4d079b1d7bda62dd9b86d0d6e4428c02d46ab2c77fc/yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb", size = 371240, upload-time = "2025-10-06T14:08:55.036Z" }, + { url = "https://files.pythonhosted.org/packages/35/9f/06b765d45c0e44e8ecf0fe15c9eacbbde342bb5b7561c46944f107bfb6c3/yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6", size = 346965, upload-time = "2025-10-06T14:08:56.722Z" }, + { url = "https://files.pythonhosted.org/packages/c5/69/599e7cea8d0fcb1694323b0db0dda317fa3162f7b90166faddecf532166f/yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0", size = 342026, upload-time = "2025-10-06T14:08:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/95/6f/9dfd12c8bc90fea9eab39832ee32ea48f8e53d1256252a77b710c065c89f/yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda", size = 335637, upload-time = 
"2025-10-06T14:09:00.506Z" }, + { url = "https://files.pythonhosted.org/packages/57/2e/34c5b4eb9b07e16e873db5b182c71e5f06f9b5af388cdaa97736d79dd9a6/yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc", size = 359082, upload-time = "2025-10-06T14:09:01.936Z" }, + { url = "https://files.pythonhosted.org/packages/31/71/fa7e10fb772d273aa1f096ecb8ab8594117822f683bab7d2c5a89914c92a/yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737", size = 357811, upload-time = "2025-10-06T14:09:03.445Z" }, + { url = "https://files.pythonhosted.org/packages/26/da/11374c04e8e1184a6a03cf9c8f5688d3e5cec83ed6f31ad3481b3207f709/yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467", size = 351223, upload-time = "2025-10-06T14:09:05.401Z" }, + { url = "https://files.pythonhosted.org/packages/82/8f/e2d01f161b0c034a30410e375e191a5d27608c1f8693bab1a08b089ca096/yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea", size = 82118, upload-time = "2025-10-06T14:09:11.148Z" }, + { url = "https://files.pythonhosted.org/packages/62/46/94c76196642dbeae634c7a61ba3da88cd77bed875bf6e4a8bed037505aa6/yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca", size = 86852, upload-time = "2025-10-06T14:09:12.958Z" }, + { url = "https://files.pythonhosted.org/packages/af/af/7df4f179d3b1a6dcb9a4bd2ffbc67642746fcafdb62580e66876ce83fff4/yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b", size = 82012, upload-time = "2025-10-06T14:09:14.664Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, + { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, + { url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, + { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, + { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, + { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" }, + { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" }, + { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" }, + { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, + { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, + { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, + { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" }, + { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" }, + { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" }, + { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" }, + { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, + { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, + { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, + { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, + { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, + { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, + { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, + { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" }, + { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" }, + { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" }, + { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = 
"2025-10-06T14:10:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" }, + { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, + { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, + { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, + { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, + { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, + { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, + { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, + { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" }, + { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" }, + { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" }, + { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" }, + { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" }, + { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, + { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, + { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, + { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, + { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, + { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" }, + { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" }, + { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" }, + { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" }, + { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" }, + { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" }, + { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, + { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, + { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, + { url = "https://files.pythonhosted.org/packages/94/fd/6480106702a79bcceda5fd9c63cb19a04a6506bd5ce7fd8d9b63742f0021/yarl-1.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748", size = 141301, upload-time = "2025-10-06T14:12:19.01Z" }, + { url = "https://files.pythonhosted.org/packages/42/e1/6d95d21b17a93e793e4ec420a925fe1f6a9342338ca7a563ed21129c0990/yarl-1.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859", size = 93864, upload-time = "2025-10-06T14:12:21.05Z" }, + { url = "https://files.pythonhosted.org/packages/32/58/b8055273c203968e89808413ea4c984988b6649baabf10f4522e67c22d2f/yarl-1.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9", size = 94706, upload-time = "2025-10-06T14:12:23.287Z" }, + { url = "https://files.pythonhosted.org/packages/18/91/d7bfbc28a88c2895ecd0da6a874def0c147de78afc52c773c28e1aa233a3/yarl-1.22.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054", size = 347100, upload-time = "2025-10-06T14:12:28.527Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e8/37a1e7b99721c0564b1fc7b0a4d1f595ef6fb8060d82ca61775b644185f7/yarl-1.22.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b", size = 318902, upload-time = "2025-10-06T14:12:30.528Z" }, + { url = "https://files.pythonhosted.org/packages/1c/ef/34724449d7ef2db4f22df644f2dac0b8a275d20f585e526937b3ae47b02d/yarl-1.22.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60", size = 363302, upload-time = "2025-10-06T14:12:32.295Z" }, + { url = "https://files.pythonhosted.org/packages/8a/04/88a39a5dad39889f192cce8d66cc4c58dbeca983e83f9b6bf23822a7ed91/yarl-1.22.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890", size = 370816, upload-time = "2025-10-06T14:12:34.01Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/1f/5e895e547129413f56c76be2c3ce4b96c797d2d0ff3e16a817d9269b12e6/yarl-1.22.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba", size = 346465, upload-time = "2025-10-06T14:12:35.977Z" }, + { url = "https://files.pythonhosted.org/packages/11/13/a750e9fd6f9cc9ed3a52a70fe58ffe505322f0efe0d48e1fd9ffe53281f5/yarl-1.22.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca", size = 341506, upload-time = "2025-10-06T14:12:37.788Z" }, + { url = "https://files.pythonhosted.org/packages/3c/67/bb6024de76e7186611ebe626aec5b71a2d2ecf9453e795f2dbd80614784c/yarl-1.22.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba", size = 335030, upload-time = "2025-10-06T14:12:39.775Z" }, + { url = "https://files.pythonhosted.org/packages/a2/be/50b38447fd94a7992996a62b8b463d0579323fcfc08c61bdba949eef8a5d/yarl-1.22.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b", size = 358560, upload-time = "2025-10-06T14:12:41.547Z" }, + { url = "https://files.pythonhosted.org/packages/e2/89/c020b6f547578c4e3dbb6335bf918f26e2f34ad0d1e515d72fd33ac0c635/yarl-1.22.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e", size = 357290, upload-time = "2025-10-06T14:12:43.861Z" }, + { url = "https://files.pythonhosted.org/packages/8c/52/c49a619ee35a402fa3a7019a4fa8d26878fec0d1243f6968bbf516789578/yarl-1.22.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8", size = 350700, upload-time = "2025-10-06T14:12:46.868Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/c9/f5042d87777bf6968435f04a2bbb15466b2f142e6e47fa4f34d1a3f32f0c/yarl-1.22.0-cp39-cp39-win32.whl", hash = "sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b", size = 82323, upload-time = "2025-10-06T14:12:48.633Z" }, + { url = "https://files.pythonhosted.org/packages/fd/58/d00f7cad9eba20c4eefac2682f34661d1d1b3a942fc0092eb60e78cfb733/yarl-1.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed", size = 87145, upload-time = "2025-10-06T14:12:50.241Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a3/70904f365080780d38b919edd42d224b8c4ce224a86950d2eaa2a24366ad/yarl-1.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2", size = 82173, upload-time = "2025-10-06T14:12:51.869Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] From bef005dd69f0e942a1aac3cee4f8087c1c83a708 Mon Sep 17 00:00:00 2001 From: Petar Shtuchkin Date: Thu, 30 Oct 2025 11:35:29 +0200 Subject: 
[PATCH 39/39] force-release-type argument and move ReleaseArgs to models --- src/redis_release/bht/args.py | 12 ------------ src/redis_release/bht/tree.py | 2 +- src/redis_release/cli.py | 9 ++++++++- src/redis_release/models.py | 11 ++++++++++- src/redis_release/state_manager.py | 6 +++++- src/tests/test_state.py | 3 +-- 6 files changed, 25 insertions(+), 18 deletions(-) diff --git a/src/redis_release/bht/args.py b/src/redis_release/bht/args.py index abf9809..a1a4cc5 100644 --- a/src/redis_release/bht/args.py +++ b/src/redis_release/bht/args.py @@ -1,13 +1 @@ """Arguments for release automation.""" - -from typing import List - -from pydantic import BaseModel, Field - - -class ReleaseArgs(BaseModel): - """Arguments for release execution.""" - - release_tag: str - force_rebuild: List[str] = Field(default_factory=list) - only_packages: List[str] = Field(default_factory=list) diff --git a/src/redis_release/bht/tree.py b/src/redis_release/bht/tree.py index 4285954..d404cd5 100644 --- a/src/redis_release/bht/tree.py +++ b/src/redis_release/bht/tree.py @@ -20,8 +20,8 @@ from ..config import Config from ..github_client_async import GitHubClientAsync +from ..models import ReleaseArgs from ..state_manager import S3StateStorage, StateManager, StateStorage -from .args import ReleaseArgs from .backchain import latch_chains from .behaviours import NeedToPublishRelease from .composites import ( diff --git a/src/redis_release/cli.py b/src/redis_release/cli.py index 96efb02..d34df79 100644 --- a/src/redis_release/cli.py +++ b/src/redis_release/cli.py @@ -8,8 +8,8 @@ import typer from py_trees.display import render_dot_tree, unicode_tree -from redis_release.bht.args import ReleaseArgs from redis_release.bht.state import print_state_table +from redis_release.models import ReleaseType from redis_release.state_manager import ( InMemoryStateStorage, S3StateStorage, @@ -19,6 +19,7 @@ from .bht.tree import TreeInspector, async_tick_tock, initialize_tree_and_state from .config import 
load_config from .logging_config import setup_logging +from .models import ReleaseArgs app = typer.Typer( name="redis-release", @@ -101,6 +102,11 @@ def release( tree_cutoff: int = typer.Option( 2000, "--tree-cutoff", "-m", help="Max number of ticks to run the tree for" ), + force_release_type: Optional[ReleaseType] = typer.Option( + None, + "--force-release-type", + help="Force release type (public or internal)", + ), ) -> None: """Run release using behaviour tree implementation.""" setup_logging() @@ -112,6 +118,7 @@ def release( release_tag=release_tag, force_rebuild=force_rebuild or [], only_packages=only_packages or [], + force_release_type=force_release_type, ) # Use context manager version with automatic lock management diff --git a/src/redis_release/models.py b/src/redis_release/models.py index 2ab5c37..78bcd9a 100644 --- a/src/redis_release/models.py +++ b/src/redis_release/models.py @@ -2,7 +2,7 @@ import re from enum import Enum -from typing import Optional +from typing import List, Optional from pydantic import BaseModel, Field @@ -151,3 +151,12 @@ def __lt__(self, other: "RedisVersion") -> bool: return True return self.suffix < other.suffix + + +class ReleaseArgs(BaseModel): + """Arguments for release execution.""" + + release_tag: str + force_rebuild: List[str] = Field(default_factory=list) + only_packages: List[str] = Field(default_factory=list) + force_release_type: Optional[ReleaseType] = None diff --git a/src/redis_release/state_manager.py b/src/redis_release/state_manager.py index 9973513..f205f43 100644 --- a/src/redis_release/state_manager.py +++ b/src/redis_release/state_manager.py @@ -11,11 +11,11 @@ from botocore.exceptions import ClientError, NoCredentialsError from rich.pretty import pretty_repr -from redis_release.bht.args import ReleaseArgs from redis_release.bht.state import ReleaseState, logger, print_state_table from redis_release.config import Config from .bht.state import ReleaseState +from .models import ReleaseArgs logger = 
logging.getLogger(__name__) @@ -227,6 +227,10 @@ def apply_args(self, state: ReleaseState) -> None: if package_name in state.packages: state.packages[package_name].meta.ephemeral.force_rebuild = True + if self.args.force_release_type: + logger.info(f"Force release type: {self.args.force_release_type}") + state.meta.release_type = self.args.force_release_type + def load(self) -> Optional[ReleaseState]: """Load state from storage backend.""" state_data = self.storage.get(self.tag) diff --git a/src/tests/test_state.py b/src/tests/test_state.py index 4f8a6b0..9ef0f89 100644 --- a/src/tests/test_state.py +++ b/src/tests/test_state.py @@ -5,10 +5,9 @@ import pytest -from redis_release.bht.args import ReleaseArgs from redis_release.bht.state import ReleaseState, Workflow from redis_release.config import Config, PackageConfig -from redis_release.models import PackageType +from redis_release.models import PackageType, ReleaseArgs from redis_release.state_manager import InMemoryStateStorage, StateManager