diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index e20c59b..9b50ece 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -3,37 +3,39 @@ name: CI
on:
pull_request:
branches: [main]
+ push:
+ tags:
+ - "v*"
jobs:
lint:
+ if: github.event_name == 'pull_request'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
-
- - name: Install ruff
- run: pip install ruff
-
- - name: Ruff format check
- run: ruff format --check .
-
- - name: Ruff lint
- run: ruff check .
+ - run: pip install ruff
+ - run: ruff format --check .
+ - run: ruff check .
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
-
- - name: Set up Python
- uses: actions/setup-python@v5
+ - uses: actions/setup-python@v5
with:
python-version: "3.13"
+ - uses: astral-sh/setup-uv@v4
+ - run: uv sync
+ - run: uv build
- - name: Install uv
- uses: astral-sh/setup-uv@v4
-
- - name: Install dependencies
- run: uv sync
-
- - name: Build package
- run: uv build
+ release:
+ if: startsWith(github.ref, 'refs/tags/')
+ needs: build
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ steps:
+ - uses: actions/checkout@v4
+ - run: pipx install uv && uv build # build at the tag: artifacts from the 'build' job do not carry over between runners
+ - uses: softprops/action-gh-release@v2
+ with: { files: "dist/*" }
diff --git a/.gitignore b/.gitignore
index a1f458d..1ffbd94 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,3 +15,5 @@ wheels/
vectors/
.env.*
logs/stdout.log
+.vscode/
+*.code-workspace
\ No newline at end of file
diff --git a/api.py b/api.py
index 3a518cc..dcde2d2 100644
--- a/api.py
+++ b/api.py
@@ -1,11 +1,14 @@
+import asyncio
import os
+import re
+from typing import Any
import dotenv
import requests
from fastapi import BackgroundTasks, FastAPI, HTTPException, Request
from fastapi.responses import JSONResponse
-from src.reviewbot.agent.workflow import work_agent
+from src.reviewbot.agent.workflow import work_agent # type: ignore
from src.reviewbot.agent.workflow.gitlab_notes import post_mr_note
from src.reviewbot.infra.config.env import load_env
@@ -40,9 +43,9 @@ def get_pipeline_status(project_id: str, pipeline_id: int) -> str:
return r.json()["status"]
-def mr_has_conflicts(mr: dict) -> bool:
+def mr_has_conflicts(mr: dict[str, Any] | None) -> bool:
# GitLab MR payload includes this
- return mr.get("detailed_merge_status") == "conflict"
+ return bool(mr) and mr.get("detailed_merge_status") == "conflict"
def pipeline_passed(project_id: str, pipeline_id: int) -> bool:
@@ -81,9 +84,12 @@ async def gitlab_webhook(req: Request, background_tasks: BackgroundTasks):
return JSONResponse({"ignored": "bot note"})
text = note.get("note", "")
- # pattern = rf"(?:/review\b.*@{re.escape(BOT_USERNAME)}|@{re.escape(BOT_USERNAME)}.*?/review\b)"
- # if not re.search(pattern, text):
- # return JSONResponse({"ignored": "no /review command"})
+ if BOT_USERNAME and text.strip() != "/reviewbot review":
+ pattern = rf"(?:/review\b.*@{re.escape(BOT_USERNAME)}|@{re.escape(BOT_USERNAME)}.*?/review\b)"
+ if not re.search(pattern, text):
+ return JSONResponse({"ignored": "no /review command"})
+ # Mention matched: normalize so the exact-command check below passes.
+ text = "/reviewbot review"
if text.strip() != "/reviewbot review":
return JSONResponse({"ignored": "not a review command"})
@@ -92,15 +98,22 @@ async def gitlab_webhook(req: Request, background_tasks: BackgroundTasks):
if not mr:
return JSONResponse({"ignored": "not an MR note"})
- project_id = payload["project"]["id"]
- mr_iid = mr["iid"]
+ project_id = str(payload["project"]["id"])
+ mr_iid = str(mr["iid"])
config = load_env()
- background_tasks.add_task(
- work_agent,
- config,
- project_id,
- mr_iid,
+ thread_id = f"{project_id}:{mr_iid}"
+ # NOTE: asyncio.create_task keeps only a weak reference to the task, so a
+ # fire-and-forget review could be garbage-collected mid-run; BackgroundTasks
+ # awaits the coroutine after the response is sent.
+ background_tasks.add_task(
+ work_agent.ainvoke, # type: ignore
+ {
+ "config": config,
+ "project_id": project_id,
+ "mr_iid": mr_iid,
+ },
+ config={"configurable": {"thread_id": thread_id}},
)
return JSONResponse({"status": "manual review triggered"})
@@ -113,7 +126,7 @@ async def gitlab_webhook(req: Request, background_tasks: BackgroundTasks):
mr = payload.get("merge_request")
detailed_status = attrs.get("detailed_status")
- project_id = payload["project"]["id"]
+ project_id = str(payload["project"]["id"])
if detailed_status not in ["passed", "failed"]:
return JSONResponse({"ignored": "pipeline is not in a final state"})
@@ -121,7 +134,7 @@ async def gitlab_webhook(req: Request, background_tasks: BackgroundTasks):
if not mr:
return JSONResponse({"ignored": "not an MR pipeline"})
- mr_iid = mr["iid"]
+ mr_iid = str(mr["iid"])
if detailed_status != "passed":
post_mr_note(
@@ -145,11 +158,18 @@ async def gitlab_webhook(req: Request, background_tasks: BackgroundTasks):
return JSONResponse({"ignored": "merge conflicts present"})
config = load_env()
- background_tasks.add_task(
- work_agent,
- config,
- project_id,
- mr_iid,
+ thread_id = f"{project_id}:{mr_iid}"
+ # NOTE: asyncio.create_task keeps only a weak reference to the task, so a
+ # fire-and-forget review could be garbage-collected mid-run; BackgroundTasks
+ # awaits the coroutine after the response is sent.
+ background_tasks.add_task(
+ work_agent.ainvoke, # type: ignore
+ {
+ "config": config,
+ "project_id": project_id,
+ "mr_iid": mr_iid,
+ },
+ config={"configurable": {"thread_id": thread_id}},
)
return JSONResponse({"status": "auto review triggered"})
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
index a457595..90f2de0 100644
--- a/docker-compose.dev.yml
+++ b/docker-compose.dev.yml
@@ -35,7 +35,18 @@ services:
2>&1 | tee /logs/stdout.log"
restart: unless-stopped
+ gitlab_runner:
+ image: gitlab/gitlab-runner:latest
+ container_name: gitlab_runner
+ restart: unless-stopped
+ depends_on:
+ - gitlab
+ volumes:
+ - gitlab_runner_config:/etc/gitlab-runner
+ - /var/run/docker.sock:/var/run/docker.sock
+
volumes:
gitlab_config:
gitlab_logs:
gitlab_data:
+ gitlab_runner_config:
diff --git a/pyproject.toml b/pyproject.toml
index 49bfeb5..aa74f9a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "reviewbot"
-version = "0.1.0"
+version = "0.3.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
@@ -8,6 +8,8 @@ dependencies = [
"dotenv>=0.9.9",
"faiss-cpu>=1.13.1",
"fastapi>=0.125.0",
+ "httpx>=0.28.1",
+ "ido-agents @ git+https://github.com/canefe/ido-agents.git@v0.1.4",
"langchain>=1.2.0",
"langchain-community>=0.4.1",
"langchain-google-genai>=4.1.2",
@@ -20,14 +22,15 @@ dependencies = [
"transformers>=4.57.3",
"typer>=0.20.0",
"uvicorn>=0.40.0",
- "xai-review>=0.48.0",
]
[tool.uv]
package = true
+
[project.scripts]
reviewbot = "reviewbot.main:app"
[tool.pyright]
typeCheckingMode = "strict"
+extraPaths = ["src"]
[tool.ruff]
line-length = 100
@@ -35,15 +38,15 @@ target-version = "py313"
[tool.ruff.lint]
select = [
- "E", # pycodestyle errors
- "W", # pycodestyle warnings
- "F", # pyflakes
- "I", # isort
- "B", # flake8-bugbear
- "UP", # pyupgrade
+ "E", # pycodestyle errors
+ "W", # pycodestyle warnings
+ "F", # pyflakes
+ "I", # isort
+ "B", # flake8-bugbear
+ "UP", # pyupgrade
]
ignore = [
- "E501", # line too long (handled by formatter)
+ "E501", # line too long (handled by formatter)
]
[tool.ruff.format]
@@ -52,6 +55,7 @@ indent-style = "space"
[dependency-groups]
dev = [
+ "pyright>=1.1.408",
"ruff>=0.8.6",
"ty>=0.0.4",
]
diff --git a/src/reviewbot/agent/base.py b/src/reviewbot/agent/base.py
deleted file mode 100644
index 886a2dd..0000000
--- a/src/reviewbot/agent/base.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from collections.abc import Callable
-from dataclasses import dataclass, field
-
-from langgraph.func import entrypoint # type: ignore
-from rich.console import Console
-
-from reviewbot.agent.tasks.core import ToolCallerSettings
-from reviewbot.agent.tasks.issues import IssuesInput, identify_issues
-from reviewbot.context import Context
-from reviewbot.core.agent import Agent
-from reviewbot.core.issues import Issue, IssueModel
-
-console = Console()
-
-
-# Generate response workflow model
-@dataclass
-class AgentRunnerInput:
- agent: Agent
- context: Context
- settings: ToolCallerSettings = field(default_factory=ToolCallerSettings)
- on_file_complete: Callable[[str, list[IssueModel]], None] | None = None
- quick_scan_agent: Agent | None = None
-
-
-@entrypoint()
-def agent_runner(input: AgentRunnerInput) -> list[Issue]:
- agent = input.agent
- settings = input.settings
- context = input.context
- on_file_complete = input.on_file_complete
- quick_scan_agent = input.quick_scan_agent
-
- issue_store = context.get("issue_store")
- if not issue_store:
- raise ValueError("Issue store not found")
-
- store_manager = context.get("store_manager")
- if not store_manager:
- raise ValueError("Store manager not found")
-
- # Step 1: Identify and validate issues
- issues = identify_issues(
- ctx=IssuesInput(
- agent=agent,
- context=context,
- settings=settings,
- on_file_complete=on_file_complete,
- quick_scan_agent=quick_scan_agent,
- )
- ).result()
-
- return issues
diff --git a/src/reviewbot/agent/tasks/acknowledgment.py b/src/reviewbot/agent/tasks/acknowledgment.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/reviewbot/agent/tasks/core.py b/src/reviewbot/agent/tasks/core.py
deleted file mode 100644
index 334c73f..0000000
--- a/src/reviewbot/agent/tasks/core.py
+++ /dev/null
@@ -1,203 +0,0 @@
-import json
-from dataclasses import dataclass
-from typing import Any
-
-from langchain_core.messages import AIMessage, BaseMessage, HumanMessage, ToolMessage
-from rich.console import Console
-
-console = Console()
-
-
-@dataclass
-class ToolCallerSettings:
- """Tool caller settings"""
-
- max_tool_calls: int = -1
- """Maximum number of tool calls
- -1 for unlimited
- """
- max_iterations: int = -1
- """Maximum number of iterations
- -1 for unlimited
- """
- max_retries: int = 3
- """Maximum number of retries for failed API calls
- Default: 3 attempts
- """
- retry_delay: float = 1.0
- """Initial retry delay in seconds
- Will use exponential backoff: delay * (2 ** attempt)
- Default: 1.0 second
- """
- retry_max_delay: float = 60.0
- """Maximum retry delay in seconds
- Default: 60 seconds
- """
-
-
-def tool_caller(agent: Any, messages: list[BaseMessage], settings: ToolCallerSettings) -> str:
- finished = False
- final_response = None
- max_tool_calls = settings.max_tool_calls
- total_tool_calls = 0
-
- while not finished:
- try:
- # Invoke the agent with current messages
- result = agent.invoke({"messages": messages})
-
- # Get the latest message from result
- latest_message = result["messages"][-1]
-
- # Update messages for next iteration
- messages = result["messages"]
-
- if isinstance(latest_message, AIMessage):
- # Check if this message has tool calls
- if latest_message.tool_calls:
- # Agent wants to use tools, continue loop
- console.print(
- f"[dim]Agent is using {len(latest_message.tool_calls)} tools[/dim]"
- )
- continue
- else:
- # No tool calls = final response
- content = latest_message.content
- if isinstance(content, list):
- # Extract text from content blocks
- text_parts = []
- for block in content:
- if isinstance(block, dict):
- if block.get("type") == "text":
- text_parts.append(block.get("text", ""))
- elif "text" in block:
- text_parts.append(block["text"])
- final_response = "\n".join(text_parts) if text_parts else str(content)
- else:
- final_response = content
-
- console.print(f"[dim]Got final response: \n{final_response}...[/dim]")
- finished = True
-
- elif isinstance(latest_message, ToolMessage):
- total_tool_calls += 1
- console.print(f"[dim]Tool call completed ({total_tool_calls} total)[/dim]")
- if max_tool_calls != -1 and total_tool_calls >= max_tool_calls:
- console.print(
- f"[yellow]Max tool calls ({max_tool_calls}) reached - forcing final response[/yellow]"
- )
- # Force the agent to provide a final response
- messages.append(
- HumanMessage(
- content="You have reached the maximum number of tool calls. Please provide your final response now in the required JSON format. If you haven't found any issues, return an empty array: []"
- )
- )
- # Get one final response from the agent
- try:
- result = agent.invoke({"messages": messages})
- latest_message = result["messages"][-1]
- if isinstance(latest_message, AIMessage):
- final_response = latest_message.content
- if isinstance(final_response, list):
- text_parts = []
- for block in final_response:
- if isinstance(block, dict) and block.get("type") == "text":
- text_parts.append(block.get("text", ""))
- final_response = (
- "\n".join(text_parts) if text_parts else str(final_response)
- )
- else:
- final_response = "[]" # Empty array as fallback
- except Exception as e:
- console.print(f"[red]Error getting forced response: {e}[/red]")
- final_response = "[]" # Empty array as fallback
- finished = True
-
- except Exception as e:
- import traceback
-
- console.print(f"[red]Error in tool_caller: {e}[/red]")
- traceback.print_exc()
- finished = True
- final_response = None
-
- if not isinstance(final_response, str):
- console.print(f"Final response is not a string: {final_response}, returning None")
- return "None"
- return final_response
-
-
-def tool_caller_stream(
- agent: Any, messages: list[BaseMessage], settings: ToolCallerSettings
-) -> str:
- finished = False
- final_response = None
- max_tool_calls = settings.max_tool_calls
- last_chunk = None
- tool_call_count = 0
-
- while not finished:
- try:
- for chunk in agent.stream({"messages": messages}, stream_mode="values"):
- last_chunk = chunk
- latest_message = chunk["messages"][-1]
-
- if isinstance(latest_message, AIMessage):
- final_response = latest_message.content
-
- elif isinstance(latest_message, ToolMessage):
- tool_call_count += 1
- print(f"Called a tool! {tool_call_count} calls made")
- if max_tool_calls != -1 and tool_call_count >= max_tool_calls:
- finished = True
- break
-
- if last_chunk:
- messages = last_chunk["messages"]
- if final_response:
- if isinstance(final_response, list):
- last = final_response[-1]
- if last.get("content"):
- final_response = last["content"][-1]["text"]
- elif last.get("text"):
- final_response = last["text"]
- else:
- final_response = last
-
- if isinstance(final_response, dict):
- if final_response.get("content"):
- final_response = final_response["content"][-1]["text"]
- elif final_response.get("text"):
- final_response = final_response["text"]
- else:
- console.print("Messages:")
- # get last 5 messages
- console.print(messages[:5])
- console.print("Popping message:")
- console.print(messages[-1])
- console.print(messages.pop())
- finished = False
- continue
-
- if isinstance(final_response, str):
- try:
- final_response = json.loads(final_response)
- # valid JSON → keep looping
- except json.JSONDecodeError:
- finished = True
- else:
- final_response = json.loads(final_response.replace("\n", ""))
-
- except Exception:
- import traceback
-
- traceback.print_exc()
-
- if last_chunk:
- console.print("Chunk:")
- console.print(last_chunk)
- messages = last_chunk["messages"]
- if not isinstance(final_response, str):
- console.print(f"Final response is not a string : {final_response}, returning None")
- return "None"
- return final_response
diff --git a/src/reviewbot/agent/tasks/data.py b/src/reviewbot/agent/tasks/data.py
new file mode 100644
index 0000000..cd48ef7
--- /dev/null
+++ b/src/reviewbot/agent/tasks/data.py
@@ -0,0 +1,53 @@
+from pathlib import Path
+
+from langgraph.checkpoint.memory import InMemorySaver
+from langgraph.func import task # type:ignore
+from langgraph.store.memory import InMemoryStore
+from pydantic import BaseModel
+
+from reviewbot.infra.git.clone import clone_repo_persistent
+from reviewbot.infra.git.repo_tree import tree
+from reviewbot.infra.gitlab.clone import build_clone_url
+from reviewbot.infra.gitlab.diff import FileDiff, fetch_mr_diffs, get_mr_branch
+
+in_memory_checkpointer = InMemorySaver()
+in_memory_store = InMemoryStore()
+
+
+class GitLabData(BaseModel):
+ clone_url: str
+ diffs: list[FileDiff]
+ diff_refs: dict[str, str]
+ branch: str
+
+
+class RepoSnapshot(BaseModel):
+ repo_path: str
+ repo_tree: str
+
+
+@task
+def fetch_gitlab_data(
+ api_v4: str,
+ project_id: str,
+ mr_iid: str,
+ token: str,
+) -> GitLabData:
+ clone_url = build_clone_url(api_v4, project_id, token)
+ diffs, diff_refs = fetch_mr_diffs(api_v4, project_id, mr_iid, token)
+ branch = get_mr_branch(api_v4, project_id, mr_iid, token)
+ return GitLabData(
+ clone_url=clone_url,
+ diffs=diffs,
+ diff_refs=diff_refs,
+ branch=branch,
+ )
+
+
+@task
+def clone_and_tree(clone_url: str, branch: str) -> RepoSnapshot:
+ repo_path = Path(clone_repo_persistent(clone_url, branch=branch)).resolve()
+ return RepoSnapshot(
+ repo_path=str(repo_path),
+ repo_tree=tree(repo_path),
+ )
diff --git a/src/reviewbot/agent/tasks/issues.py b/src/reviewbot/agent/tasks/issues.py
index 2842c04..26e6b2c 100644
--- a/src/reviewbot/agent/tasks/issues.py
+++ b/src/reviewbot/agent/tasks/issues.py
@@ -1,39 +1,65 @@
-import json
-import threading
+import asyncio
import time
from collections.abc import Callable
-from concurrent.futures import ThreadPoolExecutor, TimeoutError, as_completed
-from dataclasses import dataclass
-from functools import partial
from typing import Any
+from ido_agents.agents.ido_agent import create_ido_agent
+from ido_agents.agents.tool_runner import ToolCallerSettings
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage
-from langgraph.func import task
+from langgraph.func import BaseStore, task # type: ignore
+from pydantic import BaseModel, Field
from rich.console import Console
-from reviewbot.agent.tasks.core import ToolCallerSettings, tool_caller
-from reviewbot.context import Context, store_manager_ctx
-from reviewbot.core.agent import Agent
-from reviewbot.core.issues import Issue, IssueModel
+from reviewbot.agent.workflow.state import CodebaseState, store
+from reviewbot.core.issues import IssueModel
+from reviewbot.core.issues.issue_model import IssueModelList
+from reviewbot.tools.diff import get_diff_from_file
console = Console()
-def get_reasoning_context() -> str:
+# Pydantic models for structured outputs
+class QuickScanResult(BaseModel):
+ """Result of quick scanning a file to determine if it needs deep review."""
+
+ needs_review: bool = Field(
+ description="True if the file needs deep review, False if it can be skipped"
+ )
+
+
+class ValidationResult(BaseModel):
+ """Result of validating issues against the diff."""
+
+ valid_issues: list[IssueModel] = Field(
+ description="Issues that are confirmed to be valid based on the diff"
+ )
+ removed: list[dict[str, Any]] = Field(
+ description="Issues that were removed, each with 'issue' and 'reason' fields"
+ )
+
+
+def get_reasoning_context(store: BaseStore | None) -> str:
"""
- Retrieve stored reasoning history from the current context.
+ Retrieve stored reasoning history from the store.
Returns:
Formatted string of previous reasoning, or empty string if none exists.
"""
+ if not store:
+ return ""
+
try:
- context = store_manager_ctx.get()
- issue_store = context.get("issue_store")
+ NS = ("reasoning",)
+ existing = store.get(NS, "history")
+
+ if not existing:
+ return ""
- if not issue_store or not hasattr(issue_store, "_reasoning_history"):
+ history_data = existing.value if hasattr(existing, "value") else existing
+ if not history_data or not (isinstance(history_data, dict) and history_data.get("items")):
return ""
- reasoning_history = issue_store._reasoning_history
+ reasoning_history = history_data["items"]
if not reasoning_history:
return ""
@@ -47,168 +73,183 @@ def get_reasoning_context() -> str:
return ""
-def with_retry(func: Callable, settings: ToolCallerSettings, *args, **kwargs) -> Any:
- """
- Execute a function with exponential backoff retry logic.
-
- Args:
- func: The function to execute
- settings: Settings containing retry configuration
- *args, **kwargs: Arguments to pass to the function
-
- Returns:
- The result of the function call
-
- Raises:
- The last exception if all retries fail
- """
- max_retries = settings.max_retries
- retry_delay = settings.retry_delay
- retry_max_delay = settings.retry_max_delay
-
- last_exception = None
-
- for attempt in range(max_retries + 1): # +1 for initial attempt
- try:
- return func(*args, **kwargs)
- except Exception as e:
- last_exception = e
-
- # If this was the last attempt, raise the exception
- if attempt >= max_retries:
- console.print(f"[red]All {max_retries} retries failed. Last error: {e}[/red]")
- raise
-
- # Calculate delay with exponential backoff
- delay = min(retry_delay * (2**attempt), retry_max_delay)
-
- # Check if it's a retryable error
- error_msg = str(e).lower()
- is_retryable = any(
- keyword in error_msg
- for keyword in [
- "rate limit",
- "timeout",
- "connection",
- "network",
- "502",
- "503",
- "504",
- "429",
- ]
- )
-
- if not is_retryable:
- console.print(f"[yellow]Non-retryable error encountered: {e}[/yellow]")
- raise
-
- console.print(f"[yellow]Attempt {attempt + 1}/{max_retries + 1} failed: {e}[/yellow]")
- console.print(f"[yellow]Retrying in {delay:.1f} seconds...[/yellow]")
- time.sleep(delay)
-
- # This should never be reached, but just in case
- if last_exception:
- raise last_exception
- raise RuntimeError("Retry logic failed unexpectedly")
-
-
-@dataclass
-class IssuesInput:
- agent: Agent
- context: Context
- settings: ToolCallerSettings
- on_file_complete: Callable[[str, list[IssueModel]], None] | None = None
- quick_scan_agent: Agent | None = None
-
-
@task
-def identify_issues(ctx: IssuesInput) -> list[Issue]:
- """
- Identify the issues in the codebase using concurrent agents per file.
+async def identify_issues(
+ *,
+ settings: ToolCallerSettings,
+ on_file_complete: Callable[[str, list[IssueModel]], None] | None = None,
+ agent: Any,
+ quick_scan_agent: Any | None = None,
+ model: Any | None = None,
+ tools: list[Any] | None = None,
+ quick_scan_model: Any | None = None,
+ quick_scan_tools: list[Any] | None = None,
+ acknowledgment_info: tuple[str, str, Any] | None = None,
+) -> list[IssueModel]:
"""
- agent = ctx.agent
- context = ctx.context
- settings = ctx.settings
- on_file_complete = ctx.on_file_complete
- quick_scan_agent = ctx.quick_scan_agent
+ Identify issues in the codebase using concurrent agents per file.
- issue_store = context.get("issue_store")
- if not issue_store:
- raise ValueError("Issue store not found")
-
- manager = context.get("store_manager")
- if not manager:
- raise ValueError("Store manager not found")
-
- store = manager.get_store()
- if not store:
- raise ValueError("Store not found")
-
- tree = manager.get_tree()
- diffs = manager.get_diffs()
-
- if not tree or not diffs:
- raise ValueError("Tree or diffs not found")
-
- # Run concurrent reviews - pass the context values and callback
- all_issues = run_concurrent_reviews(
+ Reads CodebaseState from store and runs concurrent reviews.
+ Returns list of IssueModel objects.
+ """
+ # Read codebase state from store
+ NS = ("codebase",)
+ raw = store.get(NS, "state")
+ if not raw:
+ raise ValueError("Codebase state not found in store")
+
+ codebase_data = raw.value if hasattr(raw, "value") else raw
+ codebase = CodebaseState.model_validate(codebase_data)
+ diffs = codebase.diffs
+
+ # Run concurrent reviews
+ all_issues = await run_concurrent_reviews(
agent,
diffs,
settings,
- context,
on_file_complete=on_file_complete,
quick_scan_agent=quick_scan_agent,
+ model=model,
+ tools=tools,
+ quick_scan_model=quick_scan_model,
+ quick_scan_tools=quick_scan_tools,
+ acknowledgment_info=acknowledgment_info,
)
- # Convert to domain objects
- return [issue.to_domain() for issue in all_issues]
+ return all_issues
+
+
+def format_progress_message(
+ all_files: list[str],
+ completed_files: set[str],
+ in_progress_files: dict[asyncio.Future[Any], str],
+ max_workers: int,
+) -> str:
+ """Format a progress message for the acknowledgment note."""
+ total = len(all_files)
+ completed = len(completed_files)
+ in_progress = len(in_progress_files)
+ pending = total - completed - in_progress
+
+ # Progress badge
+ progress_badge = "" # FIXME(review): badge string was garbled in this patch; restore the intended badge markup
+
+ progress_text = f"{completed}/{total} files reviewed"
+
+ # Build message
+ lines = [progress_badge, "", f"**Review Progress: {progress_text}**", ""]
+
+ # Worker status
+ lines.append(f"**Active Workers ({in_progress}/{max_workers}):**")
+ if in_progress_files:
+ for task, file_path in in_progress_files.items():
+ if not task.done():
+ lines.append(f"- Reviewing: `{file_path}`")
+ else:
+ lines.append("- (All workers idle)")
+
+ lines.append("")
+
+ # Completed files
+ if completed_files:
+ lines.append(f"**Completed ({completed}):**")
+ for file_path in sorted(completed_files):
+ lines.append(f"- `{file_path}`")
+ lines.append("")
+
+ # Pending files
+ if pending > 0:
+ pending_list = [
+ f for f in all_files if f not in completed_files and f not in in_progress_files.values()
+ ]
+ lines.append(f"**Pending ({pending}):**")
+ for file_path in pending_list[:5]: # Show first 5
+ lines.append(f"- `{file_path}`")
+ if pending > 5:
+ lines.append(f"- ... and {pending - 5} more")
+ return "\n".join(lines)
-def monitor_progress(
- future_to_file: dict,
- stop_event: threading.Event,
+
+async def monitor_progress(
+ task_to_file: dict[asyncio.Future[Any], str],
+ start_times: dict[asyncio.Future[Any], float],
+ stop_event: asyncio.Event,
task_timeout: int = 300, # 5 minutes per task
+ acknowledgment_info: tuple[str, str, Any] | None = None,
+ all_files: list[str] | None = None,
+ completed_files: set[str] | None = None,
+ max_workers: int = 3,
):
"""
- Monitor thread that logs the status of ongoing tasks.
+ Monitor coroutine that logs the status of ongoing tasks and updates acknowledgment.
"""
- start_times = {future: time.time() for future in future_to_file.keys()}
+ update_interval = 10 # Update every 10 seconds
+ last_update = time.time()
while not stop_event.is_set():
- time.sleep(10) # Check every 10 seconds
+ await asyncio.sleep(5) # Check every 5 seconds
current_time = time.time()
- for future, file_path in future_to_file.items():
- if not future.done():
- elapsed = current_time - start_times[future]
+
+ # Log status to console
+ for io_task, file_path in task_to_file.items():
+ if not io_task.done():
+ start_time = start_times.get(io_task)
+ if start_time is None:
+ continue
+ elapsed = current_time - start_time
if elapsed > task_timeout:
console.print(
f"[red]TIMEOUT WARNING: {file_path} has been running for {elapsed:.0f}s[/red]"
)
- else:
- console.print(
- f"[yellow]Still processing: {file_path} ({elapsed:.0f}s elapsed)[/yellow]"
+
+ # Update acknowledgment note every 10 seconds
+ if acknowledgment_info and all_files and completed_files is not None:
+ if current_time - last_update >= update_interval:
+ try:
+ discussion_id, note_id, gitlab_config = acknowledgment_info
+ progress_message = format_progress_message(
+ all_files, completed_files, task_to_file, max_workers
+ )
+
+ # Import here to avoid circular dependency
+ from reviewbot.infra.gitlab.note import async_update_discussion_note
+
+ await async_update_discussion_note(
+ api_v4=gitlab_config.get_api_base_url(),
+ token=gitlab_config.token.get_secret_value(),
+ project_id=gitlab_config.get_project_identifier(),
+ mr_iid=gitlab_config.get_pr_identifier(),
+ discussion_id=discussion_id,
+ note_id=note_id,
+ body=progress_message,
)
+ last_update = current_time
+ except Exception as e:
+ console.print(f"[yellow]Failed to update progress note: {e}[/yellow]")
-def run_concurrent_reviews(
+async def run_concurrent_reviews(
agent: Any,
diffs: list[Any],
settings: ToolCallerSettings,
- context: Context,
- max_workers: int = 3, # Serial processing to avoid thread safety and rate limit issues
+ max_workers: int = 3, # Limit concurrency to avoid rate limit issues
task_timeout: int = 160, # 5 minutes timeout per file
on_file_complete: Callable[[str, list[IssueModel]], None] | None = None,
quick_scan_agent: Any | None = None,
+ model: Any | None = None,
+ tools: list[Any] | None = None,
+ quick_scan_model: Any | None = None,
+ quick_scan_tools: list[Any] | None = None,
+ acknowledgment_info: tuple[str, str, Any] | None = None,
) -> list[IssueModel]:
"""
- Run concurrent reviews of all diff files with context propagation and monitoring.
+ Run concurrent reviews of all diff files with monitoring.
Args:
agent: The agent to use for reviews
diffs: List of diff objects
settings: Tool caller settings
- context: Context object
max_workers: Maximum number of concurrent workers
task_timeout: Timeout per task in seconds
on_file_complete: Optional callback function called when each file's review completes.
@@ -222,39 +263,58 @@ def run_concurrent_reviews(
console.print(f"[dim]Files: {', '.join(diff_file_paths)}[/dim]\n")
all_issues: list[IssueModel] = []
-
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
- # Create a partial function with context baked in
- review_with_context = partial(
- review_single_file_with_context,
- agent=agent,
- settings=settings,
- context=context,
- quick_scan_agent=quick_scan_agent,
- )
-
- # Submit tasks
- future_to_file = {
- executor.submit(review_with_context, file_path): file_path
- for file_path in diff_file_paths
- }
-
- # Start monitoring thread
- stop_monitor = threading.Event()
- monitor_thread = threading.Thread(
- target=monitor_progress,
- args=(future_to_file, stop_monitor, task_timeout),
- daemon=True,
+ completed_files: set[str] = set()
+
+ semaphore = asyncio.Semaphore(max_workers)
+ task_to_file: dict[asyncio.Future[Any], str] = {}
+ start_times: dict[asyncio.Future[Any], float] = {}
+
+ # Wrapper to track file_path with result
+ async def review_with_tracking(file_path: str) -> tuple[str, list[IssueModel]]:
+ async with semaphore:
+ task = asyncio.current_task()
+ if task is not None:
+ start_times[task] = time.time()
+ task_to_file[task] = file_path
+
+ result = await asyncio.wait_for(
+ review_single_file_wrapper(
+ file_path=file_path,
+ agent=agent,
+ settings=settings,
+ quick_scan_agent=quick_scan_agent,
+ model=model,
+ tools=tools,
+ quick_scan_model=quick_scan_model,
+ quick_scan_tools=quick_scan_tools,
+ ),
+ timeout=task_timeout,
+ )
+ return file_path, result
+
+ # Create tasks
+ tasks = [asyncio.create_task(review_with_tracking(fp)) for fp in diff_file_paths]
+
+ stop_monitor = asyncio.Event()
+ monitor_task = asyncio.create_task(
+ monitor_progress(
+ task_to_file,
+ start_times,
+ stop_monitor,
+ task_timeout,
+ acknowledgment_info=acknowledgment_info,
+ all_files=diff_file_paths,
+ completed_files=completed_files,
+ max_workers=max_workers,
)
- monitor_thread.start()
+ )
- # Process results with timeout
- for future in as_completed(future_to_file, timeout=task_timeout * len(diff_file_paths)):
- file_path = future_to_file[future]
+ try:
+ for coro in asyncio.as_completed(tasks):
try:
- # Get result with per-task timeout
- issues = future.result(timeout=task_timeout)
+ file_path, issues = await coro
all_issues.extend(issues)
+ completed_files.add(file_path)
console.print(f"[green]✓[/green] Processed {file_path}: {len(issues)} issues")
# Call the callback if provided, allowing immediate discussion creation
@@ -269,16 +329,15 @@ def run_concurrent_reviews(
traceback.print_exc()
except TimeoutError:
- console.print(f"[red]✗[/red] TIMEOUT: {file_path} took longer than {task_timeout}s")
+ console.print(f"[red]✗[/red] TIMEOUT: A file took longer than {task_timeout}s")
except Exception as e:
- console.print(f"[red]✗[/red] Failed {file_path}: {e}")
+ console.print(f"[red]✗[/red] Failed: {e}")
import traceback
traceback.print_exc()
-
- # Stop monitoring thread
+ finally:
stop_monitor.set()
- monitor_thread.join(timeout=1)
+ await monitor_task
console.print(
f"\n[bold green]Review complete! Total issues found: {len(all_issues)}[/bold green]"
@@ -297,20 +356,21 @@ def run_concurrent_reviews(
return all_issues
-def quick_scan_file(
+async def quick_scan_file(
agent: Any,
file_path: str,
settings: ToolCallerSettings,
+ model: Any | None = None,
+ tools: list[Any] | None = None,
) -> bool:
"""
Quick scan with low-effort agent to determine if file needs deep review.
Returns True if file needs deep review, False otherwise.
"""
# Fetch the diff first to include in prompt
- from reviewbot.tools import get_diff as get_diff_tool
try:
- diff_content = get_diff_tool.invoke({"file_path": file_path})
+ diff_content = get_diff_from_file(agent.store, file_path)
except Exception as e:
console.print(f"[yellow]Could not fetch diff for {file_path}: {e}[/yellow]")
return True # If can't get diff, do deep review to be safe
@@ -319,7 +379,7 @@ def quick_scan_file(
SystemMessage(
content="""You are a code review triage assistant. Your job is to quickly determine if a file change needs deep review.
-Review the diff and decide if this file needs detailed analysis. Return TRUE if ANY of these apply:
+Review the diff and decide if this file needs detailed analysis. Set needs_review=true if ANY of these apply:
- New code that implements business logic
- Changes to security-sensitive code (auth, permissions, data validation)
- Database queries or migrations
@@ -329,14 +389,12 @@ def quick_scan_file(
- Configuration changes that affect behavior
- Use tool 'think' to reason. You must reason at least 10 times before giving an answer
-Return FALSE if:
+Set needs_review=false if:
- Only formatting/whitespace changes
- Simple refactoring (renaming variables/functions)
- Adding/updating comments or documentation only
- Import reordering
-- Trivial changes (typo fixes in strings, adding logging)
-
-Output ONLY "true" or "false" (lowercase, no quotes)."""
+- Trivial changes (typo fixes in strings, adding logging)"""
),
HumanMessage(
content=f"""Quickly scan this file and determine if it needs deep review: {file_path}
@@ -345,25 +403,30 @@ def quick_scan_file(
```diff
{diff_content}
-```
-
-Respond with ONLY "true" or "false" based on the criteria above."""
+```"""
),
]
try:
console.print(f"[dim]Quick scanning: {file_path}[/dim]")
- raw = with_retry(tool_caller, settings, agent, messages, settings)
- result = str(raw).strip().lower()
+ if model is None:
+ raise ValueError("model parameter is required for ido-agents migration")
+
+ ido_agent = create_ido_agent(model=model, tools=tools or [])
+ result = await (
+ ido_agent.with_structured_output(QuickScanResult)
+ .with_tool_caller(settings)
+ .with_retry(max_retries=3)
+ .ainvoke(messages)
+ )
- needs_review = "true" in result
- if needs_review:
+ if result.needs_review:
console.print(f"[yellow]✓ Needs deep review: {file_path}[/yellow]")
else:
console.print(f"[dim]⊘ Skipping deep review: {file_path}[/dim]")
- return needs_review
+ return result.needs_review
except Exception as e:
console.print(
f"[yellow]Quick scan failed for {file_path}, defaulting to deep review: {e}[/yellow]"
@@ -371,62 +434,62 @@ def quick_scan_file(
return True # If scan fails, do deep review to be safe
-def review_single_file_with_context(
+async def review_single_file_wrapper(
file_path: str,
agent: Any,
settings: ToolCallerSettings,
- context: Context,
quick_scan_agent: Any | None = None,
+ model: Any | None = None,
+ tools: list[Any] | None = None,
+ quick_scan_model: Any | None = None,
+ quick_scan_tools: list[Any] | None = None,
) -> list[IssueModel]:
"""
- Wrapper that sets context before reviewing.
- This runs in each worker thread.
+ Wrapper for reviewing a single file with optional quick scan.
+ This runs per async task.
"""
try:
- # Set the context var for this thread
- store_manager_ctx.set(context)
-
- console.print(f"[dim]Context set for thread processing: {file_path}[/dim]")
-
# Quick scan first if agent provided
if quick_scan_agent:
- needs_deep_review = quick_scan_file(quick_scan_agent, file_path, settings)
+ needs_deep_review = await quick_scan_file(
+ quick_scan_agent, file_path, settings, quick_scan_model, quick_scan_tools
+ )
if not needs_deep_review:
console.print(f"[dim]Skipping deep review for: {file_path}[/dim]")
return []
# Now call the actual review function
- return review_single_file(agent, file_path, settings)
+ return await review_single_file(agent, file_path, settings, model, tools)
except Exception as e:
- console.print(f"[red]Exception in thread for {file_path}: {e}[/red]")
+ console.print(f"[red]Exception in task for {file_path}: {e}[/red]")
import traceback
traceback.print_exc()
return []
-def review_single_file(
+async def review_single_file(
agent: Any,
file_path: str,
settings: ToolCallerSettings,
+ model: Any | None = None,
+ tools: list[Any] | None = None,
) -> list[IssueModel]:
"""
Review a single diff file and return issues found.
"""
# Get any previous reasoning context
- reasoning_context = get_reasoning_context()
+ reasoning_context = get_reasoning_context(agent.store)
# Force a reasoning pass to ensure think() is invoked during deep review
try:
- from reviewbot.tools import get_diff as get_diff_tool
-
- diff_content = get_diff_tool.invoke({"file_path": file_path})
+ diff_content = get_diff_from_file(agent.store, file_path)
think_messages: list[BaseMessage] = [
SystemMessage(
content=(
- "You are a senior code reviewer. You MUST call think() exactly once "
+ "You are a senior code reviewer. You must think and review. "
"with 2-5 sentences of reasoning about the provided diff. "
- "Do not use any other tools. After calling think(), reply with the "
+ "Do not use any other tools. Once finished, reply with the "
"single word DONE."
)
),
@@ -439,96 +502,97 @@ def review_single_file(
""",
),
]
- think_settings = ToolCallerSettings(max_tool_calls=1, max_iterations=1)
- tool_caller(agent, think_messages, think_settings)
+ if model:
+ think_settings = ToolCallerSettings(max_tool_calls=40)
+ ido_agent = create_ido_agent(model=model, tools=tools or [])
+ await ido_agent.with_tool_caller(think_settings).ainvoke(think_messages)
except Exception as e:
- console.print(f"[yellow]⚠ Failed to record reasoning for {file_path}: {e}[/yellow]")
+ console.print(f"[yellow]Failed to record reasoning for {file_path}: {e}[/yellow]")
messages: list[BaseMessage] = [
SystemMessage(
- content=f"""You are a senior code reviewer analyzing a specific file change.
+ content=f"""You are a senior code reviewer analyzing code changes for bugs, security issues, and logic errors.
+
+AVAILABLE TOOLS:
+- `think()` - Record your internal reasoning (use this to analyze the code)
+- `get_diff(file_path)` - Get the diff for the file being reviewed
+- `read_file(file_path)` - Read the COMPLETE file to see full context beyond the diff
+- `read_file(file_path, line_start, line_end)` - Read specific line ranges
+- `ls_dir(dir_path)` - List contents of a directory to explore the codebase structure
+
+IMPORTANT: CONTEXT LIMITATIONS
+The diff shows only the changed lines, not the full file. When you need to verify something outside the diff (like imports, variable declarations, or function definitions), use `read_file()` to see the complete context.
+
+Use `read_file()` when:
+- You suspect undefined variables/imports but they might exist elsewhere in the file
+- You need to understand surrounding code to assess impact
+- The change references code not shown in the diff
+
+HANDLING NEW FILES:
+If `read_file()` returns an error stating the file is NEW:
+- This file doesn't exist yet in the repository
+- You can only see what's in the diff
+- Be lenient about imports/definitions (assume they're complete in the actual PR)
+- Focus on logic bugs, security issues, and clear errors in the visible code
REASONING TOOL:
-- You have access to a `think()` tool for recording your internal reasoning
-- Use it to plan your approach, analyze patterns, or reason about potential issues
-- Your reasoning is stored and will be available in subsequent requests
-- This helps maintain context and improves review quality{reasoning_context}
- - During deep reviews, you MUST call think() before producing your JSON output
-
-Your task: Review ONLY the file '{file_path}' from the merge request diff.
+- Use `think()` to record your analysis process{reasoning_context}
+- Call `think()` before producing your final output
+- Document your reasoning about each potential issue
+
+Your task: Review the file '{file_path}' and identify actionable issues.
+
+WHAT TO REPORT:
+- **Critical bugs** - Code that will crash, throw errors, or produce incorrect results
+- **Security vulnerabilities** - SQL injection, XSS, authentication bypass, etc.
+- **Logic errors** - Incorrect algorithms, wrong conditions, broken business logic
+- **Data corruption risks** - Code that could corrupt data or cause inconsistent state
+- **Performance problems** - Clear bottlenecks like O(n²) where O(n) is possible
+- **Breaking changes** - Changes that break existing APIs or functionality
+
+WHAT NOT TO REPORT:
+- Code style preferences (naming, formatting, organization)
+- Missing documentation or comments
+- Minor refactoring suggestions that don't fix bugs
+- Hypothetical edge cases without evidence they're relevant
+- Issues based on assumptions about the environment (e.g., "X might not be installed")
+- Version numbers or package versions you're unfamiliar with (they may be newer than your training)
+- Import paths or APIs you don't recognize (they may have changed since your training)
-IMPORTANT GUIDELINES:
-- Be CONSERVATIVE: Only report real, actionable issues - not stylistic preferences or nitpicks
-- If there are NO legitimate issues, return an empty array: []
+IMPORTANT:
- Do NOT invent issues to justify the review
-- Only report issues with clear negative impact (bugs, security risks, performance problems, logic errors)
-- Avoid reporting issues about code style, formatting, or personal preferences unless they violate critical standards
-- Medium/High severity issues should be reserved for actual bugs, security vulnerabilities, or broken functionality
-- The `description` field MUST include a short plain-text explanation (1-3 sentences).
-
-CRITICAL - KNOWLEDGE CUTOFF AWARENESS:
-Your training data has a cutoff date. The code you're reviewing may use:
-- Package versions released AFTER your training (e.g., v2, v3 of libraries)
-- Language versions you don't know about (e.g., Go 1.23+, Python 3.13+)
-- Import paths that have changed since your training
-- APIs that have been updated
-
-DO NOT FLAG as issues:
-Version numbers (e.g., "Go 1.25 doesn't exist" - it might now!)
-Import paths you don't recognize (e.g., "should be v1 not v2" - v2 might be correct!)
-Package versions (e.g., "mongo-driver/v2" - newer versions exist!)
-Language features you don't recognize (they might be new)
-API methods you don't know (they might have been added)
-
-ONLY flag version/import issues if:
-There's an obvious typo (e.g., "monggo" instead of "mongo")
-The code itself shows an error (e.g., import fails in the diff)
-There's a clear pattern mismatch (e.g., mixing v1 and v2 imports inconsistently)
-
-When in doubt about versions/imports: ASSUME THE DEVELOPER IS CORRECT and skip it.
+- Only report issues with direct evidence in the code shown
+
+SEVERITY GUIDELINES:
+- **HIGH**: Crashes, security vulnerabilities, data corruption, broken functionality
+- **MEDIUM**: Logic errors, performance issues, likely bugs in edge cases
+- **LOW**: Minor issues that could cause problems in rare scenarios
SUGGESTIONS:
-- When a fix is simple, provide a "suggestion" field.
-- **GitLab Syntax Requirement**: You must format the suggestion using relative line offsets based on your `start_line` and `end_line`.
-- **The Formula**:
-1. The header MUST be: ```diff
-- **Content**: The suggestion must include the full corrected code for every line from `start_line` to `end_line`.
-- **Indentation**: You MUST preserve the exact leading whitespace of the original code.
-- Format:
-```diff
-[CORRECTED CODE BLOCK]
-```
-Output format: JSON array of issue objects following this schema:
-{IssueModel.model_json_schema()}
-
-Focus ONLY on:
-1. **Critical bugs** - Code that will crash or produce incorrect results
-2. **Security vulnerabilities** - Actual exploitable security issues (SQL injection, XSS, etc.)
-3. **Logic errors** - Incorrect business logic or algorithm implementation
-4. **Performance problems** - Clear performance bottlenecks (O(n²) where O(n) is possible, memory leaks, etc.)
-5. **Breaking changes** - Code that breaks existing functionality or APIs
-
-DO NOT report:
-- Stylistic preferences (variable naming, code organization) unless they severely impact readability
-- Missing comments or documentation
-- Minor code smells that don't impact functionality
-- Hypothetical edge cases without evidence they're relevant
-- Refactoring suggestions unless current code is broken
-- Version numbers, import paths, or package versions you're unfamiliar with
-- Missing imports
+When you identify an issue with a clear fix, provide a `suggestion` field with the corrected code.
+Format as a diff showing the old and new code:
+- Lines starting with `-` show old code to remove
+- Lines starting with `+` show new code to add
+- Preserve exact indentation from the original
+
+OUTPUT:
+Return a JSON array of issues. If no issues are found, return an empty array: []
+Each issue must have: title, description, severity, file_path, start_line, end_line, and optionally suggestion.
Be specific and reference exact line numbers from the diff."""
),
HumanMessage(
content=f"""Review the merge request diff for the file: {file_path}
-INSTRUCTIONS:
-1. Use the get_diff("{file_path}") tool ONCE to retrieve the diff
-2. Review the diff content directly - read other files if absolutely necessary for more context
-3. Output your findings immediately in JSON format
+WORKFLOW:
+1. Use `get_diff("{file_path}")` to get the diff
+2. Analyze the changes for bugs, security issues, and logic errors
+3. If you need context beyond the diff (imports, variable declarations, surrounding code):
+ - Use `read_file("{file_path}")` to see the complete file
+4. Use `think()` to document your reasoning and analysis
+5. Return your findings as a list of issues (or empty list if none)
-Analyze ONLY this file's diff. If you find legitimate issues, output them in JSON format.
-If there are no real issues, output an empty array: []
+Find the real bugs - that's what matters most!
"""
),
]
@@ -536,19 +600,29 @@ def review_single_file(
try:
console.print(f"[cyan]Starting review of: {file_path}[/cyan]")
- # Use retry logic for the LLM call
- raw = with_retry(tool_caller, settings, agent, messages, settings)
+ if model is None:
+ raise ValueError("model parameter is required for ido-agents migration")
- console.print(f"[green]Completed review of: {file_path}[/green]")
- console.print(
- f"Raw response: {raw[:200]}..." if len(str(raw)) > 200 else f"Raw response: {raw}"
+ # Use retry logic for the LLM call with structured output
+ ido_agent = create_ido_agent(model=model, tools=tools or [])
+ issues_result = await (
+ ido_agent.with_structured_output(IssueModelList)
+ .with_tool_caller(settings)
+ .with_retry(max_retries=3)
+ .ainvoke(messages)
)
- issues = parse_issues_from_response(raw, file_path, "review")
+ # Extract the actual list from the RootModel
+ issues = issues_result.root
+
+ console.print(f"[green]Completed review of: {file_path}[/green]")
+ console.print(f"Found {len(issues)} potential issues")
if issues:
# Validate issues against the diff to reduce hallucinations before creating notes.
- issues = validate_issues_for_file(agent, file_path, issues, settings)
+ issues = await validate_issues_for_file(
+ agent, file_path, issues, settings, model, tools
+ )
console.print(f"[blue]Found {len(issues)} issues in {file_path}[/blue]")
return issues
@@ -561,107 +635,98 @@ def review_single_file(
return []
-def parse_issues_from_response(
- raw: Any,
- file_path: str,
- context_label: str,
-) -> list[IssueModel]:
- issues: list[IssueModel] = []
- if isinstance(raw, str):
- try:
- parsed = json.loads(raw)
- if isinstance(parsed, list):
- for issue_data in parsed:
- try:
- issues.append(IssueModel.model_validate(issue_data))
- except Exception as e:
- console.print(f"[yellow]Failed to validate issue: {e}[/yellow]")
- elif isinstance(parsed, dict):
- try:
- issues.append(IssueModel.model_validate(parsed))
- except Exception as e:
- console.print(f"[yellow]Failed to validate issue: {e}[/yellow]")
- except json.JSONDecodeError as e:
- console.print(f"[red]Failed to parse JSON for {file_path} ({context_label}): {e}[/red]")
- return issues
-
-
-def validate_issues_for_file(
+async def validate_issues_for_file(
agent: Any,
file_path: str,
issues: list[IssueModel],
settings: ToolCallerSettings,
+ model: Any | None = None,
+ tools: list[Any] | None = None,
) -> list[IssueModel]:
if not issues:
return []
try:
- from reviewbot.tools import get_diff as get_diff_tool
-
- diff_content = get_diff_tool.invoke({"file_path": file_path})
+ diff_content = get_diff_from_file(agent.store, file_path)
except Exception as e:
console.print(f"[yellow]Issue validation skipped for {file_path}: {e}[/yellow]")
return []
# Use JSON-friendly payload so enums serialize cleanly.
issues_payload = [issue.model_dump(mode="json") for issue in issues]
+
+ # Import json module for dumps
+ import json
+
messages: list[BaseMessage] = [
SystemMessage(
content=(
- "You are an issue checker. Validate each issue strictly against the diff.\n"
- "Keep an issue ONLY if the diff provides direct evidence that the issue is real.\n"
- "Do NOT create new issues and do NOT modify fields. For any removed issue, provide\n"
- "a short reason grounded in the diff. Do not use tools."
+ "You are an issue validator. Your job is to remove FALSE POSITIVES while keeping real bugs.\n\n"
+ "AVAILABLE TOOLS:\n"
+ "- `read_file(file_path)` - Read the complete file to verify issues\n"
+ "- `ls_dir(dir_path)` - List directory contents to verify file structure\n\n"
+ "WHAT TO REMOVE (false positives):\n"
+ "- 'Variable X undefined' - when X is actually defined elsewhere in the file\n"
+ "- 'Import Y missing' - when Y exists at the top of the file\n"
+ "- 'Function Z not declared' - when Z is defined in the complete file\n\n"
+ "WHAT TO KEEP (real issues):\n"
+ "- Logic errors - wrong conditions, broken algorithms, incorrect business logic\n"
+ "- Security vulnerabilities - SQL injection, XSS, auth bypass, etc.\n"
+ "- Bugs that will crash or produce wrong results\n"
+ "- Data corruption risks\n"
+ "- Performance problems\n\n"
+ "RULES:\n"
+ "- KEEP issues about logic, bugs, security, and functionality\n"
+ "- ONLY remove issues that are provably false (use read_file to verify)\n"
+ "- When in doubt, KEEP the issue - don't filter out real bugs\n"
+ "- Do NOT create new issues\n"
+ "- Do NOT modify issue fields"
)
),
HumanMessage(
content=f"""File: {file_path}
-Diff:
+Diff (shows only changes):
```diff
{diff_content}
```
-Issues to validate (JSON):
+Issues to validate:
{json.dumps(issues_payload, indent=2)}
-Return ONLY a JSON object in this exact shape:
-{{
- "valid_issues": [],
- "removed": [
- {{
- "issue": ,
- "reason": ""
- }}
- ]
-}}"""
+TASK:
+1. For issues about "undefined/missing" code, use `read_file("{file_path}")` to check if the code actually exists elsewhere
+2. Remove ONLY clear false positives
+3. Keep all logic bugs, security issues, and real functionality problems
+
+Return a ValidationResult with:
+- valid_issues: confirmed real issues
+- removed: false positives with reason for removal"""
),
]
- validation_settings = ToolCallerSettings(
- max_tool_calls=0,
- max_iterations=1,
- max_retries=settings.max_retries,
- retry_delay=settings.retry_delay,
- retry_max_delay=settings.retry_max_delay,
- )
+ validation_settings = ToolCallerSettings(max_tool_calls=5) # Allow tool calls for validation
try:
- raw = with_retry(tool_caller, validation_settings, agent, messages, validation_settings)
+ if model is None:
+ raise ValueError("model parameter is required for ido-agents migration")
+
+ ido_agent = create_ido_agent(model=model, tools=tools or [])
+ result = await (
+ ido_agent.with_structured_output(ValidationResult)
+ .with_tool_caller(validation_settings)
+ .with_retry(max_retries=3)
+ .ainvoke(messages)
+ )
except Exception as e:
console.print(f"[yellow]Issue validation failed for {file_path}: {e}[/yellow]")
return issues
- validated, removed = parse_validation_response(raw, file_path)
- if validated is None:
+ if result.removed:
console.print(
- f"[yellow]Issue validation response invalid for {file_path}; keeping original issues[/yellow]"
+ f"[dim]Issue validation removed {len(result.removed)} issue(s) in {file_path}[/dim]"
)
- return issues
-
- if removed:
- console.print(f"[dim]Issue validation removed {len(removed)} issue(s) in {file_path}[/dim]")
- for entry in removed:
+ for entry in result.removed:
reason = entry.get("reason", "").strip()
issue = entry.get("issue", {})
title = issue.get("title", "Untitled issue")
@@ -670,40 +735,4 @@ def validate_issues_for_file(
else:
console.print(f"[dim]- {title}: no reason provided[/dim]")
- return validated
-
-
-def parse_validation_response(
- raw: Any,
- file_path: str,
-) -> tuple[list[IssueModel] | None, list[dict[str, Any]]]:
- if not isinstance(raw, str):
- return None, []
- try:
- parsed = json.loads(raw)
- except json.JSONDecodeError as e:
- console.print(f"[red]Failed to parse validation JSON for {file_path}: {e}[/red]")
- return None, []
-
- if not isinstance(parsed, dict):
- console.print(
- f"[red]Validation response for {file_path} is not an object, got {type(parsed)}[/red]"
- )
- return None, []
-
- valid_issues_raw = parsed.get("valid_issues", [])
- removed = parsed.get("removed", [])
-
- if not isinstance(valid_issues_raw, list) or not isinstance(removed, list):
- console.print(f"[red]Validation response for {file_path} missing expected keys[/red]")
- return None, []
-
- valid_issues: list[IssueModel] = []
- for issue_data in valid_issues_raw:
- try:
- valid_issues.append(IssueModel.model_validate(issue_data))
- except Exception as e:
- console.print(f"[yellow]Failed to validate issue after checking: {e}[/yellow]")
- return None, []
-
- return valid_issues, removed
+ return result.valid_issues
diff --git a/src/reviewbot/agent/workflow/__init__.py b/src/reviewbot/agent/workflow/__init__.py
index d09e7a1..c0ccf85 100644
--- a/src/reviewbot/agent/workflow/__init__.py
+++ b/src/reviewbot/agent/workflow/__init__.py
@@ -1,4 +1,12 @@
-from reviewbot.agent.workflow.config import GitLabConfig
-from reviewbot.agent.workflow.runner import work_agent
+from reviewbot.agent.workflow.config import GitLabConfig, GitProviderConfig
+from reviewbot.agent.workflow.runner import work_agent # type: ignore
+from reviewbot.agent.workflow.state import CodebaseState, checkpointer, store
-__all__ = ["GitLabConfig", "work_agent"]
+__all__ = [
+ "GitLabConfig",
+ "GitProviderConfig",
+ "work_agent",
+ "CodebaseState",
+ "checkpointer",
+ "store",
+]
diff --git a/src/reviewbot/agent/workflow/config.py b/src/reviewbot/agent/workflow/config.py
index 8ac06c0..7b74f35 100644
--- a/src/reviewbot/agent/workflow/config.py
+++ b/src/reviewbot/agent/workflow/config.py
@@ -1,11 +1,46 @@
-from dataclasses import dataclass
+from abc import ABC, abstractmethod
+from pydantic import BaseModel, SecretStr
-@dataclass
-class GitLabConfig:
- """GitLab API configuration"""
+
+class GitProviderConfig(BaseModel, ABC):
+ """Abstract base configuration for git providers."""
+
+ token: SecretStr
+
+ model_config = {
+ "extra": "forbid",
+ "frozen": True,
+ }
+
+ @abstractmethod
+ def get_api_base_url(self) -> str:
+ """Return the base API URL for this provider."""
+ pass
+
+ @abstractmethod
+ def get_project_identifier(self) -> str:
+ """Return the project/repository identifier."""
+ pass
+
+ @abstractmethod
+ def get_pr_identifier(self) -> str:
+ """Return the pull/merge request identifier."""
+ pass
+
+
+class GitLabConfig(GitProviderConfig):
+ """GitLab-specific configuration."""
api_v4: str
- token: str
project_id: str
mr_iid: str
+
+ def get_api_base_url(self) -> str:
+ return self.api_v4
+
+ def get_project_identifier(self) -> str:
+ return self.project_id
+
+ def get_pr_identifier(self) -> str:
+ return self.mr_iid
diff --git a/src/reviewbot/agent/workflow/discussions.py b/src/reviewbot/agent/workflow/discussions.py
index 4143234..628b673 100644
--- a/src/reviewbot/agent/workflow/discussions.py
+++ b/src/reviewbot/agent/workflow/discussions.py
@@ -3,7 +3,7 @@
from rich.console import Console # type: ignore
-from reviewbot.agent.workflow.config import GitLabConfig
+from reviewbot.agent.workflow.config import GitProviderConfig
from reviewbot.agent.workflow.diff_extract import create_file_position
from reviewbot.core.issues import IssueModel, IssueSeverity
from reviewbot.infra.gitlab.diff import FileDiff
@@ -15,7 +15,7 @@
def handle_file_issues(
file_path: str,
issues: list[IssueModel],
- gitlab_config: GitLabConfig,
+ gitlab_config: GitProviderConfig,
file_diffs: list[FileDiff],
diff_refs: dict[str, str],
) -> None:
@@ -94,7 +94,7 @@ def build_position() -> dict[str, Any] | None:
f"{issue.description}\n"
)
if issue.suggestion:
- discussion_body += f"\n{issue.suggestion}\n"
+ discussion_body += f"\n```diff\n{issue.suggestion}\n```\n"
discussion_body += "\n"
position = build_position()
@@ -123,7 +123,7 @@ def build_position() -> dict[str, Any] | None:
f"{issue.description}\n"
)
if issue.suggestion:
- reply_body += f"\n{issue.suggestion}\n"
+ reply_body += f"\n```diff\n{issue.suggestion}\n```\n"
note_id = reply_to_discussion(
discussion_id=discussion_id,
body=reply_body,
@@ -156,7 +156,7 @@ def build_position() -> dict[str, Any] | None:
f"{issue.description}\n"
)
if issue.suggestion:
- reply_body += f"\n{issue.suggestion}\n"
+ reply_body += f"\n```diff\n{issue.suggestion}\n```\n"
note_id = reply_to_discussion(
discussion_id=discussion_id,
body=reply_body,
@@ -184,7 +184,7 @@ def build_position() -> dict[str, Any] | None:
def create_discussion(
title: str,
body: str,
- gitlab_config: GitLabConfig,
+ gitlab_config: GitProviderConfig,
position: dict[str, Any] | None = None,
) -> tuple[str, str | None]:
"""
@@ -204,10 +204,10 @@ def create_discussion(
# post_discussion returns (discussion_id, note_id), we only need discussion_id
discussion_id, note_id = post_discussion(
- api_v4=gitlab_config.api_v4,
- token=gitlab_config.token,
- project_id=gitlab_config.project_id,
- mr_iid=gitlab_config.mr_iid,
+ api_v4=gitlab_config.get_api_base_url(),
+ token=gitlab_config.token.get_secret_value(),
+ project_id=gitlab_config.get_project_identifier(),
+ mr_iid=gitlab_config.get_pr_identifier(),
body=body,
position=position,
)
@@ -218,7 +218,7 @@ def create_discussion(
def reply_to_discussion(
discussion_id: str,
body: str,
- gitlab_config: GitLabConfig,
+ gitlab_config: GitProviderConfig,
) -> str | None:
"""
Reply to an existing discussion.
@@ -229,10 +229,10 @@ def reply_to_discussion(
gitlab_config: GitLab API configuration
"""
return post_discussion_reply(
- api_v4=gitlab_config.api_v4,
- token=gitlab_config.token,
- project_id=gitlab_config.project_id,
- merge_request_id=gitlab_config.mr_iid,
+ api_v4=gitlab_config.get_api_base_url(),
+ token=gitlab_config.token.get_secret_value(),
+ project_id=gitlab_config.get_project_identifier(),
+ merge_request_id=gitlab_config.get_pr_identifier(),
discussion_id=discussion_id,
body=body,
)
diff --git a/src/reviewbot/agent/workflow/gitlab_notes.py b/src/reviewbot/agent/workflow/gitlab_notes.py
index c271a5c..262f6d2 100644
--- a/src/reviewbot/agent/workflow/gitlab_notes.py
+++ b/src/reviewbot/agent/workflow/gitlab_notes.py
@@ -1,9 +1,15 @@
+from typing import Any, NamedTuple
from urllib.parse import quote
+from langchain_core.language_models.chat_models import BaseChatModel
+from langgraph.func import task # type: ignore
from rich.console import Console # type: ignore
+from reviewbot.agent.workflow.config import GitProviderConfig
from reviewbot.core.agent import Agent
from reviewbot.core.issues import Issue, IssueSeverity
+from reviewbot.core.reviews.review import Acknowledgment
+from reviewbot.core.reviews.review_model import ReviewSummary
from reviewbot.infra.gitlab.diff import FileDiff
from reviewbot.infra.gitlab.note import (
get_all_discussions,
@@ -12,17 +18,32 @@
update_discussion_note,
)
+
+class AcknowledgmentResult(NamedTuple):
+ discussion_id: str
+ note_id: str
+
+
console = Console()
+@task
def post_review_acknowledgment(
- api_v4: str,
- token: str,
- project_id: str,
- mr_iid: str,
- agent: Agent,
- diffs: list[FileDiff],
-) -> tuple[str, str] | None:
+ *, gitlab: GitProviderConfig, diffs: list[FileDiff], model: BaseChatModel
+) -> AcknowledgmentResult | None:
+ """
+ Posts an initial acknowledgment discussion for the MR review.
+
+ Reads:
+ - CodebaseState from store
+
+ Writes:
+ - acknowledgment ids (returned)
+
+ Returns:
+ AcknowledgmentResult if created, otherwise None
+ """
+
"""
Post a surface-level summary acknowledging the review is starting.
Creates a discussion so it can be updated later.
@@ -41,6 +62,10 @@ def post_review_acknowledgment(
"""
from langchain_core.messages import HumanMessage, SystemMessage
+ api_v4 = gitlab.get_api_base_url()
+ token = gitlab.token.get_secret_value()
+ project_id = gitlab.get_project_identifier()
+ mr_iid = gitlab.get_pr_identifier()
# Check if an acknowledgment already exists
try:
discussions = get_all_discussions(
@@ -57,7 +82,7 @@ def post_review_acknowledgment(
)
# Find ALL "Starting" acknowledgments, then pick the most recent one
- found_acknowledgments = []
+ found_acknowledgments: list[Acknowledgment] = []
for discussion in discussions:
notes = discussion.get("notes", [])
for note in notes:
@@ -69,27 +94,27 @@ def post_review_acknowledgment(
created_at = note.get("created_at", "")
if discussion_id and note_id:
found_acknowledgments.append(
- {
- "discussion_id": str(discussion_id),
- "note_id": str(note_id),
- "created_at": created_at,
- }
+ Acknowledgment(
+ discussion_id=str(discussion_id),
+ note_id=str(note_id),
+ created_at=created_at,
+ )
)
# If we found any in-progress acknowledgments, use the most recent one
if found_acknowledgments:
# Sort by created_at timestamp (most recent first)
- found_acknowledgments.sort(key=lambda x: x["created_at"], reverse=True)
+ found_acknowledgments.sort(key=lambda x: x.created_at, reverse=True)
most_recent = found_acknowledgments[0]
console.print(
f"[dim]Found {len(found_acknowledgments)} in-progress review(s), reusing most recent[/dim]"
)
- return (most_recent["discussion_id"], most_recent["note_id"])
+ return AcknowledgmentResult(most_recent.discussion_id, most_recent.note_id)
# No in-progress reviews found - will create a new acknowledgment
console.print("[dim]No in-progress reviews found, will create new acknowledgment[/dim]")
except Exception as e:
- console.print(f"[yellow]⚠ Could not check for existing acknowledgment: {e}[/yellow]")
+ console.print(f"[yellow]Could not check for existing acknowledgment: {e}[/yellow]")
# Continue anyway - better to post a duplicate than miss it
# Get list of files being reviewed
@@ -121,10 +146,12 @@ def post_review_acknowledgment(
try:
# Get response with no tool calls allowed
- from reviewbot.agent.tasks.core import ToolCallerSettings, tool_caller
+ from ido_agents.agents.ido_agent import create_ido_agent
+ from ido_agents.agents.tool_runner import ToolCallerSettings
- summary_settings = ToolCallerSettings(max_tool_calls=0, max_iterations=1)
- summary = tool_caller(agent, messages, summary_settings)
+ summary_settings = ToolCallerSettings(max_tool_calls=0)
+ ido_agent = create_ido_agent(model=model, tools=[])
+ summary = ido_agent.with_tool_caller(summary_settings).invoke(messages)
# Post as a discussion (so we can update it later)
acknowledgment_body = f"""
@@ -145,16 +172,16 @@ def post_review_acknowledgment(
)
if not note_id:
- console.print("[yellow]⚠ Discussion created but no note ID returned[/yellow]")
+ console.print("[yellow]Discussion created but no note ID returned[/yellow]")
return None
console.print(
f"[green]✓ Posted review acknowledgment (discussion: {discussion_id}, note: {note_id})[/green]"
)
- return (str(discussion_id), str(note_id))
+ return AcknowledgmentResult(str(discussion_id), str(note_id))
except Exception as e:
- console.print(f"[yellow]⚠ Failed to post acknowledgment: {e}[/yellow]")
+ console.print(f"[yellow]Failed to post acknowledgment: {e}[/yellow]")
# Don't fail the whole review if acknowledgment fails
return None
@@ -170,6 +197,8 @@ def update_review_summary(
diffs: list[FileDiff],
diff_refs: dict[str, str],
agent: Agent,
+ model: BaseChatModel | None = None,
+ tools: list[Any] | None = None,
) -> None:
"""
Update the acknowledgment note with a summary of the review results.
@@ -206,7 +235,7 @@ def update_review_summary(
files_with_issues = len(issues_by_file)
# Prepare issue details for LLM
- issues_summary = []
+ issues_summary: list[str] = []
for issue in issues:
issues_summary.append(
f"- **{issue.severity.value.upper()}** in `{issue.file_path}` (lines {issue.start_line}-{issue.end_line}): {issue.description}"
@@ -216,7 +245,7 @@ def update_review_summary(
# Generate LLM summary with reasoning
try:
- from reviewbot.agent.tasks.core import ToolCallerSettings, tool_caller
+ from ido_agents.agents.ido_agent import create_ido_agent
messages = [
SystemMessage(
@@ -262,11 +291,16 @@ def update_review_summary(
),
]
- summary_settings = ToolCallerSettings(max_tool_calls=0, max_iterations=1)
- llm_summary = tool_caller(agent, messages, summary_settings)
+ if model is None:
+ raise ValueError("model parameter is required for ido-agents migration")
+
+ ido_agent = create_ido_agent(model=model, tools=tools or [])
+ llm_summary = ido_agent.with_structured_output(ReviewSummary).invoke(messages)
+
+ llm_summary = str(llm_summary)
except Exception as e:
- console.print(f"[yellow]⚠ Failed to generate LLM summary: {e}[/yellow]")
+ console.print(f"[yellow]Failed to generate LLM summary: {e}[/yellow]")
llm_summary = "Review completed successfully."
# Build final summary combining statistics and LLM reasoning
@@ -298,9 +332,6 @@ def update_review_summary(
if issues:
summary_parts.append("---\n\n")
- issues_by_file: dict[str, list[Issue]] = {}
- for issue in issues:
- issues_by_file.setdefault(issue.file_path, []).append(issue)
severity_badge_colors = {
IssueSeverity.HIGH: "red",
@@ -360,7 +391,7 @@ def update_review_summary(
)
console.print("[green]✓ Updated review acknowledgment with summary[/green]")
except Exception as e:
- console.print(f"[yellow]⚠ Failed to update acknowledgment: {e}[/yellow]")
+ console.print(f"[yellow]Failed to update acknowledgment: {e}[/yellow]")
import traceback
traceback.print_exc()
diff --git a/src/reviewbot/agent/workflow/runner.py b/src/reviewbot/agent/workflow/runner.py
index 5940a63..c020270 100644
--- a/src/reviewbot/agent/workflow/runner.py
+++ b/src/reviewbot/agent/workflow/runner.py
@@ -1,14 +1,15 @@
from pathlib import Path
from typing import Any
+from ido_agents.agents.tool_runner import ToolCallerSettings
+from ido_agents.models.openai import OpenAIModelConfig, build_chat_model
from langchain.agents import create_agent # type: ignore
+from langgraph.func import entrypoint # type: ignore
+from pydantic import BaseModel, SecretStr
from rich.console import Console # type: ignore
-from reviewbot.agent.base import ( # type: ignore
- AgentRunnerInput,
- agent_runner, # type: ignore
-)
-from reviewbot.agent.tasks.core import ToolCallerSettings
+from reviewbot.agent.tasks.data import clone_and_tree, fetch_gitlab_data
+from reviewbot.agent.tasks.issues import identify_issues
from reviewbot.agent.workflow.config import GitLabConfig
from reviewbot.agent.workflow.discussions import handle_file_issues
from reviewbot.agent.workflow.gitlab_notes import (
@@ -16,102 +17,120 @@
update_review_summary,
)
from reviewbot.agent.workflow.ignore import filter_diffs, parse_reviewignore
-from reviewbot.context import Context, store_manager_ctx
+from reviewbot.agent.workflow.state import CodebaseState, checkpointer, store
from reviewbot.core.agent import Agent
from reviewbot.core.config import Config
-from reviewbot.core.issues import Issue
-from reviewbot.infra.embeddings.store_manager import CodebaseStoreManager
-from reviewbot.infra.git.clone import clone_repo_persistent, get_repo_name
-from reviewbot.infra.git.repo_tree import tree
-from reviewbot.infra.gitlab.clone import build_clone_url
-from reviewbot.infra.gitlab.diff import fetch_mr_diffs, get_mr_branch
-from reviewbot.infra.issues.in_memory_issue_store import InMemoryIssueStore
-from reviewbot.models.gpt import get_gpt_model, get_gpt_model_low_effort
-from reviewbot.tools import get_diff, read_file, think
+from reviewbot.infra.git.clone import get_repo_name
+from reviewbot.models.gpt import get_gpt_model_low_effort
+from reviewbot.tools import get_diff, ls_dir, read_file, think
console = Console()
-def work_agent(config: Config, project_id: str, mr_iid: str) -> str:
+class WorkAgentInput(BaseModel):
+ config: Config
+ project_id: str
+ mr_iid: str
+
+ model_config = {
+ "extra": "forbid",
+ "frozen": True,
+ }
+
+
+@entrypoint(checkpointer=checkpointer, store=store)
+async def work_agent(inputs: dict[Any, Any]) -> str:
+ data = WorkAgentInput.model_validate(inputs)
+
+ config = data.config
+ project_id = data.project_id
+ mr_iid = data.mr_iid
+
api_v4 = config.gitlab_api_v4 + "/api/v4"
token = config.gitlab_token
- model = get_gpt_model(config.llm_model_name, config.llm_api_key, config.llm_base_url)
- clone_url = build_clone_url(api_v4, project_id, token)
+ modelCfg = OpenAIModelConfig(
+ model=config.llm_model_name,
+ api_key=config.llm_api_key,
+ base_url=config.llm_base_url,
+ temperature=0.0,
+ reasoning_effort="low",
+ )
- diffs, diff_refs = fetch_mr_diffs(api_v4, project_id, mr_iid, token)
+ model = build_chat_model(modelCfg)
- # Limit tool calls to prevent agent from wandering
- # For diff review: get_diff (1) + maybe read_file for context (1-2) = 3 max
- settings = ToolCallerSettings(max_tool_calls=5, max_iterations=10)
+ data = await fetch_gitlab_data(api_v4, project_id, mr_iid, token)
+ repo = await clone_and_tree(data.clone_url, data.branch)
- # Only provide essential tools - remove search tools to prevent wandering
- tools = [
- get_diff, # Primary tool: get the diff for the file
- read_file, # Optional: get additional context if needed
- think, # Internal reasoning and thought process
- ]
+ diffs = data.diffs
+ diff_refs = data.diff_refs
- agent: Agent = create_agent(
- model=model,
- tools=tools,
- # middleware=[check_message_limit, check_agent_messages], # type: ignore
- )
- branch = get_mr_branch(api_v4, project_id, mr_iid, token)
- repo_path = clone_repo_persistent(clone_url, branch=branch)
- repo_path = Path(repo_path).resolve()
- repo_tree = tree(repo_path)
+ repo_path = Path(repo.repo_path).resolve()
+ repo_tree = repo.repo_tree
# Parse .reviewignore and filter diffs
reviewignore_patterns = parse_reviewignore(repo_path)
filtered_diffs = filter_diffs(diffs, reviewignore_patterns)
console.print(f"[cyan]Reviewing {len(filtered_diffs)} out of {len(diffs)} changed files[/cyan]")
- manager = CodebaseStoreManager()
- manager.set_repo_root(repo_path)
- manager.set_repo_name(get_repo_name(repo_path))
- manager.set_tree(repo_tree)
- manager.set_diffs(filtered_diffs) # Use filtered diffs instead of all diffs
- manager.get_store()
-
- issue_store = InMemoryIssueStore()
- token_ctx = store_manager_ctx.set(Context(store_manager=manager, issue_store=issue_store))
-
- context = store_manager_ctx.get()
+ NS = ("codebase",)
+ state = CodebaseState(
+ repo_root=str(repo_path),
+ repo_name=get_repo_name(repo_path),
+ repo_tree=repo_tree,
+ diffs=filtered_diffs,
+ )
+ store.put(
+ NS,
+ "state",
+ state.model_dump(),
+ )
# Create GitLab configuration
gitlab_config = GitLabConfig(
api_v4=api_v4,
- token=token,
+ token=SecretStr(token),
project_id=project_id,
mr_iid=mr_iid,
)
+ # Create main agent for code review
+ main_agent: Agent = create_agent(
+ model=model,
+ tools=[get_diff, read_file, ls_dir, think],
+ store=store,
+ )
+
# Create a low-effort agent for simple tasks like acknowledgments and quick scans
low_effort_model = get_gpt_model_low_effort(
config.llm_model_name, config.llm_api_key, config.llm_base_url
)
low_effort_agent: Agent = create_agent(
model=low_effort_model,
- tools=[get_diff, think], # Only needs get_diff for quick scanning
+ tools=[get_diff, read_file, ls_dir, think],
+ store=store,
)
+ # Create settings for tool calling
+ settings = ToolCallerSettings(max_tool_calls=100)
+
# Post acknowledgment that review is starting
console.print("[dim]Posting review acknowledgment...[/dim]")
- acknowledgment_ids = post_review_acknowledgment(
- api_v4=api_v4,
- token=token,
- project_id=project_id,
- mr_iid=mr_iid,
- agent=low_effort_agent,
+ ack = await post_review_acknowledgment(
+ gitlab=gitlab_config,
diffs=filtered_diffs,
+ model=low_effort_model,
)
- if acknowledgment_ids:
+
+ if ack is not None:
console.print(
- f"[dim]Acknowledgment created: discussion={acknowledgment_ids[0]}, note={acknowledgment_ids[1]}[/dim]"
+ f"[dim]Acknowledgment created: discussion={ack.discussion_id}, note={ack.note_id}[/dim]"
)
else:
- console.print("[yellow]⚠ Failed to create acknowledgment (returned None)[/yellow]")
+ console.print(
+ "[yellow]Failed to create acknowledgment (returned None), stopping... [/yellow]"
+ )
+ return "Review failed: acknowledgment creation returned None"
try:
# Define callback to create discussions as each file's review completes
@@ -128,23 +147,28 @@ def on_file_review_complete(file_path: str, issues: list[Any]) -> None:
handle_file_issues(file_path, issues, gitlab_config, filtered_diffs, diff_refs)
- # Pass the callback to the agent runner
- issues: list[Issue] = agent_runner.invoke( # type: ignore
- AgentRunnerInput(
- agent=agent,
- context=context,
- settings=settings,
- on_file_complete=on_file_review_complete,
- quick_scan_agent=low_effort_agent,
- )
+ # Call identify_issues task directly
+ issue_models = await identify_issues(
+ settings=settings,
+ on_file_complete=on_file_review_complete,
+ agent=main_agent,
+ quick_scan_agent=low_effort_agent,
+ model=model,
+ tools=[get_diff, read_file, ls_dir, think],
+ quick_scan_model=low_effort_model,
+ quick_scan_tools=[get_diff, read_file, ls_dir, think],
+ acknowledgment_info=(ack.discussion_id, ack.note_id, gitlab_config),
)
- console.print(f"[bold cyan]📊 Total issues found: {len(issues)}[/bold cyan]")
+ # Convert IssueModel to domain Issue objects
+ issues = [im.to_domain() for im in issue_models]
+
+ console.print(f"[bold cyan]Total issues found: {len(issues)}[/bold cyan]")
# Update the acknowledgment note with summary
- console.print(f"[dim]Checking acknowledgment_ids: {acknowledgment_ids}[/dim]")
- if acknowledgment_ids:
- discussion_id, note_id = acknowledgment_ids
+ console.print(f"[dim]Checking acknowledgment_ids: {ack.discussion_id} {ack.note_id}[/dim]")
+ if ack.discussion_id and ack.note_id:
+ discussion_id, note_id = ack.discussion_id, ack.note_id
console.print(
f"[dim]Calling update_review_summary for discussion {discussion_id}, note {note_id}...[/dim]"
)
@@ -159,25 +183,25 @@ def on_file_review_complete(file_path: str, issues: list[Any]) -> None:
diffs=filtered_diffs,
diff_refs=diff_refs,
agent=low_effort_agent,
+ model=low_effort_model,
+ tools=[get_diff, read_file, ls_dir, think],
)
console.print("[dim]update_review_summary completed[/dim]")
else:
console.print(
- "[yellow]⚠ No acknowledgment to update (initial acknowledgment may have failed)[/yellow]"
+ "[yellow]No acknowledgment to update (initial acknowledgment may have failed)[/yellow]"
)
# Discussions are now created as reviews complete, but we still need to
# handle any files that might have been processed but had no issues
# (though the callback already handles this case)
- console.print("[bold green]🎉 All reviews completed and discussions created![/bold green]")
+ console.print("[bold green]All reviews completed and discussions created![/bold green]")
return "Review completed successfully"
except Exception as e:
- console.print(f"[bold red]❌ Error during review: {e}[/bold red]")
+ console.print(f"[bold red]Error during review: {e}[/bold red]")
import traceback
traceback.print_exc()
raise
- finally:
- store_manager_ctx.reset(token_ctx)
diff --git a/src/reviewbot/agent/workflow/state.py b/src/reviewbot/agent/workflow/state.py
new file mode 100644
index 0000000..2ac9106
--- /dev/null
+++ b/src/reviewbot/agent/workflow/state.py
@@ -0,0 +1,21 @@
+from langgraph.checkpoint.memory import InMemorySaver # type: ignore
+from langgraph.store.memory import InMemoryStore # type: ignore
+from pydantic import BaseModel
+
+from reviewbot.infra.gitlab.diff import FileDiff
+
+# Shared store and checkpointer instances
+checkpointer = InMemorySaver()
+store = InMemoryStore() # An instance of InMemoryStore for long-term memory
+
+
+class CodebaseState(BaseModel):
+ repo_root: str
+ repo_name: str
+ repo_tree: str
+ diffs: list[FileDiff]
+
+ model_config = {
+ "extra": "forbid",
+ "frozen": True,
+ }
diff --git a/src/reviewbot/core/config.py b/src/reviewbot/core/config.py
index a44e423..50d602c 100644
--- a/src/reviewbot/core/config.py
+++ b/src/reviewbot/core/config.py
@@ -1,10 +1,7 @@
-from dataclasses import dataclass
+from pydantic import BaseModel, SecretStr
-from pydantic import SecretStr
-
-@dataclass
-class Config:
+class Config(BaseModel):
llm_api_key: SecretStr
llm_base_url: str
llm_model_name: str
@@ -12,3 +9,8 @@ class Config:
gitlab_token: str
gemini_project_id: str
create_threads: bool = False
+
+ model_config = {
+ "extra": "forbid",
+ "frozen": True,
+ }
diff --git a/src/reviewbot/core/issues/issue_model.py b/src/reviewbot/core/issues/issue_model.py
index 148ce64..90117ad 100644
--- a/src/reviewbot/core/issues/issue_model.py
+++ b/src/reviewbot/core/issues/issue_model.py
@@ -1,4 +1,4 @@
-from pydantic import BaseModel, ConfigDict
+from pydantic import BaseModel, ConfigDict, RootModel
from reviewbot.core.issues.issue import Issue, IssueSeverity
@@ -19,3 +19,16 @@ class IssueModel(BaseModel):
def to_domain(self) -> Issue:
return Issue(**self.model_dump())
+
+ @classmethod
+ def from_domain(cls, issue: Issue) -> "IssueModel":
+ return cls.model_validate(issue)
+
+
+class IssueModelList(RootModel[list[IssueModel]]):
+ """Wrapper for a list of IssueModel objects.
+
+ Use .root to access the underlying list.
+ """
+
+ pass
diff --git a/src/reviewbot/core/reviews/review.py b/src/reviewbot/core/reviews/review.py
index d85aef8..03a097f 100644
--- a/src/reviewbot/core/reviews/review.py
+++ b/src/reviewbot/core/reviews/review.py
@@ -11,3 +11,10 @@ class Review:
commit: str = ""
issues: list[Issue] = field(default_factory=list)
summary: str = ""
+
+
+@dataclass(frozen=True)
+class Acknowledgment:
+ discussion_id: str
+ note_id: str
+ created_at: str
diff --git a/src/reviewbot/core/reviews/review_model.py b/src/reviewbot/core/reviews/review_model.py
new file mode 100644
index 0000000..5cbc44a
--- /dev/null
+++ b/src/reviewbot/core/reviews/review_model.py
@@ -0,0 +1,9 @@
+from pydantic import RootModel
+
+
+class ReviewSummary(RootModel[str]):
+ def __str__(self) -> str:
+ return self.root
+
+ def __len__(self) -> int:
+ return len(self.root)
diff --git a/src/reviewbot/infra/gitlab/diff.py b/src/reviewbot/infra/gitlab/diff.py
index 0743a16..d53a86d 100644
--- a/src/reviewbot/infra/gitlab/diff.py
+++ b/src/reviewbot/infra/gitlab/diff.py
@@ -1,23 +1,71 @@
import json
import re
-from dataclasses import dataclass
-from typing import Any
+from typing import Any, Literal
+import httpx
import requests
+from pydantic import BaseModel
from rich.console import Console
console = Console()
-@dataclass(frozen=True)
-class FileDiff:
- old_path: str | None # None for new files
- new_path: str | None # None for deleted files
+class LineRangePoint(BaseModel):
+ line_code: str
+ type: Literal["old", "new"]
+ old_line: int | None = None
+ new_line: int | None = None
+
+ model_config = {"extra": "forbid", "frozen": True}
+
+
+class LineRange(BaseModel):
+ start: LineRangePoint
+ end: LineRangePoint
+
+ model_config = {"extra": "forbid", "frozen": True}
+
+
+class DiffPosition(BaseModel):
+ base_sha: str
+ start_sha: str
+ head_sha: str
+
+ position_type: Literal["text", "image", "file"]
+
+ old_path: str | None = None
+ new_path: str | None = None
+
+ old_line: int | None = None
+ new_line: int | None = None
+
+ line_range: LineRange | None = None
+
+ # image-only fields
+ width: int | None = None
+ height: int | None = None
+ x: float | None = None
+ y: float | None = None
+
+ model_config = {
+ "extra": "forbid",
+ "frozen": True,
+ }
+
+
+class FileDiff(BaseModel):
+ old_path: str | None
+ new_path: str | None
is_new_file: bool
is_deleted_file: bool
is_renamed: bool
- patch: str # full unified diff for this file
- position: dict[str, Any] | None = None # GitLab position object for discussions
+ patch: str
+ position: DiffPosition | None = None
+
+ model_config = {
+ "extra": "forbid",
+ "frozen": True,
+ }
_DIFF_HEADER_RE = re.compile(r"^diff --git a/(.+?) b/(.+?)\s*$")
@@ -348,3 +396,257 @@ def get_mr_branch(api_v4: str, project_id: str, mr_iid: str, token: str, timeout
r = requests.get(mr_url, headers=headers, timeout=timeout)
r.raise_for_status()
return r.json()["source_branch"]
+
+
+# ============================================================================
+# ASYNC VERSIONS (using httpx)
+# ============================================================================
+
+
+async def async_fetch_mr_diffs(
+ api_v4: str,
+ project_id: str,
+ mr_iid: str,
+ token: str,
+ timeout: int = 30,
+) -> tuple[list[FileDiff], dict[str, str]]:
+ """
+ Fetch merge request diffs from GitLab API (async version).
+
+ Supports both the old raw diff format and the new JSON changes format.
+ The new format includes position information for discussions.
+ """
+ api_v4 = api_v4.rstrip("/")
+ headers = {"PRIVATE-TOKEN": token}
+
+ mr_url = f"{api_v4}/projects/{project_id}/merge_requests/{mr_iid}"
+ changes_url = f"{mr_url}/changes"
+
+ async with httpx.AsyncClient() as client:
+ # Get merge request info to extract diff_refs for position objects
+ mr_response = await client.get(mr_url, headers=headers, timeout=timeout)
+ mr_response.raise_for_status()
+ mr_data = mr_response.json()
+
+ # Get diff_refs for position objects
+ diff_refs = mr_data.get("diff_refs") or {}
+ base_sha = diff_refs.get("base_sha")
+ head_sha = diff_refs.get("head_sha")
+ start_sha = diff_refs.get("start_sha")
+ mr_web_url = mr_data.get("web_url")
+ if mr_web_url and "/-/merge_requests/" in mr_web_url:
+ diff_refs["project_web_url"] = mr_web_url.split("/-/merge_requests/")[0]
+
+ # Try the new JSON changes endpoint first
+ changes_response = await client.get(changes_url, headers=headers, timeout=timeout)
+ changes_response.raise_for_status()
+
+ try:
+ # Try to parse as JSON (new format)
+ changes_data = changes_response.json()
+
+ if isinstance(changes_data, dict) and "changes" in changes_data:
+ # New JSON format with changes array
+ file_diffs: list[FileDiff] = []
+
+ for change in changes_data["changes"]:
+ change_old_path: str | None = change.get("old_path")
+ change_new_path: str | None = change.get("new_path")
+ diff_text: str = change.get("diff", "")
+ change_is_new_file: bool = change.get("new_file", False)
+ change_is_deleted_file: bool = change.get("deleted_file", False)
+ change_is_renamed: bool = change.get("renamed_file", False)
+
+ # Create position object for discussions
+ change_position: dict[str, Any] | None = None
+ if base_sha and head_sha and start_sha:
+ # Parse diff to find first hunk with line range information
+ hunk_header_pattern = re.compile(
+ r"^@@\s+-(\d+)(?:,(\d+))?\s+\+(\d+)(?:,(\d+))?\s+@@"
+ )
+
+ change_old_line: int | None = None
+ change_new_line: int | None = None
+
+ lines = diff_text.splitlines()
+ in_hunk = False
+ current_old = 0
+ current_new = 0
+
+ for line in lines:
+ # Check for hunk header
+ match = hunk_header_pattern.match(line)
+ if match:
+ current_old = int(match.group(1))
+ current_new = int(match.group(3))
+ in_hunk = True
+ continue
+
+ if not in_hunk:
+ continue
+
+ # Found a change - use this line!
+ if line.startswith("-"):
+ change_old_line = current_old
+ change_new_line = None # Deletion has no new line
+ break
+ elif line.startswith("+"):
+ change_old_line = None # Addition has no old line
+ change_new_line = current_new
+ break
+
+ # Context line - increment counters
+ if line.startswith(" ") or (
+ line and not line.startswith(("@@", "\\", "diff"))
+ ):
+ current_old += 1
+ current_new += 1
+
+ # Create position object
+ change_position = {
+ "base_sha": base_sha,
+ "head_sha": head_sha,
+ "start_sha": start_sha,
+ "old_path": change_old_path,
+ "new_path": change_new_path,
+ "position_type": "text",
+ }
+
+ if change_new_line is not None:
+ change_position["new_line"] = change_new_line
+
+ if change_old_line is not None:
+ change_position["old_line"] = change_old_line
+
+ # Default fallback
+ if change_new_line is None and change_old_line is None:
+ change_position["new_line"] = 1
+
+ # If diff is empty or too large, try to get it from raw_diffs endpoint
+ if not diff_text or change.get("too_large", False):
+ # Fallback to raw diff endpoint for this file
+ raw_diff_url = f"{mr_url}/diffs"
+ raw_response = await client.get(
+ raw_diff_url, headers=headers, timeout=timeout
+ )
+ raw_response.raise_for_status()
+ raw_diff = raw_response.text
+
+ # Extract this file's diff from raw format
+ file_chunks = _split_raw_diff_by_file(raw_diff)
+ for chunk in file_chunks:
+ lines = chunk.splitlines(keepends=False)
+ if not lines or not lines[0].startswith("diff --git "):
+ continue
+
+ chunk_old_path, chunk_new_path, _, _, _ = _parse_paths_from_chunk(lines)
+ if (chunk_new_path == change_new_path) or (
+ chunk_old_path == change_old_path
+ ):
+ diff_text = chunk
+ break
+
+ file_diffs.append(
+ FileDiff(
+ old_path=change_old_path,
+ new_path=change_new_path,
+ is_new_file=change_is_new_file,
+ is_deleted_file=change_is_deleted_file,
+ is_renamed=change_is_renamed,
+ patch=diff_text,
+                        position=change_position,
+ )
+ )
+
+ return file_diffs, diff_refs
+
+ except (json.JSONDecodeError, KeyError):
+ # Fallback to old raw diff format
+ pass
+
+ # Old format: parse raw diff text
+ raw_diff_url = f"{mr_url}/diffs"
+ raw_response = await client.get(raw_diff_url, headers=headers, timeout=timeout)
+ raw_response.raise_for_status()
+ raw = raw_response.text
+
+ file_chunks = _split_raw_diff_by_file(raw)
+
+ out: list[FileDiff] = []
+ for chunk in file_chunks:
+ lines = chunk.splitlines(keepends=False)
+ if not lines:
+ continue
+ if not lines[0].startswith("diff --git "):
+ continue
+
+ (
+ parsed_old_path,
+ parsed_new_path,
+ parsed_is_new_file,
+ parsed_is_deleted_file,
+ parsed_is_renamed,
+ ) = _parse_paths_from_chunk(lines)
+
+ # Create position object for discussions
+ raw_position: dict[str, Any] | None = None
+ if base_sha and head_sha and start_sha:
+ extracted_new_line: int | None = None
+ extracted_old_line: int | None = None
+
+ # Parse diff to find first hunk and line numbers
+ hunk_header_pattern = re.compile(r"^@@\s+-(\d+)(?:,(\d+))?\s+\+(\d+)(?:,(\d+))?\s+@@")
+ for diff_line in chunk.splitlines():
+ match = hunk_header_pattern.match(diff_line)
+ if match:
+ extracted_old_line = int(match.group(1))
+ extracted_new_line = int(match.group(3))
+ break
+
+ # Create position object with line information
+ raw_position = {
+ "base_sha": base_sha,
+ "head_sha": head_sha,
+ "start_sha": start_sha,
+ "old_path": parsed_old_path,
+ "new_path": parsed_new_path,
+ "position_type": "text",
+ }
+
+ # Add line numbers if we found them
+ if extracted_new_line is not None:
+ raw_position["new_line"] = extracted_new_line
+ if extracted_old_line is not None:
+ raw_position["old_line"] = extracted_old_line
+
+ # If no lines found, use line 1 as default for file-level discussion
+ if extracted_new_line is None and extracted_old_line is None:
+ raw_position["new_line"] = 1
+
+ out.append(
+ FileDiff(
+ old_path=parsed_old_path,
+ new_path=parsed_new_path,
+ is_new_file=parsed_is_new_file,
+ is_deleted_file=parsed_is_deleted_file,
+ is_renamed=parsed_is_renamed,
+ patch=chunk,
+ position=raw_position,
+ )
+ )
+
+ return out, diff_refs
+
+
+async def async_get_mr_branch(
+ api_v4: str, project_id: str, mr_iid: str, token: str, timeout: int = 30
+) -> str:
+ api_v4 = api_v4.rstrip("/")
+ headers = {"PRIVATE-TOKEN": token}
+ mr_url = f"{api_v4}/projects/{project_id}/merge_requests/{mr_iid}"
+
+ async with httpx.AsyncClient() as client:
+ r = await client.get(mr_url, headers=headers, timeout=timeout)
+
+ r.raise_for_status()
+ return r.json()["source_branch"]
diff --git a/src/reviewbot/infra/gitlab/note.py b/src/reviewbot/infra/gitlab/note.py
index 2c859b1..29baef4 100644
--- a/src/reviewbot/infra/gitlab/note.py
+++ b/src/reviewbot/infra/gitlab/note.py
@@ -1,5 +1,6 @@
from typing import Any
+import httpx
import requests
from rich.console import Console
@@ -291,3 +292,302 @@ def update_discussion_note(
error_response = r.text
console.print(f"[red]Error response text: {r.text}[/red]")
raise RuntimeError(f"Failed to update note: {r.status_code} {r.reason}: {error_response}")
+
+
+# ============================================================================
+# ASYNC VERSIONS (using httpx)
+# ============================================================================
+
+
+async def async_post_merge_request_note(
+ api_v4: str,
+ token: str,
+ project_id: str,
+ mr_iid: str,
+ body: str,
+ timeout: int = 30,
+) -> None:
+ url = f"{api_v4.rstrip('/')}/projects/{project_id}/merge_requests/{mr_iid}/notes"
+
+ async with httpx.AsyncClient() as client:
+ r = await client.post(
+ url,
+ headers={"PRIVATE-TOKEN": token},
+ data={"body": body},
+ timeout=timeout,
+ )
+
+ if r.status_code >= 300:
+ raise RuntimeError(f"gitlab note post failed: {r.status_code} {r.reason_phrase}: {r.text}")
+
+
+async def async_post_discussion(
+ api_v4: str,
+ token: str,
+ project_id: str,
+ mr_iid: str,
+ body: str,
+ position: dict[str, Any] | None = None,
+ timeout: int = 30,
+) -> tuple[str, str | None]:
+ """
+ Create a new discussion and return its ID and first note ID.
+
+ Args:
+ api_v4: GitLab API v4 base URL
+ token: GitLab API token
+ project_id: Project ID
+ mr_iid: Merge request IID
+ body: Discussion body content
+ position: Optional position object for file-based discussions
+ timeout: Request timeout
+
+ Returns:
+ Tuple of (discussion_id, note_id). note_id may be None if not found.
+ """
+ url = f"{api_v4.rstrip('/')}/projects/{project_id}/merge_requests/{mr_iid}/discussions"
+
+ # Prepare request data
+ data: dict[str, Any] = {"body": body}
+ if position:
+ has_line_info = (
+ "new_line" in position
+ or "old_line" in position
+ or "line_code" in position
+ or "line_range" in position
+ )
+ if has_line_info or position["position_type"] == "file":
+ data["position"] = position
+ else:
+ console.print(
+ "[yellow]Position object missing line information, creating discussion without position[/yellow]"
+ )
+
+ async with httpx.AsyncClient() as client:
+ r = await client.post(
+ url,
+ headers={"PRIVATE-TOKEN": token},
+ json=data,
+ timeout=timeout,
+ )
+
+ # Log error details if request fails
+ if r.status_code >= 400:
+ console.print(f"[red]Request failed with status {r.status_code}[/red]")
+ console.print(f"[red]Request data: {data}[/red]")
+ try:
+ error_response = r.json()
+ console.print(f"[red]Error response: {error_response}[/red]")
+ except Exception:
+ console.print(f"[red]Error response text: {r.text}[/red]")
+
+ r.raise_for_status()
+
+ # GitLab returns the created discussion with an 'id' field and notes array
+ response_data = r.json()
+ discussion_id = response_data.get("id")
+
+ if not discussion_id:
+ raise RuntimeError(f"Discussion created but no ID returned: {response_data}")
+
+ # Also return the first note ID (the discussion body note)
+ notes = response_data.get("notes", [])
+ note_id = notes[0].get("id") if notes else None
+
+ return discussion_id, note_id
+
+
+async def async_post_discussion_reply(
+ api_v4: str,
+ token: str,
+ project_id: str,
+ merge_request_id: str,
+ discussion_id: str,
+ body: str,
+ timeout: int = 30,
+) -> str | None:
+ url = f"{api_v4.rstrip('/')}/projects/{project_id}/merge_requests/{merge_request_id}/discussions/{discussion_id}/notes"
+
+ async with httpx.AsyncClient() as client:
+ r = await client.post(
+ url,
+ headers={"PRIVATE-TOKEN": token},
+ data={"body": body},
+ timeout=timeout,
+ )
+
+ r.raise_for_status()
+ try:
+ return r.json().get("id")
+ except Exception:
+ return None
+
+
+async def async_create_discussion(
+ title: str,
+ body: str,
+ api_v4: str,
+ token: str,
+ project_id: str,
+ mr_iid: str,
+) -> str:
+ """
+ Create a discussion with title and body.
+
+ Returns:
+ Discussion ID
+ """
+ full_body = f"## {title}\n\n{body}"
+
+ discussion_id, _ = await async_post_discussion(
+ api_v4=api_v4,
+ token=token,
+ project_id=project_id,
+ mr_iid=mr_iid,
+ body=full_body,
+ )
+
+ return discussion_id
+
+
+async def async_reply_to_discussion(
+ discussion_id: str,
+ body: str,
+ api_v4: str,
+ token: str,
+ project_id: str,
+ mr_iid: str,
+) -> str | None:
+ """
+ Reply to an existing discussion and return the note ID if available.
+ """
+ return await async_post_discussion_reply(
+ api_v4=api_v4,
+ token=token,
+ project_id=project_id,
+ merge_request_id=mr_iid,
+ discussion_id=discussion_id,
+ body=body,
+ )
+
+
+async def async_delete_discussion(
+ api_v4: str,
+ token: str,
+ project_id: str,
+ mr_iid: str,
+ discussion_id: str,
+ note_id: str,
+ timeout: int = 30,
+) -> None:
+ url = f"{api_v4.rstrip('/')}/projects/{project_id}/merge_requests/{mr_iid}/discussions/{discussion_id}/notes/{note_id}"
+
+ async with httpx.AsyncClient() as client:
+ r = await client.delete(
+ url,
+ headers={"PRIVATE-TOKEN": token},
+ timeout=timeout,
+ )
+
+ r.raise_for_status()
+
+
+async def async_get_all_discussions(
+ api_v4: str,
+ token: str,
+ project_id: str,
+ mr_iid: str,
+ timeout: int = 30,
+) -> list[dict[str, Any]]:
+ url = f"{api_v4.rstrip('/')}/projects/{project_id}/merge_requests/{mr_iid}/discussions"
+
+ async with httpx.AsyncClient() as client:
+ r = await client.get(
+ url,
+ headers={"PRIVATE-TOKEN": token},
+ timeout=timeout,
+ )
+
+ r.raise_for_status()
+ return r.json()
+
+
+async def async_get_merge_request_notes(
+ api_v4: str,
+ token: str,
+ project_id: str,
+ mr_iid: str,
+ timeout: int = 30,
+) -> list[dict[str, Any]]:
+ """
+ Get all notes (comments) for a merge request.
+
+ Args:
+ api_v4: GitLab API v4 base URL
+ token: GitLab API token
+ project_id: Project ID
+ mr_iid: Merge request IID
+ timeout: Request timeout
+
+ Returns:
+ List of note dictionaries from GitLab API
+ """
+ url = f"{api_v4.rstrip('/')}/projects/{project_id}/merge_requests/{mr_iid}/notes"
+
+ async with httpx.AsyncClient() as client:
+ r = await client.get(
+ url,
+ headers={"PRIVATE-TOKEN": token},
+ timeout=timeout,
+ )
+
+ r.raise_for_status()
+ return r.json()
+
+
+async def async_update_discussion_note(
+ api_v4: str,
+ token: str,
+ project_id: str,
+ mr_iid: str,
+ discussion_id: str,
+ note_id: str,
+ body: str,
+ timeout: int = 30,
+) -> None:
+ """
+ Update a note in a discussion.
+
+ Args:
+ api_v4: GitLab API v4 base URL
+ token: GitLab API token
+ project_id: Project ID
+ mr_iid: Merge request IID
+ discussion_id: Discussion ID
+ note_id: Note ID to update
+ body: New body content for the note
+ timeout: Request timeout
+ """
+ url = f"{api_v4.rstrip('/')}/projects/{project_id}/merge_requests/{mr_iid}/discussions/{discussion_id}/notes/{note_id}"
+
+ async with httpx.AsyncClient() as client:
+ r = await client.put(
+ url,
+ headers={"PRIVATE-TOKEN": token},
+ json={"body": body},
+ timeout=timeout,
+ )
+
+ # Check for errors and raise with detailed information
+ if r.status_code >= 400:
+ console.print(f"[red]Failed to update note: {r.status_code} {r.reason_phrase}[/red]")
+ try:
+ error_response = r.json()
+ console.print(f"[red]Error response: {error_response}[/red]")
+ except ValueError:
+ # JSON parsing failed, use text
+ error_response = r.text
+ console.print(f"[red]Error response text: {r.text}[/red]")
+ raise RuntimeError(
+ f"Failed to update note: {r.status_code} {r.reason_phrase}: {error_response}"
+ )
diff --git a/src/reviewbot/infra/issues/in_memory_issue_store.py b/src/reviewbot/infra/issues/in_memory_issue_store.py
index 5a03fe5..aad7c04 100644
--- a/src/reviewbot/infra/issues/in_memory_issue_store.py
+++ b/src/reviewbot/infra/issues/in_memory_issue_store.py
@@ -1,25 +1,37 @@
from collections.abc import Iterable
from uuid import UUID
+from langgraph.store.memory import InMemoryStore
+
from reviewbot.core.issues.issue import Issue
+from reviewbot.core.issues.issue_model import IssueModel
from reviewbot.core.issues.issue_store import IssueStore
class InMemoryIssueStore(IssueStore):
- def __init__(self) -> None:
- self._items: dict[UUID, Issue] = {}
+ NS = ("issues",)
+
+ def __init__(self, store: InMemoryStore):
+ self.store = store
def add(self, issue: Issue) -> None:
- self._items[issue.id] = issue
+ model = IssueModel.from_domain(issue)
+ self.store.put(self.NS, str(issue.id), model.model_dump())
def get(self, issue_id: UUID) -> Issue | None:
- return self._items.get(issue_id)
+ raw = self.store.get(self.NS, str(issue_id))
+ if raw is None:
+ return None
+ return IssueModel.model_validate(raw).to_domain()
def list(self) -> Iterable[Issue]:
- return self._items.values()
+ items: list[Issue] = []
+ for raw in self.store.search(self.NS):
+ items.append(IssueModel.model_validate(raw).to_domain())
+ return items
def update(self, issue: Issue) -> None:
- self._items[issue.id] = issue
+ self.add(issue)
def delete(self, issue_id: UUID) -> None:
- self._items.pop(issue_id, None)
+ self.store.delete(self.NS, str(issue_id))
diff --git a/src/reviewbot/tools/__init__.py b/src/reviewbot/tools/__init__.py
index 574e906..f914bfa 100644
--- a/src/reviewbot/tools/__init__.py
+++ b/src/reviewbot/tools/__init__.py
@@ -1,6 +1,7 @@
from .diff import get_diff
+from .ls_dir import ls_dir
+from .read_file import read_file
from .search_codebase import (
- read_file,
search_codebase,
search_codebase_semantic_search,
)
@@ -8,6 +9,7 @@
__all__ = [
"get_diff",
+ "ls_dir",
"read_file",
"search_codebase",
"search_codebase_semantic_search",
diff --git a/src/reviewbot/tools/diff.py b/src/reviewbot/tools/diff.py
index f6cdf78..a262eb1 100644
--- a/src/reviewbot/tools/diff.py
+++ b/src/reviewbot/tools/diff.py
@@ -1,15 +1,39 @@
import json
+from typing import Any
-from langchain.tools import tool # type: ignore
+from langchain.tools import ToolRuntime, tool # type: ignore
+from langgraph.func import BaseStore # type: ignore
from rich.console import Console
-from reviewbot.context import store_manager_ctx
+from reviewbot.agent.workflow.state import CodebaseState
console = Console()
+def get_diff_from_file(
+ store: BaseStore,
+ file_path: str,
+) -> str:
+ NS = ("codebase",)
+ raw = store.get(NS, "state")
+ if not raw:
+ raise ValueError("Codebase state not found in store")
+
+ codebase = CodebaseState.model_validate(raw.value)
+ diffs = codebase.diffs
+
+ diff = next((diff for diff in diffs if diff.new_path == file_path), None)
+ if not diff:
+ raise ValueError(f"Diff not found for file: {file_path}")
+
+ return json.dumps(diff.patch)[1:-1]
+
+
@tool
-def get_diff(file_path: str) -> str:
+def get_diff(
+ runtime: ToolRuntime[None, dict[str, Any]],
+ file_path: str,
+) -> str:
"""
Get the diff of the file.
@@ -18,29 +42,27 @@ def get_diff(file_path: str) -> str:
Returns:
string with the diff of the file
"""
- context = store_manager_ctx.get()
- store = context.get("store_manager")
- if not store:
- raise ValueError("Store manager not found")
+ if not runtime.store:
+ raise ValueError("Store not found in runtime")
- diffs = store.get_diffs()
- if not diffs:
- raise ValueError("Diff not found")
-
- diff = next((diff for diff in diffs if diff.new_path == file_path), None)
- if not diff:
- raise ValueError(f"Diff not found for file: {file_path}")
-
- return json.dumps(diff.patch)[1:-1]
+ return get_diff_from_file(
+ runtime.store,
+ file_path,
+ )
@tool
-def get_tree() -> str:
+def get_tree(runtime: ToolRuntime[None, dict[str, Any]]) -> str:
"""
Get the tree of the codebase.
"""
- context = store_manager_ctx.get()
- store = context.get("store_manager")
- if not store:
- raise ValueError("Store manager not found")
- return store.get_tree()
+ if not runtime.store:
+ raise ValueError("Store not found in runtime")
+
+ NS = ("codebase",)
+ raw = runtime.store.get(NS, "state")
+ if not raw:
+ raise ValueError("Codebase state not found in store")
+
+ codebase = CodebaseState.model_validate(raw.value)
+ return codebase.repo_tree
diff --git a/src/reviewbot/tools/issues.py b/src/reviewbot/tools/issues.py
index 6c408da..b18990f 100644
--- a/src/reviewbot/tools/issues.py
+++ b/src/reviewbot/tools/issues.py
@@ -1,7 +1,9 @@
-from langchain.tools import tool # type: ignore
+from typing import Any
+
+from langchain.tools import ToolRuntime, tool # type: ignore
-from reviewbot.context import store_manager_ctx
from reviewbot.core.issues import Issue, IssueSeverity
+from reviewbot.core.issues.issue_model import IssueModel
@tool
@@ -13,6 +15,7 @@ def add_issue(
end_line: int,
severity: IssueSeverity,
status: str,
+ runtime: ToolRuntime[None, dict[str, Any]],
) -> str:
"""Add an issue to the issue store.
@@ -28,10 +31,8 @@ def add_issue(
Returns:
string with the id of the added issue
"""
- context = store_manager_ctx.get()
- issue_store = context.get("issue_store")
- if not issue_store:
- return "Issue store not found."
+ if not runtime.store:
+ raise ValueError("Store not found in runtime")
issue = Issue(
title=title,
@@ -43,5 +44,7 @@ def add_issue(
status=status,
)
- issue_store.add(issue)
+ issue_model = IssueModel.from_domain(issue)
+
+ runtime.store.put(("issues",), str(issue.id), issue_model.model_dump())
return f"Issue added successfully: {issue.id}"
diff --git a/src/reviewbot/tools/ls_dir.py b/src/reviewbot/tools/ls_dir.py
new file mode 100644
index 0000000..6e5cc54
--- /dev/null
+++ b/src/reviewbot/tools/ls_dir.py
@@ -0,0 +1,116 @@
+from pathlib import Path
+
+from langchain.tools import ToolRuntime, tool # type: ignore
+from rich.console import Console
+
+from reviewbot.agent.workflow.state import CodebaseState
+
+console = Console()
+
+
+@tool
+def ls_dir(
+ dir_path: str,
+ runtime: ToolRuntime, # type: ignore
+) -> str:
+ """List the contents of a directory in the repository.
+
+ Use this tool to explore directory structure and see what files and subdirectories exist.
+
+ Args:
+ dir_path: Relative path to the directory in the repository (e.g., "src" or "src/components")
+
+ Returns:
+ A formatted list of files and directories in the specified path
+
+ Examples:
+ - ls_dir("src") - List contents of src directory
+ - ls_dir(".") - List contents of repository root
+ - ls_dir("src/utils") - List contents of src/utils directory
+ """
+ if runtime.store is None:
+ console.print("[red]ls_dir: Store not found in runtime[/red]")
+ raise ValueError("Store not found in runtime")
+
+ console.print(f"[cyan]ls_dir: '{dir_path}'[/cyan]")
+
+ # Get codebase state from store
+ NS = ("codebase",)
+ raw = runtime.store.get(NS, "state")
+ if not raw:
+ console.print("[red]ls_dir: Codebase state not found in store[/red]")
+ raise ValueError("Codebase state not found in store")
+
+ codebase_data = raw.value if hasattr(raw, "value") else raw
+ codebase = CodebaseState.model_validate(codebase_data)
+
+ # Construct full path
+ repo_root = Path(codebase.repo_root)
+ normalized_path = Path(dir_path)
+ full_path = repo_root / normalized_path
+
+ console.print(f"[dim] → Resolved path: {full_path}[/dim]")
+
+ # Check if path exists
+ if not full_path.exists():
+ error_msg = (
+ f"ERROR: Directory not found: '{dir_path}'. "
+ f"Checked at: {full_path}. "
+ "This directory may not exist or the path may be incorrect."
+ )
+ console.print("[red] → Returning: DIRECTORY NOT FOUND error[/red]")
+ return error_msg
+
+ if not full_path.is_dir():
+ error_msg = (
+ f"ERROR: Path is not a directory: '{dir_path}'. Use read_file() to read files instead."
+ )
+ console.print("[red] → Returning: NOT A DIRECTORY error[/red]")
+ return error_msg
+
+ # List directory contents
+ console.print("[green] ✓ Directory exists, listing contents...[/green]")
+ try:
+ entries = sorted(full_path.iterdir(), key=lambda p: (not p.is_dir(), p.name))
+
+ if not entries:
+ console.print("[dim] → Directory is empty[/dim]")
+ return f"Directory '{dir_path}' is empty."
+
+ # Format output
+ output_lines = [f"Contents of '{dir_path}':\n"]
+ dirs: list[str] = []
+ files: list[str] = []
+
+ for entry in entries:
+ relative_name = entry.name
+ if entry.is_dir():
+ dirs.append(f" [DIR] {relative_name}/")
+ else:
+ # Get file size
+ size = entry.stat().st_size
+ if size < 1024:
+ size_str = f"{size}B"
+ elif size < 1024 * 1024:
+ size_str = f"{size / 1024:.1f}KB"
+ else:
+ size_str = f"{size / (1024 * 1024):.1f}MB"
+ files.append(f" [FILE] {relative_name:<40} {size_str:>10}")
+
+ # Add directories first, then files
+ output_lines.extend(dirs)
+ output_lines.extend(files)
+
+ result = "\n".join(output_lines)
+ console.print(f"[green] → Found {len(dirs)} directories and {len(files)} files[/green]")
+ console.print(f"[dim] → Preview: {result[:100]}...[/dim]")
+ return result
+
+ except PermissionError:
+ error_msg = f"ERROR: Permission denied accessing directory: {dir_path}"
+ console.print("[red] → Returning: PERMISSION ERROR[/red]")
+ return error_msg
+ except Exception as e:
+ error_msg = f"ERROR: Failed to list directory '{dir_path}': {str(e)}"
+ console.print(f"[red] → Returning: UNEXPECTED ERROR: {e}[/red]")
+ return error_msg
diff --git a/src/reviewbot/tools/read_file.py b/src/reviewbot/tools/read_file.py
new file mode 100644
index 0000000..46b4174
--- /dev/null
+++ b/src/reviewbot/tools/read_file.py
@@ -0,0 +1,176 @@
+from pathlib import Path
+
+from langchain.tools import ToolRuntime, tool # type: ignore
+from rich.console import Console
+
+from reviewbot.agent.workflow.state import CodebaseState
+
+console = Console()
+
+
+@tool
+def read_file(
+ file_path: str,
+ runtime: ToolRuntime, # type: ignore
+ line_start: int | None = None,
+ line_end: int | None = None,
+) -> str:
+ """Read the contents of a file from the repository.
+
+ Use this tool to get the full context of a file when the diff alone is not sufficient
+ to understand the code. This helps avoid false positives by seeing the complete picture.
+
+ Args:
+ file_path: Relative path to the file in the repository (e.g., "src/main.go")
+ line_start: Optional line number to start reading from (1-indexed)
+ line_end: Optional line number to stop reading at (inclusive)
+
+ Returns:
+ The file contents, optionally limited to the specified line range
+
+ Examples:
+ - read_file("src/main.go") - Read entire file
+ - read_file("src/main.go", line_start=10, line_end=50) - Read lines 10-50
+
+ Note:
+ Returns an error message if the file is newly added (doesn't exist in current checkout)
+ """
+ if runtime.store is None:
+ console.print("[red]read_file: Store not found in runtime[/red]")
+ raise ValueError("Store not found in runtime")
+
+ line_range = f" (lines {line_start}-{line_end})" if line_start or line_end else ""
+ console.print(f"[cyan]read_file: '{file_path}'{line_range}[/cyan]")
+
+ # Get codebase state from store
+ NS = ("codebase",)
+ raw = runtime.store.get(NS, "state")
+ if not raw:
+ console.print("[red]read_file: Codebase state not found in store[/red]")
+ raise ValueError("Codebase state not found in store")
+
+ codebase_data = raw.value if hasattr(raw, "value") else raw
+ codebase = CodebaseState.model_validate(codebase_data)
+
+ # Construct full path, handling different path separators
+ repo_root = Path(codebase.repo_root)
+ # Normalize the file path (convert to Path and resolve)
+ normalized_path = Path(file_path)
+ full_path = repo_root / normalized_path
+
+ console.print(f"[dim] → Resolved path: {full_path}[/dim]")
+
+ # Check if file exists
+ if not full_path.exists():
+ console.print(f"[yellow] File does not exist at: {full_path}[/yellow]")
+
+ # Check if this is a new file (added in the diff)
+ diffs = codebase.diffs
+ is_new_file = any(
+ d.new_path == file_path and (d.is_new_file or not d.old_path) for d in diffs
+ )
+
+ if is_new_file:
+ error_msg = (
+ f"ERROR: Cannot read '{file_path}' - This is a NEW FILE being added in this change. "
+ "The file doesn't exist in the current checkout, only in the diff. "
+ "You can only see the changes in the diff via get_diff(). "
+ "Since this is a new file, you cannot verify imports or variables from elsewhere in the file - "
+ "assume the developer has the complete context and do not flag missing imports/variables as issues."
+ )
+ console.print("[yellow] → Returning: NEW FILE error[/yellow]")
+ console.print(f"[dim] → Message: {error_msg[:100]}...[/dim]")
+ return error_msg
+
+ # File truly doesn't exist
+ error_msg = (
+ f"ERROR: File not found: '{file_path}'. "
+ f"Checked at: {full_path}. "
+ "This file may have been deleted, renamed, or the path may be incorrect. "
+ "Available files in this diff can be checked with get_diff() tool."
+ )
+ console.print("[red] → Returning: FILE NOT FOUND error[/red]")
+ console.print(f"[dim] → Message: {error_msg[:100]}...[/dim]")
+ return error_msg
+
+ if not full_path.is_file():
+ if full_path.is_dir():
+ error_msg = (
+ f"ERROR: Path is a directory, not a file: '{file_path}'. "
+ f"Use ls_dir('{file_path}') to list the contents of this directory instead."
+ )
+ else:
+ error_msg = f"ERROR: Path is not a file: {file_path}"
+ console.print("[red] → Returning: NOT A FILE error[/red]")
+ console.print(f"[dim] → Message: {error_msg}[/dim]")
+ return error_msg
+
+ # Read file
+ console.print("[green] ✓ File exists, reading...[/green]")
+ try:
+ with open(full_path, encoding="utf-8") as f:
+ if line_start is None and line_end is None:
+ content = f.read()
+ num_lines = content.count("\n") + 1
+ num_chars = len(content)
+ console.print(
+ f"[green] → Successfully read entire file: {num_lines} lines, {num_chars} chars[/green]"
+ )
+ console.print(f"[dim] → Preview: {content[:100]}...[/dim]")
+ return content
+
+ lines = f.readlines()
+ total_lines = len(lines)
+
+ # Adjust indices (convert to 0-based)
+ start_idx = (line_start - 1) if line_start else 0
+ end_idx = line_end if line_end else total_lines
+
+ # Auto-adjust range to file bounds (be lenient)
+ if start_idx < 0:
+ console.print(f"[yellow] Adjusting line_start from {line_start} to 1[/yellow]")
+ start_idx = 0
+ if start_idx >= total_lines:
+ console.print(
+ f"[yellow] line_start {line_start} exceeds file length ({total_lines}), using last line[/yellow]"
+ )
+ start_idx = max(0, total_lines - 1)
+
+ if end_idx > total_lines:
+ console.print(
+ f"[yellow] Adjusting line_end from {line_end} to {total_lines} (file length)[/yellow]"
+ )
+ end_idx = total_lines
+
+ if start_idx >= end_idx:
+ console.print(
+ f"[yellow] Invalid range ({line_start}-{line_end}), reading entire file instead[/yellow]"
+ )
+ content = "".join(lines)
+ num_lines = len(lines)
+ num_chars = len(content)
+ console.print(
+ f"[green] → Successfully read entire file: {num_lines} lines, {num_chars} chars[/green]"
+ )
+ return content
+
+ # Return selected lines with line numbers
+ result_lines: list[str] = []
+ for i in range(start_idx, end_idx):
+ line_num = i + 1
+ result_lines.append(f"{line_num:4d} | {lines[i]}")
+
+ content = "".join(result_lines)
+ num_lines_returned = end_idx - start_idx
+ console.print(
+ f"[green] → Successfully read lines {line_start}-{line_end}: {num_lines_returned} lines[/green]"
+ )
+ console.print(f"[dim] → Preview: {content[:100]}...[/dim]")
+ return content
+
+ except UnicodeDecodeError as e:
+ error_msg = f"File is not a text file or uses unsupported encoding: {file_path}"
+ console.print("[red] → Returning: ENCODING ERROR[/red]")
+ console.print(f"[dim] → Message: {error_msg}[/dim]")
+
+ raise ValueError(error_msg) from e
diff --git a/src/reviewbot/tools/search_codebase.py b/src/reviewbot/tools/search_codebase.py
index a1bb303..09e25bb 100644
--- a/src/reviewbot/tools/search_codebase.py
+++ b/src/reviewbot/tools/search_codebase.py
@@ -2,17 +2,49 @@
import shlex
import subprocess
+from pathlib import Path
+from typing import Any
-from langchain.tools import tool # type: ignore
+from langchain.tools import ToolRuntime, tool # type: ignore
from rich.console import Console
-from reviewbot.context import store_manager_ctx
+from reviewbot.agent.workflow.state import CodebaseState
+from reviewbot.infra.embeddings.openai import CodebaseVectorStore
console = Console()
+# Cache for the vector store to avoid rebuilding
+_vector_store_cache: CodebaseVectorStore | None = None
+
+
+def _get_codebase_state(store: Any) -> CodebaseState:
+ """Helper to get CodebaseState from store."""
+ NS = ("codebase",)
+ raw = store.get(NS, "state")
+ if not raw:
+ raise ValueError("Codebase state not found in store")
+ return CodebaseState.model_validate(raw.value)
+
+
+def _get_vector_store(store: Any) -> CodebaseVectorStore:
+ """Get or create vector store for semantic search."""
+ global _vector_store_cache
+
+ codebase = _get_codebase_state(store)
+ repo_root = Path(codebase.repo_root)
+ repo_name = codebase.repo_name
+
+ # Create new instance if not cached or repo changed
+ if _vector_store_cache is None or _vector_store_cache.repo_root != repo_root:
+ _vector_store_cache = CodebaseVectorStore(repo_root, repo_name)
+ if not _vector_store_cache.load():
+ _vector_store_cache.build()
+
+ return _vector_store_cache
+
@tool
-def search_codebase(query: str) -> str:
+def search_codebase(query: str, runtime: ToolRuntime[None, dict[str, Any]]) -> str:
"""
Search the codebase using Unix find + grep.
@@ -21,22 +53,15 @@ def search_codebase(query: str) -> str:
Returns:
grep-style matches: file:line:content
"""
- # path is relative to the repo root
- context = store_manager_ctx.get()
- store = context.get("store_manager")
- if not store:
- raise ValueError("Store manager not found")
-
- repo_root = store.get_store().repo_root.resolve()
-
- base = repo_root # or a validated subpath if you support `path`
+ codebase = _get_codebase_state(runtime.store)
+ repo_root = Path(codebase.repo_root).resolve()
max_lines = 200
cmd = [
"bash",
"-lc",
(
- f"find {shlex.quote(base.as_posix())} -type f "
+ f"find {shlex.quote(repo_root.as_posix())} -type f "
f"! -path '*/.git/*' ! -path '*/node_modules/*' ! -path '*/.venv/*' "
f"-print0 | "
f"xargs -0 grep -nH --color=never -I {shlex.quote(query)} | "
@@ -63,7 +88,9 @@ def search_codebase(query: str) -> str:
@tool
-def search_codebase_semantic_search(query: str, path: str | None = None) -> str:
+def search_codebase_semantic_search(
+ query: str, runtime: ToolRuntime[None, dict[str, Any]], path: str | None = None
+) -> str:
"""Search the codebase for the given query. If a path is provided, search the codebase for the given query in the given path.
Args:
@@ -72,21 +99,18 @@ def search_codebase_semantic_search(query: str, path: str | None = None) -> str:
Returns:
string with the results of the search
"""
- context = store_manager_ctx.get()
- store = context.get("store_manager")
- if not store:
- raise ValueError("Store manager not found")
-
- store = store.get_store()
- if not store:
- raise ValueError("Store not found")
-
- return store.search(query, top_k=5, path=path) # type: ignore
+ vector_store = _get_vector_store(runtime.store)
+ return vector_store.search(query, top_k=5, path=path) # type: ignore
@tool
-def read_file(path: str, line_start: int | None = None, line_end: int | None = None) -> str:
- """Read the file at the given path.
+def read_file_vector(
+ path: str,
+ runtime: ToolRuntime[None, dict[str, Any]],
+ line_start: int | None = None,
+ line_end: int | None = None,
+) -> str:
+ """Read the file at the given path using vector store (legacy, prefer read_file tool).
Args:
path: path to the file to read
@@ -95,14 +119,5 @@ def read_file(path: str, line_start: int | None = None, line_end: int | None = N
Returns:
string with the contents of the file
"""
- context = store_manager_ctx.get()
- store = context.get("store_manager")
- if not store:
- raise ValueError("Store manager not found")
-
- store = store.get_store()
- if not store:
- raise ValueError("Store not found")
-
- result = store.read_file(path, line_start=line_start, line_end=line_end)
- return result
+ vector_store = _get_vector_store(runtime.store)
+ return vector_store.read_file(path, line_start=line_start, line_end=line_end)
diff --git a/src/reviewbot/tools/think.py b/src/reviewbot/tools/think.py
index 0c6b052..f1a25fb 100644
--- a/src/reviewbot/tools/think.py
+++ b/src/reviewbot/tools/think.py
@@ -1,10 +1,10 @@
-from langchain.tools import tool # type: ignore
+from typing import Any
-from reviewbot.context import store_manager_ctx
+from langchain.tools import ToolRuntime, tool # type: ignore
@tool
-def think(reasoning: str) -> str:
+def think(reasoning: str, runtime: ToolRuntime[None, dict[str, Any]]) -> str:
"""Record internal reasoning and thought process.
Use this tool to think through problems, plan your approach, or reason about code before taking action.
@@ -29,17 +29,21 @@ def think(reasoning: str) -> str:
- "This looks like a potential security issue - user input is being directly
concatenated into a SQL query. I should flag this as high severity."
"""
- context = store_manager_ctx.get()
- issue_store = context.get("issue_store")
+ if not runtime.store:
+ raise ValueError("Store not found in runtime")
- if not issue_store:
- return "Context not available for storing reasoning."
+ # Store reasoning in langgraph store
+ NS = ("reasoning",)
+ existing = runtime.store.get(NS, "history")
- # Store reasoning in the issue store's metadata
- if not hasattr(issue_store, "_reasoning_history"):
- issue_store._reasoning_history = []
+ if existing is None:
+ history: dict[str, list[str]] = {"items": []}
+ else:
+ history = existing.value
+
+ history["items"].append(reasoning)
+ runtime.store.put(NS, "history", history)
- issue_store._reasoning_history.append(reasoning)
print("Reasoned:")
print(reasoning)
return f"Reasoning recorded: {reasoning[:100]}{'...' if len(reasoning) > 100 else ''}"
diff --git a/uv.lock b/uv.lock
index 3f0bb95..f656c0b 100644
--- a/uv.lock
+++ b/uv.lock
@@ -2,15 +2,6 @@ version = 1
revision = 3
requires-python = ">=3.13"
-[[package]]
-name = "aiofiles"
-version = "25.1.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354, upload-time = "2025-10-09T20:51:04.358Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668, upload-time = "2025-10-09T20:51:03.174Z" },
-]
-
[[package]]
name = "aiohappyeyeballs"
version = "2.6.1"
@@ -22,7 +13,7 @@ wheels = [
[[package]]
name = "aiohttp"
-version = "3.13.2"
+version = "3.13.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohappyeyeballs" },
@@ -33,59 +24,59 @@ dependencies = [
{ name = "propcache" },
{ name = "yarl" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" },
- { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" },
- { url = "https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" },
- { url = "https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387, upload-time = "2025-10-28T20:57:08.685Z" },
- { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314, upload-time = "2025-10-28T20:57:10.693Z" },
- { url = "https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317, upload-time = "2025-10-28T20:57:12.563Z" },
- { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539, upload-time = "2025-10-28T20:57:14.623Z" },
- { url = "https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597, upload-time = "2025-10-28T20:57:16.399Z" },
- { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006, upload-time = "2025-10-28T20:57:18.288Z" },
- { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220, upload-time = "2025-10-28T20:57:20.241Z" },
- { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570, upload-time = "2025-10-28T20:57:22.253Z" },
- { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407, upload-time = "2025-10-28T20:57:24.37Z" },
- { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" },
- { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 1758084, upload-time = "2025-10-28T20:57:28.349Z" },
- { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = "2025-10-28T20:57:30.233Z" },
- { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" },
- { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" },
- { url = "https://files.pythonhosted.org/packages/9b/36/e2abae1bd815f01c957cbf7be817b3043304e1c87bad526292a0410fdcf9/aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b", size = 735234, upload-time = "2025-10-28T20:57:36.415Z" },
- { url = "https://files.pythonhosted.org/packages/ca/e3/1ee62dde9b335e4ed41db6bba02613295a0d5b41f74a783c142745a12763/aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61", size = 490733, upload-time = "2025-10-28T20:57:38.205Z" },
- { url = "https://files.pythonhosted.org/packages/1a/aa/7a451b1d6a04e8d15a362af3e9b897de71d86feac3babf8894545d08d537/aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4", size = 491303, upload-time = "2025-10-28T20:57:40.122Z" },
- { url = "https://files.pythonhosted.org/packages/57/1e/209958dbb9b01174870f6a7538cd1f3f28274fdbc88a750c238e2c456295/aiohttp-3.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b", size = 1717965, upload-time = "2025-10-28T20:57:42.28Z" },
- { url = "https://files.pythonhosted.org/packages/08/aa/6a01848d6432f241416bc4866cae8dc03f05a5a884d2311280f6a09c73d6/aiohttp-3.13.2-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694", size = 1667221, upload-time = "2025-10-28T20:57:44.869Z" },
- { url = "https://files.pythonhosted.org/packages/87/4f/36c1992432d31bbc789fa0b93c768d2e9047ec8c7177e5cd84ea85155f36/aiohttp-3.13.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906", size = 1757178, upload-time = "2025-10-28T20:57:47.216Z" },
- { url = "https://files.pythonhosted.org/packages/ac/b4/8e940dfb03b7e0f68a82b88fd182b9be0a65cb3f35612fe38c038c3112cf/aiohttp-3.13.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9", size = 1838001, upload-time = "2025-10-28T20:57:49.337Z" },
- { url = "https://files.pythonhosted.org/packages/d7/ef/39f3448795499c440ab66084a9db7d20ca7662e94305f175a80f5b7e0072/aiohttp-3.13.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011", size = 1716325, upload-time = "2025-10-28T20:57:51.327Z" },
- { url = "https://files.pythonhosted.org/packages/d7/51/b311500ffc860b181c05d91c59a1313bdd05c82960fdd4035a15740d431e/aiohttp-3.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6", size = 1547978, upload-time = "2025-10-28T20:57:53.554Z" },
- { url = "https://files.pythonhosted.org/packages/31/64/b9d733296ef79815226dab8c586ff9e3df41c6aff2e16c06697b2d2e6775/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213", size = 1682042, upload-time = "2025-10-28T20:57:55.617Z" },
- { url = "https://files.pythonhosted.org/packages/3f/30/43d3e0f9d6473a6db7d472104c4eff4417b1e9df01774cb930338806d36b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49", size = 1680085, upload-time = "2025-10-28T20:57:57.59Z" },
- { url = "https://files.pythonhosted.org/packages/16/51/c709f352c911b1864cfd1087577760ced64b3e5bee2aa88b8c0c8e2e4972/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae", size = 1728238, upload-time = "2025-10-28T20:57:59.525Z" },
- { url = "https://files.pythonhosted.org/packages/19/e2/19bd4c547092b773caeb48ff5ae4b1ae86756a0ee76c16727fcfd281404b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa", size = 1544395, upload-time = "2025-10-28T20:58:01.914Z" },
- { url = "https://files.pythonhosted.org/packages/cf/87/860f2803b27dfc5ed7be532832a3498e4919da61299b4a1f8eb89b8ff44d/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4", size = 1742965, upload-time = "2025-10-28T20:58:03.972Z" },
- { url = "https://files.pythonhosted.org/packages/67/7f/db2fc7618925e8c7a601094d5cbe539f732df4fb570740be88ed9e40e99a/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a", size = 1697585, upload-time = "2025-10-28T20:58:06.189Z" },
- { url = "https://files.pythonhosted.org/packages/0c/07/9127916cb09bb38284db5036036042b7b2c514c8ebaeee79da550c43a6d6/aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940", size = 431621, upload-time = "2025-10-28T20:58:08.636Z" },
- { url = "https://files.pythonhosted.org/packages/fb/41/554a8a380df6d3a2bba8a7726429a23f4ac62aaf38de43bb6d6cde7b4d4d/aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4", size = 457627, upload-time = "2025-10-28T20:58:11Z" },
- { url = "https://files.pythonhosted.org/packages/c7/8e/3824ef98c039d3951cb65b9205a96dd2b20f22241ee17d89c5701557c826/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673", size = 767360, upload-time = "2025-10-28T20:58:13.358Z" },
- { url = "https://files.pythonhosted.org/packages/a4/0f/6a03e3fc7595421274fa34122c973bde2d89344f8a881b728fa8c774e4f1/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd", size = 504616, upload-time = "2025-10-28T20:58:15.339Z" },
- { url = "https://files.pythonhosted.org/packages/c6/aa/ed341b670f1bc8a6f2c6a718353d13b9546e2cef3544f573c6a1ff0da711/aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3", size = 509131, upload-time = "2025-10-28T20:58:17.693Z" },
- { url = "https://files.pythonhosted.org/packages/7f/f0/c68dac234189dae5c4bbccc0f96ce0cc16b76632cfc3a08fff180045cfa4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf", size = 1864168, upload-time = "2025-10-28T20:58:20.113Z" },
- { url = "https://files.pythonhosted.org/packages/8f/65/75a9a76db8364b5d0e52a0c20eabc5d52297385d9af9c35335b924fafdee/aiohttp-3.13.2-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e", size = 1719200, upload-time = "2025-10-28T20:58:22.583Z" },
- { url = "https://files.pythonhosted.org/packages/f5/55/8df2ed78d7f41d232f6bd3ff866b6f617026551aa1d07e2f03458f964575/aiohttp-3.13.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5", size = 1843497, upload-time = "2025-10-28T20:58:24.672Z" },
- { url = "https://files.pythonhosted.org/packages/e9/e0/94d7215e405c5a02ccb6a35c7a3a6cfff242f457a00196496935f700cde5/aiohttp-3.13.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad", size = 1935703, upload-time = "2025-10-28T20:58:26.758Z" },
- { url = "https://files.pythonhosted.org/packages/0b/78/1eeb63c3f9b2d1015a4c02788fb543141aad0a03ae3f7a7b669b2483f8d4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e", size = 1792738, upload-time = "2025-10-28T20:58:29.787Z" },
- { url = "https://files.pythonhosted.org/packages/41/75/aaf1eea4c188e51538c04cc568040e3082db263a57086ea74a7d38c39e42/aiohttp-3.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61", size = 1624061, upload-time = "2025-10-28T20:58:32.529Z" },
- { url = "https://files.pythonhosted.org/packages/9b/c2/3b6034de81fbcc43de8aeb209073a2286dfb50b86e927b4efd81cf848197/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661", size = 1789201, upload-time = "2025-10-28T20:58:34.618Z" },
- { url = "https://files.pythonhosted.org/packages/c9/38/c15dcf6d4d890217dae79d7213988f4e5fe6183d43893a9cf2fe9e84ca8d/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98", size = 1776868, upload-time = "2025-10-28T20:58:38.835Z" },
- { url = "https://files.pythonhosted.org/packages/04/75/f74fd178ac81adf4f283a74847807ade5150e48feda6aef024403716c30c/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693", size = 1790660, upload-time = "2025-10-28T20:58:41.507Z" },
- { url = "https://files.pythonhosted.org/packages/e7/80/7368bd0d06b16b3aba358c16b919e9c46cf11587dc572091031b0e9e3ef0/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a", size = 1617548, upload-time = "2025-10-28T20:58:43.674Z" },
- { url = "https://files.pythonhosted.org/packages/7d/4b/a6212790c50483cb3212e507378fbe26b5086d73941e1ec4b56a30439688/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be", size = 1817240, upload-time = "2025-10-28T20:58:45.787Z" },
- { url = "https://files.pythonhosted.org/packages/ff/f7/ba5f0ba4ea8d8f3c32850912944532b933acbf0f3a75546b89269b9b7dde/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c", size = 1762334, upload-time = "2025-10-28T20:58:47.936Z" },
- { url = "https://files.pythonhosted.org/packages/7e/83/1a5a1856574588b1cad63609ea9ad75b32a8353ac995d830bf5da9357364/aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734", size = 464685, upload-time = "2025-10-28T20:58:50.642Z" },
- { url = "https://files.pythonhosted.org/packages/9f/4d/d22668674122c08f4d56972297c51a624e64b3ed1efaa40187607a7cb66e/aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f", size = 498093, upload-time = "2025-10-28T20:58:52.782Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" },
+ { url = "https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" },
+ { url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" },
+ { url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" },
+ { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" },
+ { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" },
+ { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" },
+ { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" },
+ { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" },
+ { url = "https://files.pythonhosted.org/packages/99/36/5b6514a9f5d66f4e2597e40dea2e3db271e023eb7a5d22defe96ba560996/aiohttp-3.13.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:ea37047c6b367fd4bd632bff8077449b8fa034b69e812a18e0132a00fae6e808", size = 737238, upload-time = "2026-01-03T17:31:17.909Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/49/459327f0d5bcd8c6c9ca69e60fdeebc3622861e696490d8674a6d0cb90a6/aiohttp-3.13.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6fc0e2337d1a4c3e6acafda6a78a39d4c14caea625124817420abceed36e2415", size = 492292, upload-time = "2026-01-03T17:31:19.919Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/0b/b97660c5fd05d3495b4eb27f2d0ef18dc1dc4eff7511a9bf371397ff0264/aiohttp-3.13.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c685f2d80bb67ca8c3837823ad76196b3694b0159d232206d1e461d3d434666f", size = 493021, upload-time = "2026-01-03T17:31:21.636Z" },
+ { url = "https://files.pythonhosted.org/packages/54/d4/438efabdf74e30aeceb890c3290bbaa449780583b1270b00661126b8aae4/aiohttp-3.13.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48e377758516d262bde50c2584fc6c578af272559c409eecbdd2bae1601184d6", size = 1717263, upload-time = "2026-01-03T17:31:23.296Z" },
+ { url = "https://files.pythonhosted.org/packages/71/f2/7bddc7fd612367d1459c5bcf598a9e8f7092d6580d98de0e057eb42697ad/aiohttp-3.13.3-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:34749271508078b261c4abb1767d42b8d0c0cc9449c73a4df494777dc55f0687", size = 1669107, upload-time = "2026-01-03T17:31:25.334Z" },
+ { url = "https://files.pythonhosted.org/packages/00/5a/1aeaecca40e22560f97610a329e0e5efef5e0b5afdf9f857f0d93839ab2e/aiohttp-3.13.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:82611aeec80eb144416956ec85b6ca45a64d76429c1ed46ae1b5f86c6e0c9a26", size = 1760196, upload-time = "2026-01-03T17:31:27.394Z" },
+ { url = "https://files.pythonhosted.org/packages/f8/f8/0ff6992bea7bd560fc510ea1c815f87eedd745fe035589c71ce05612a19a/aiohttp-3.13.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2fff83cfc93f18f215896e3a190e8e5cb413ce01553901aca925176e7568963a", size = 1843591, upload-time = "2026-01-03T17:31:29.238Z" },
+ { url = "https://files.pythonhosted.org/packages/e3/d1/e30e537a15f53485b61f5be525f2157da719819e8377298502aebac45536/aiohttp-3.13.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bbe7d4cecacb439e2e2a8a1a7b935c25b812af7a5fd26503a66dadf428e79ec1", size = 1720277, upload-time = "2026-01-03T17:31:31.053Z" },
+ { url = "https://files.pythonhosted.org/packages/84/45/23f4c451d8192f553d38d838831ebbc156907ea6e05557f39563101b7717/aiohttp-3.13.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b928f30fe49574253644b1ca44b1b8adbd903aa0da4b9054a6c20fc7f4092a25", size = 1548575, upload-time = "2026-01-03T17:31:32.87Z" },
+ { url = "https://files.pythonhosted.org/packages/6a/ed/0a42b127a43712eda7807e7892c083eadfaf8429ca8fb619662a530a3aab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7b5e8fe4de30df199155baaf64f2fcd604f4c678ed20910db8e2c66dc4b11603", size = 1679455, upload-time = "2026-01-03T17:31:34.76Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/b5/c05f0c2b4b4fe2c9d55e73b6d3ed4fd6c9dc2684b1d81cbdf77e7fad9adb/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:8542f41a62bcc58fc7f11cf7c90e0ec324ce44950003feb70640fc2a9092c32a", size = 1687417, upload-time = "2026-01-03T17:31:36.699Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/6b/915bc5dad66aef602b9e459b5a973529304d4e89ca86999d9d75d80cbd0b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5e1d8c8b8f1d91cd08d8f4a3c2b067bfca6ec043d3ff36de0f3a715feeedf926", size = 1729968, upload-time = "2026-01-03T17:31:38.622Z" },
+ { url = "https://files.pythonhosted.org/packages/11/3b/e84581290a9520024a08640b63d07673057aec5ca548177a82026187ba73/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:90455115e5da1c3c51ab619ac57f877da8fd6d73c05aacd125c5ae9819582aba", size = 1545690, upload-time = "2026-01-03T17:31:40.57Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/04/0c3655a566c43fd647c81b895dfe361b9f9ad6d58c19309d45cff52d6c3b/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:042e9e0bcb5fba81886c8b4fbb9a09d6b8a00245fd8d88e4d989c1f96c74164c", size = 1746390, upload-time = "2026-01-03T17:31:42.857Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/53/71165b26978f719c3419381514c9690bd5980e764a09440a10bb816ea4ab/aiohttp-3.13.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2eb752b102b12a76ca02dff751a801f028b4ffbbc478840b473597fc91a9ed43", size = 1702188, upload-time = "2026-01-03T17:31:44.984Z" },
+ { url = "https://files.pythonhosted.org/packages/29/a7/cbe6c9e8e136314fa1980da388a59d2f35f35395948a08b6747baebb6aa6/aiohttp-3.13.3-cp314-cp314-win32.whl", hash = "sha256:b556c85915d8efaed322bf1bdae9486aa0f3f764195a0fb6ee962e5c71ef5ce1", size = 433126, upload-time = "2026-01-03T17:31:47.463Z" },
+ { url = "https://files.pythonhosted.org/packages/de/56/982704adea7d3b16614fc5936014e9af85c0e34b58f9046655817f04306e/aiohttp-3.13.3-cp314-cp314-win_amd64.whl", hash = "sha256:9bf9f7a65e7aa20dd764151fb3d616c81088f91f8df39c3893a536e279b4b984", size = 459128, upload-time = "2026-01-03T17:31:49.2Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/2a/3c79b638a9c3d4658d345339d22070241ea341ed4e07b5ac60fb0f418003/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:05861afbbec40650d8a07ea324367cb93e9e8cc7762e04dd4405df99fa65159c", size = 769512, upload-time = "2026-01-03T17:31:51.134Z" },
+ { url = "https://files.pythonhosted.org/packages/29/b9/3e5014d46c0ab0db8707e0ac2711ed28c4da0218c358a4e7c17bae0d8722/aiohttp-3.13.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2fc82186fadc4a8316768d61f3722c230e2c1dcab4200d52d2ebdf2482e47592", size = 506444, upload-time = "2026-01-03T17:31:52.85Z" },
+ { url = "https://files.pythonhosted.org/packages/90/03/c1d4ef9a054e151cd7839cdc497f2638f00b93cbe8043983986630d7a80c/aiohttp-3.13.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0add0900ff220d1d5c5ebbf99ed88b0c1bbf87aa7e4262300ed1376a6b13414f", size = 510798, upload-time = "2026-01-03T17:31:54.91Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/76/8c1e5abbfe8e127c893fe7ead569148a4d5a799f7cf958d8c09f3eedf097/aiohttp-3.13.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:568f416a4072fbfae453dcf9a99194bbb8bdeab718e08ee13dfa2ba0e4bebf29", size = 1868835, upload-time = "2026-01-03T17:31:56.733Z" },
+ { url = "https://files.pythonhosted.org/packages/8e/ac/984c5a6f74c363b01ff97adc96a3976d9c98940b8969a1881575b279ac5d/aiohttp-3.13.3-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:add1da70de90a2569c5e15249ff76a631ccacfe198375eead4aadf3b8dc849dc", size = 1720486, upload-time = "2026-01-03T17:31:58.65Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/9a/b7039c5f099c4eb632138728828b33428585031a1e658d693d41d07d89d1/aiohttp-3.13.3-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:10b47b7ba335d2e9b1239fa571131a87e2d8ec96b333e68b2a305e7a98b0bae2", size = 1847951, upload-time = "2026-01-03T17:32:00.989Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/02/3bec2b9a1ba3c19ff89a43a19324202b8eb187ca1e928d8bdac9bbdddebd/aiohttp-3.13.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3dd4dce1c718e38081c8f35f323209d4c1df7d4db4bab1b5c88a6b4d12b74587", size = 1941001, upload-time = "2026-01-03T17:32:03.122Z" },
+ { url = "https://files.pythonhosted.org/packages/37/df/d879401cedeef27ac4717f6426c8c36c3091c6e9f08a9178cc87549c537f/aiohttp-3.13.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34bac00a67a812570d4a460447e1e9e06fae622946955f939051e7cc895cfab8", size = 1797246, upload-time = "2026-01-03T17:32:05.255Z" },
+ { url = "https://files.pythonhosted.org/packages/8d/15/be122de1f67e6953add23335c8ece6d314ab67c8bebb3f181063010795a7/aiohttp-3.13.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a19884d2ee70b06d9204b2727a7b9f983d0c684c650254679e716b0b77920632", size = 1627131, upload-time = "2026-01-03T17:32:07.607Z" },
+ { url = "https://files.pythonhosted.org/packages/12/12/70eedcac9134cfa3219ab7af31ea56bc877395b1ac30d65b1bc4b27d0438/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ca7f2bb6ba8348a3614c7918cc4bb73268c5ac2a207576b7afea19d3d9f64", size = 1795196, upload-time = "2026-01-03T17:32:09.59Z" },
+ { url = "https://files.pythonhosted.org/packages/32/11/b30e1b1cd1f3054af86ebe60df96989c6a414dd87e27ad16950eee420bea/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:b0d95340658b9d2f11d9697f59b3814a9d3bb4b7a7c20b131df4bcef464037c0", size = 1782841, upload-time = "2026-01-03T17:32:11.445Z" },
+ { url = "https://files.pythonhosted.org/packages/88/0d/d98a9367b38912384a17e287850f5695c528cff0f14f791ce8ee2e4f7796/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1e53262fd202e4b40b70c3aff944a8155059beedc8a89bba9dc1f9ef06a1b56", size = 1795193, upload-time = "2026-01-03T17:32:13.705Z" },
+ { url = "https://files.pythonhosted.org/packages/43/a5/a2dfd1f5ff5581632c7f6a30e1744deda03808974f94f6534241ef60c751/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:d60ac9663f44168038586cab2157e122e46bdef09e9368b37f2d82d354c23f72", size = 1621979, upload-time = "2026-01-03T17:32:15.965Z" },
+ { url = "https://files.pythonhosted.org/packages/fa/f0/12973c382ae7c1cccbc4417e129c5bf54c374dfb85af70893646e1f0e749/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:90751b8eed69435bac9ff4e3d2f6b3af1f57e37ecb0fbeee59c0174c9e2d41df", size = 1822193, upload-time = "2026-01-03T17:32:18.219Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/5f/24155e30ba7f8c96918af1350eb0663e2430aad9e001c0489d89cd708ab1/aiohttp-3.13.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:fc353029f176fd2b3ec6cfc71be166aba1936fe5d73dd1992ce289ca6647a9aa", size = 1769801, upload-time = "2026-01-03T17:32:20.25Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/f8/7314031ff5c10e6ece114da79b338ec17eeff3a079e53151f7e9f43c4723/aiohttp-3.13.3-cp314-cp314t-win32.whl", hash = "sha256:2e41b18a58da1e474a057b3d35248d8320029f61d70a37629535b16a0c8f3767", size = 466523, upload-time = "2026-01-03T17:32:22.215Z" },
+ { url = "https://files.pythonhosted.org/packages/b4/63/278a98c715ae467624eafe375542d8ba9b4383a016df8fdefe0ae28382a7/aiohttp-3.13.3-cp314-cp314t-win_amd64.whl", hash = "sha256:44531a36aa2264a1860089ffd4dce7baf875ee5a6079d5fb42e261c704ef7344", size = 499694, upload-time = "2026-01-03T17:32:24.546Z" },
]
[[package]]
@@ -120,14 +111,14 @@ wheels = [
[[package]]
name = "anyio"
-version = "4.12.0"
+version = "4.12.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/16/ce/8a777047513153587e5434fd752e89334ac33e379aa3497db860eeb60377/anyio-4.12.0.tar.gz", hash = "sha256:73c693b567b0c55130c104d0b43a9baf3aa6a31fc6110116509f27bf75e21ec0", size = 228266, upload-time = "2025-11-28T23:37:38.911Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/7f/9c/36c5c37947ebfb8c7f22e0eb6e4d188ee2d53aa3880f3f2744fb894f0cb1/anyio-4.12.0-py3-none-any.whl", hash = "sha256:dad2376a628f98eeca4881fc56cd06affd18f659b17a747d3ff0307ced94b1bb", size = 113362, upload-time = "2025-11-28T23:36:57.897Z" },
+ { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
]
[[package]]
@@ -139,22 +130,13 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
]
-[[package]]
-name = "cachetools"
-version = "6.2.4"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" },
-]
-
[[package]]
name = "certifi"
-version = "2025.11.12"
+version = "2026.1.4"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
]
[[package]]
@@ -254,28 +236,28 @@ wheels = [
[[package]]
name = "faiss-cpu"
-version = "1.13.1"
+version = "1.13.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "packaging" },
]
wheels = [
- { url = "https://files.pythonhosted.org/packages/66/92/c4f30580aee11fda3f424f8509d9b5ad96b9f44409f52a7ceb6b42880e50/faiss_cpu-1.13.1-cp310-abi3-macosx_14_0_arm64.whl", hash = "sha256:2967def7aa2da8efbf6a5da81138780aa17a9970ca666417cb632a00a593423d", size = 3418004, upload-time = "2025-12-05T01:01:51.955Z" },
- { url = "https://files.pythonhosted.org/packages/04/1f/30803e63affa8bbdfd549f83ed5d39ccf900c030b6da8010d0b95f7ae1d1/faiss_cpu-1.13.1-cp310-abi3-macosx_14_0_x86_64.whl", hash = "sha256:30c179891656a988f5223e586c696432aacc5f4e763d85e165be30ef57ac2bbf", size = 7806468, upload-time = "2025-12-05T01:01:54.096Z" },
- { url = "https://files.pythonhosted.org/packages/17/ae/40f66b640664af319ff8be87a9b0cc2c9ec025a2cf82b27cc27964fcf3c0/faiss_cpu-1.13.1-cp310-abi3-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff5bdbf392081659e6b0f98f03b602bf08d1b5a790e28aa1185ae925decff6b2", size = 11410471, upload-time = "2025-12-05T01:01:56.038Z" },
- { url = "https://files.pythonhosted.org/packages/38/f8/b8f0862ec6af8a71c6410a61baa35571161f7dba616aed696e91cb464630/faiss_cpu-1.13.1-cp310-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3de25edb0e69c1b95eeda923b2e23da01f472b2cc3f4817e63b25a56847d6ea7", size = 23719213, upload-time = "2025-12-05T01:01:58.545Z" },
- { url = "https://files.pythonhosted.org/packages/4c/ee/01e07e4e780b0b739a3299ca8e5b4751970629b0f2c51f5ec464718e9f9e/faiss_cpu-1.13.1-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0b2f0e6cd30511b9fe320a2309389269269d3e363cc88c3a0380095a8c08ae27", size = 13400767, upload-time = "2025-12-05T01:02:00.742Z" },
- { url = "https://files.pythonhosted.org/packages/da/27/0c4e249fe50f87f1f038c80deebcdd28b23617bb42e3e5708b34c86fdae7/faiss_cpu-1.13.1-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8ad542573ad05af6c508f4cf5268ba2aad06f0c8d4e780a0eeba7fe6fd274922", size = 24960102, upload-time = "2025-12-05T01:02:04.56Z" },
- { url = "https://files.pythonhosted.org/packages/aa/75/0fb845be2e674531ce7f89207d7f932ffbc8fc50f866dba5569512305cc9/faiss_cpu-1.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5f71c8840794c39c1e1cdd92c2ef4d3f77b3e650f614f296e31c2545ad2bab51", size = 18812964, upload-time = "2025-12-05T01:02:20.505Z" },
- { url = "https://files.pythonhosted.org/packages/00/2c/c13c816546ffc5b0b7f8ca64811b24b17d73ff6382464f1ab0eed87b7753/faiss_cpu-1.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:24cb2d6ce2459c94e15a6cecfed15ff8d9f997aed7bae4037c0f045022030cb3", size = 8508631, upload-time = "2025-12-05T01:02:22.751Z" },
- { url = "https://files.pythonhosted.org/packages/45/6f/adf064c644a80c0ebd499144ccbab672c9946361132617ceafcc48819771/faiss_cpu-1.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:5195ab9149c563cafe4da8ab4cc0b84b177cbb1f8aa897a8c199e11ef4f37e16", size = 18816994, upload-time = "2025-12-05T01:02:25.055Z" },
- { url = "https://files.pythonhosted.org/packages/61/53/042f863a6a1202af8eec94604dc8b192319253faabb8ee6070297a24c091/faiss_cpu-1.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:ffc58173e24026ee4dc08c50dd3506ad553d4b2103892500b0d4ae9344027d57", size = 8511280, upload-time = "2025-12-05T01:02:27.163Z" },
+ { url = "https://files.pythonhosted.org/packages/07/c9/671f66f6b31ec48e5825d36435f0cb91189fa8bb6b50724029dbff4ca83c/faiss_cpu-1.13.2-cp310-abi3-macosx_14_0_arm64.whl", hash = "sha256:a9064eb34f8f64438dd5b95c8f03a780b1a3f0b99c46eeacb1f0b5d15fc02dc1", size = 3452776, upload-time = "2025-12-24T10:27:01.419Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/4a/97150aa1582fb9c2bca95bd8fc37f27d3b470acec6f0a6833844b21e4b40/faiss_cpu-1.13.2-cp310-abi3-macosx_14_0_x86_64.whl", hash = "sha256:c8d097884521e1ecaea6467aeebbf1aa56ee4a36350b48b2ca6b39366565c317", size = 7896434, upload-time = "2025-12-24T10:27:03.592Z" },
+ { url = "https://files.pythonhosted.org/packages/0b/d0/0940575f059591ca31b63a881058adb16a387020af1709dcb7669460115c/faiss_cpu-1.13.2-cp310-abi3-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ee330a284042c2480f2e90450a10378fd95655d62220159b1408f59ee83ebf1", size = 11485825, upload-time = "2025-12-24T10:27:05.681Z" },
+ { url = "https://files.pythonhosted.org/packages/e7/e1/a5acac02aa593809f0123539afe7b4aff61d1db149e7093239888c9053e1/faiss_cpu-1.13.2-cp310-abi3-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ab88ee287c25a119213153d033f7dd64c3ccec466ace267395872f554b648cd7", size = 23845772, upload-time = "2025-12-24T10:27:08.194Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/7b/49dcaf354834ec457e85ca769d50bc9b5f3003fab7c94a9dcf08cf742793/faiss_cpu-1.13.2-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:85511129b34f890d19c98b82a0cd5ffb27d89d1cec2ee41d2621ee9f9ef8cf3f", size = 13477567, upload-time = "2025-12-24T10:27:10.822Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/6b/12bb4037921c38bb2c0b4cfc213ca7e04bbbebbfea89b0b5746248ce446e/faiss_cpu-1.13.2-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b32eb4065bac352b52a9f5ae07223567fab0a976c7d05017c01c45a1c24264f", size = 25102239, upload-time = "2025-12-24T10:27:13.476Z" },
+ { url = "https://files.pythonhosted.org/packages/60/4b/903d85bf3a8264d49964ec799e45c7ffc91098606b8bc9ef2c904c1a56cb/faiss_cpu-1.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:cb4b5ee184816a4b099162ac93c0d7f0033d81a88e7c1291d0a9cc41ec348984", size = 18891330, upload-time = "2025-12-24T10:27:28.806Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/52/5d10642da628f63544aab27e48416be4a7ea25c6b81d8bd65016d8538b00/faiss_cpu-1.13.2-cp313-cp313-win_arm64.whl", hash = "sha256:1243967eeb2298791ff7f3683a4abd2100d7e6ec7542ca05c3b75d47a7f621e5", size = 8553088, upload-time = "2025-12-24T10:27:31.325Z" },
+ { url = "https://files.pythonhosted.org/packages/b0/b1/daaab8046f56c60079648bd83774f61b283b59a9930a2f60790ee4cdedfe/faiss_cpu-1.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:c8b645e7d56591aa35dc75415bb53a62e4a494dba010e16f4b67daeffd830bd7", size = 18892621, upload-time = "2025-12-24T10:27:33.923Z" },
+ { url = "https://files.pythonhosted.org/packages/06/6f/5eaf3e249c636e616ebb52e369a4a2f1d32b1caf9a611b4f917b3dd21423/faiss_cpu-1.13.2-cp314-cp314-win_arm64.whl", hash = "sha256:8113a2a80b59fe5653cf66f5c0f18be0a691825601a52a614c30beb1fca9bc7c", size = 8556374, upload-time = "2025-12-24T10:27:36.653Z" },
]
[[package]]
name = "fastapi"
-version = "0.125.0"
+version = "0.128.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-doc" },
@@ -283,18 +265,18 @@ dependencies = [
{ name = "starlette" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/17/71/2df15009fb4bdd522a069d2fbca6007c6c5487fce5cb965be00fc335f1d1/fastapi-0.125.0.tar.gz", hash = "sha256:16b532691a33e2c5dee1dac32feb31dc6eb41a3dd4ff29a95f9487cb21c054c0", size = 370550, upload-time = "2025-12-17T21:41:44.15Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/34/2f/ff2fcc98f500713368d8b650e1bbc4a0b3ebcdd3e050dcdaad5f5a13fd7e/fastapi-0.125.0-py3-none-any.whl", hash = "sha256:2570ec4f3aecf5cca8f0428aed2398b774fcdfee6c2116f86e80513f2f86a7a1", size = 112888, upload-time = "2025-12-17T21:41:41.286Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" },
]
[[package]]
name = "filelock"
-version = "3.20.1"
+version = "3.20.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a7/23/ce7a1126827cedeb958fc043d61745754464eb56c5937c35bbf2b8e26f34/filelock-3.20.1.tar.gz", hash = "sha256:b8360948b351b80f420878d8516519a2204b07aefcdcfd24912a5d33127f188c", size = 19476, upload-time = "2025-12-15T23:54:28.027Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c1/e0/a75dbe4bca1e7d41307323dad5ea2efdd95408f74ab2de8bd7dba9b51a1a/filelock-3.20.2.tar.gz", hash = "sha256:a2241ff4ddde2a7cebddf78e39832509cb045d18ec1a09d7248d6bfc6bfbbe64", size = 19510, upload-time = "2026-01-02T15:33:32.582Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e3/7f/a1a97644e39e7316d850784c642093c99df1290a460df4ede27659056834/filelock-3.20.1-py3-none-any.whl", hash = "sha256:15d9e9a67306188a44baa72f569d2bfd803076269365fdea0934385da4dc361a", size = 16666, upload-time = "2025-12-15T23:54:26.874Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/30/ab407e2ec752aa541704ed8f93c11e2a5d92c168b8a755d818b74a3c5c2d/filelock-3.20.2-py3-none-any.whl", hash = "sha256:fbba7237d6ea277175a32c54bb71ef814a8546d8601269e1bfc388de333974e8", size = 16697, upload-time = "2026-01-02T15:33:31.133Z" },
]
[[package]]
@@ -390,16 +372,15 @@ wheels = [
[[package]]
name = "google-auth"
-version = "2.45.0"
+version = "2.47.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "cachetools" },
{ name = "pyasn1-modules" },
{ name = "rsa" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/e5/00/3c794502a8b892c404b2dea5b3650eb21bfc7069612fbfd15c7f17c1cb0d/google_auth-2.45.0.tar.gz", hash = "sha256:90d3f41b6b72ea72dd9811e765699ee491ab24139f34ebf1ca2b9cc0c38708f3", size = 320708, upload-time = "2025-12-15T22:58:42.889Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/60/3c/ec64b9a275ca22fa1cd3b6e77fefcf837b0732c890aa32d2bd21313d9b33/google_auth-2.47.0.tar.gz", hash = "sha256:833229070a9dfee1a353ae9877dcd2dec069a8281a4e72e72f77d4a70ff945da", size = 323719, upload-time = "2026-01-06T21:55:31.045Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/c6/97/451d55e05487a5cd6279a01a7e34921858b16f7dc8aa38a2c684743cd2b3/google_auth-2.45.0-py2.py3-none-any.whl", hash = "sha256:82344e86dc00410ef5382d99be677c6043d72e502b625aa4f4afa0bdacca0f36", size = 233312, upload-time = "2025-12-15T22:58:40.777Z" },
+ { url = "https://files.pythonhosted.org/packages/db/18/79e9008530b79527e0d5f79e7eef08d3b179b7f851cfd3a2f27822fbdfa9/google_auth-2.47.0-py3-none-any.whl", hash = "sha256:c516d68336bfde7cf0da26aab674a36fedcf04b37ac4edd59c597178760c3498", size = 234867, upload-time = "2026-01-06T21:55:28.6Z" },
]
[package.optional-dependencies]
@@ -409,7 +390,7 @@ requests = [
[[package]]
name = "google-genai"
-version = "1.56.0"
+version = "1.57.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -423,9 +404,9 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "websockets" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/70/ad/d3ac5a102135bd3f1e4b1475ca65d2bd4bcc22eb2e9348ac40fe3fadb1d6/google_genai-1.56.0.tar.gz", hash = "sha256:0491af33c375f099777ae207d9621f044e27091fafad4c50e617eba32165e82f", size = 340451, upload-time = "2025-12-17T12:35:05.412Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/2b/b4/8251c2d2576224a4b51a8ab6159820f9200b8da28ff555c78ee15607096e/google_genai-1.57.0.tar.gz", hash = "sha256:0ff9c36b8d68abfbdbd13b703ece926de5f3e67955666b36315ecf669b94a826", size = 485648, upload-time = "2026-01-07T20:38:20.271Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/84/93/94bc7a89ef4e7ed3666add55cd859d1483a22737251df659bf1aa46e9405/google_genai-1.56.0-py3-none-any.whl", hash = "sha256:9e6b11e0c105ead229368cb5849a480e4d0185519f8d9f538d61ecfcf193b052", size = 426563, upload-time = "2025-12-17T12:35:03.717Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/02/858bdae08e2184b6afe0b18bc3113318522c9cf326a5a1698055edd31f88/google_genai-1.57.0-py3-none-any.whl", hash = "sha256:d63c7a89a1f549c4d14032f41a0cdb4b6fe3f565e2eee6b5e0907a0aeceabefd", size = 713323, upload-time = "2026-01-07T20:38:18.051Z" },
]
[[package]]
@@ -562,6 +543,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
+[[package]]
+name = "ido-agents"
+version = "0.1.4"
+source = { git = "https://github.com/canefe/ido-agents.git?rev=v0.1.4#19d84f10b4e2f39cdd88428137bb1fdc8e0ae817" }
+dependencies = [
+ { name = "langchain" },
+ { name = "langchain-openai" },
+ { name = "langgraph" },
+ { name = "pydantic" },
+ { name = "python-dotenv" },
+]
+
[[package]]
name = "iniconfig"
version = "2.3.0"
@@ -645,21 +638,21 @@ wheels = [
[[package]]
name = "langchain"
-version = "1.2.0"
+version = "1.2.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain-core" },
{ name = "langgraph" },
{ name = "pydantic" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/b1/12/3a74c22abdfddd877dfc2ee666d516f9132877fcd25eb4dd694835c59c79/langchain-1.2.0.tar.gz", hash = "sha256:a087d1e2b2969819e29a91a6d5f98302aafe31bd49ba377ecee3bf5a5dcfe14a", size = 536126, upload-time = "2025-12-15T14:51:42.24Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/5f/78/9565319259d92818d96f30d55507ee1072fbf5c008b95a6acecf5e47c4d6/langchain-1.2.3.tar.gz", hash = "sha256:9d6171f9c3c760ca3c7c2cf8518e6f8625380962c488b41e35ebff1f1d611077", size = 548296, upload-time = "2026-01-08T20:26:30.149Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/23/00/4e3fa0d90f5a5c376ccb8ca983d0f0f7287783dfac48702e18f01d24673b/langchain-1.2.0-py3-none-any.whl", hash = "sha256:82f0d17aa4fbb11560b30e1e7d4aeb75e3ad71ce09b85c90ab208b181a24ffac", size = 102828, upload-time = "2025-12-15T14:51:40.802Z" },
+ { url = "https://files.pythonhosted.org/packages/de/e5/9b4f58533f8ce3013b1a993289eb11e8607d9c9d9d14699b29c6ac3b4132/langchain-1.2.3-py3-none-any.whl", hash = "sha256:5cdc7c80f672962b030c4b0d16d0d8f26d849c0ada63a4b8653a20d7505512ae", size = 106428, upload-time = "2026-01-08T20:26:29.162Z" },
]
[[package]]
name = "langchain-classic"
-version = "1.0.0"
+version = "1.0.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain-core" },
@@ -670,9 +663,9 @@ dependencies = [
{ name = "requests" },
{ name = "sqlalchemy" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/d9/b1/a66babeccb2c05ed89690a534296688c0349bee7a71641e91ecc2afd72fd/langchain_classic-1.0.0.tar.gz", hash = "sha256:a63655609254ebc36d660eb5ad7c06c778b2e6733c615ffdac3eac4fbe2b12c5", size = 10514930, upload-time = "2025-10-17T16:02:47.887Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/7c/4b/bd03518418ece4c13192a504449b58c28afee915dc4a6f4b02622458cb1b/langchain_classic-1.0.1.tar.gz", hash = "sha256:40a499684df36b005a1213735dc7f8dca8f5eb67978d6ec763e7a49780864fdc", size = 10516020, upload-time = "2025-12-23T22:55:22.615Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/74/74/246f809a3741c21982f985ca0113ec92d3c84896308561cc4414823f6951/langchain_classic-1.0.0-py3-none-any.whl", hash = "sha256:97f71f150c10123f5511c08873f030e35ede52311d729a7688c721b4e1e01f33", size = 1040701, upload-time = "2025-10-17T16:02:46.35Z" },
+ { url = "https://files.pythonhosted.org/packages/83/0f/eab87f017d7fe28e8c11fff614f4cdbfae32baadb77d0f79e9f922af1df2/langchain_classic-1.0.1-py3-none-any.whl", hash = "sha256:131d83a02bb80044c68fedc1ab4ae885d5b8f8c2c742d8ab9e7534ad9cda8e80", size = 1040666, upload-time = "2025-12-23T22:55:21.025Z" },
]
[[package]]
@@ -700,7 +693,7 @@ wheels = [
[[package]]
name = "langchain-core"
-version = "1.2.2"
+version = "1.2.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jsonpatch" },
@@ -712,14 +705,14 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "uuid-utils" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/70/45/3d63fd7dc873abd9a0b1960775554dcc2a45dd4905937ec0b3d101dd5f10/langchain_core-1.2.2.tar.gz", hash = "sha256:3f9c28ec6d0fe47636d28b19799794458d55da81f37309832b2b9d11c93c5e95", size = 803123, upload-time = "2025-12-16T20:25:53.788Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/b9/ce/ba5ed5ea6df22965b2893c2ed28ebb456204962723d408904c4acfa5e942/langchain_core-1.2.6.tar.gz", hash = "sha256:b4e7841dd7f8690375aa07c54739178dc2c635147d475e0c2955bf82a1afa498", size = 833343, upload-time = "2026-01-02T21:35:44.749Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/59/97/57497c8b26829e38c8dd4abe972d75e38fc3904324a3042bb01d9e0753b8/langchain_core-1.2.2-py3-none-any.whl", hash = "sha256:3a83dc14217de5cba11b1a0bd43c48702401bbd18dc25cac2ffab5ac83a61cd0", size = 476125, upload-time = "2025-12-16T20:25:52.581Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/40/0655892c245d8fbe6bca6d673ab5927e5c3ab7be143de40b52289a0663bc/langchain_core-1.2.6-py3-none-any.whl", hash = "sha256:aa6ed954b4b1f4504937fe75fdf674317027e9a91ba7a97558b0de3dc8004e34", size = 489096, upload-time = "2026-01-02T21:35:43.391Z" },
]
[[package]]
name = "langchain-google-genai"
-version = "4.1.2"
+version = "4.1.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "filetype" },
@@ -727,23 +720,23 @@ dependencies = [
{ name = "langchain-core" },
{ name = "pydantic" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/2e/04/c8d2840d96f05485abeb5288bd88ec8c5fb7a24065968201fa54969a47d8/langchain_google_genai-4.1.2.tar.gz", hash = "sha256:aa0dd7807a9a15651d10cd228c574f23fe46e2ce62921bf21d73a63869ecd814", size = 276143, upload-time = "2025-12-19T04:10:57.799Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/ba/85/078d5aba488a82a53b8372ac1037dee4f64b020bac69e6a07e37a5059059/langchain_google_genai-4.1.3.tar.gz", hash = "sha256:28966c8fe58c9a401fdc37aeeeb0eb51744210803838ce050f022fc53d2f994e", size = 277024, upload-time = "2026-01-05T23:29:34.362Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/f6/2f/a63dde25c9d11340d0f5f538a9fea77571b4b4e73294ad58fa6ea84079a0/langchain_google_genai-4.1.2-py3-none-any.whl", hash = "sha256:89790f2e3ca113f7e45883f541a834120d279e21f235fffc491c81cd1af11fdd", size = 65640, upload-time = "2025-12-19T04:10:56.386Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/aa/ca61dc2d202a23d7605a5c0ea24bd86a39a5c23c932a166b87c7797747c5/langchain_google_genai-4.1.3-py3-none-any.whl", hash = "sha256:5d710e2dcf449d49704bdbcd31729be90b386fa008395f9552a5c090241de1a5", size = 66262, upload-time = "2026-01-05T23:29:32.924Z" },
]
[[package]]
name = "langchain-openai"
-version = "1.1.5"
+version = "1.1.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "langchain-core" },
{ name = "openai" },
{ name = "tiktoken" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/fc/1c/008a6dd7b3523121be1a4f24701b099ae79193dab9b329dfb787bece08bf/langchain_openai-1.1.5.tar.gz", hash = "sha256:a8ca5f3919bd948867c7d427a575b34f7c141110ef7cbc14ea7bbc46363871de", size = 1038129, upload-time = "2025-12-17T19:14:36.392Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/38/b7/30bfc4d1b658a9ee524bcce3b0b2ec9c45a11c853a13c4f0c9da9882784b/langchain_openai-1.1.7.tar.gz", hash = "sha256:f5ec31961ed24777548b63a5fe313548bc6e0eb9730d6552b8c6418765254c81", size = 1039134, upload-time = "2026-01-07T19:44:59.728Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e8/c5/22b690a27ba6b1ca6876270473aab1610cb8767314e5038cb6b826d9b69b/langchain_openai-1.1.5-py3-none-any.whl", hash = "sha256:d3a3b0c39e1513bbb9e5d4526c194909a00c5733195dbe90bfea6619b00420ca", size = 84569, upload-time = "2025-12-17T19:14:35.529Z" },
+ { url = "https://files.pythonhosted.org/packages/64/a1/50e7596aca775d8c3883eceeaf47489fac26c57c1abe243c00174f715a8a/langchain_openai-1.1.7-py3-none-any.whl", hash = "sha256:34e9cd686aac1a120d6472804422792bf8080a2103b5d21ee450c9e42d053815", size = 84753, upload-time = "2026-01-07T19:44:58.629Z" },
]
[[package]]
@@ -803,20 +796,20 @@ wheels = [
[[package]]
name = "langgraph-sdk"
-version = "0.3.0"
+version = "0.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "orjson" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/2b/1b/f328afb4f24f6e18333ff357d9580a3bb5b133ff2c7aae34fef7f5b87f31/langgraph_sdk-0.3.0.tar.gz", hash = "sha256:4145bc3c34feae227ae918341f66d3ba7d1499722c1ef4a8aae5ea828897d1d4", size = 130366, upload-time = "2025-12-12T22:19:30.323Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/a9/d3/b6be0b0aba2a53a8920a2b0b4328a83121ec03eea9952e576d06a4182f6f/langgraph_sdk-0.3.1.tar.gz", hash = "sha256:f6dadfd2444eeff3e01405a9005c95fb3a028d4bd954ebec80ea6150084f92bb", size = 130312, upload-time = "2025-12-18T22:11:47.42Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/69/48/ee4d7afb3c3d38bd2ebe51a4d37f1ed7f1058dd242f35994b562203067aa/langgraph_sdk-0.3.0-py3-none-any.whl", hash = "sha256:c1ade483fba17ae354ee920e4779042b18d5aba875f2a858ba569f62f628f26f", size = 66489, upload-time = "2025-12-12T22:19:29.228Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/fe/0c1c9c01a154eba62b20b02fabe811fd94a2b810061ae9e4d8462b8cf85a/langgraph_sdk-0.3.1-py3-none-any.whl", hash = "sha256:0b856923bfd20bf3441ce9d03bef488aa333fb610e972618799a9d584436acad", size = 66517, upload-time = "2025-12-18T22:11:46.625Z" },
]
[[package]]
name = "langsmith"
-version = "0.5.0"
+version = "0.6.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
@@ -828,22 +821,9 @@ dependencies = [
{ name = "uuid-utils" },
{ name = "zstandard" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/d8/4b/d448307e8557e36b20008d0d1cd0a58233c38d90bf978e1d093be0ca4cb2/langsmith-0.5.0.tar.gz", hash = "sha256:5cadf1ddd30e838cf61679f4a776aaef638d4b02ffbceba9f73283caebd39e1b", size = 869272, upload-time = "2025-12-16T17:35:38.78Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/ee/8a/d9bc95607846bc82fbe0b98d2592ffb5e036c97a362735ae926e3d519df7/langsmith-0.5.0-py3-none-any.whl", hash = "sha256:a83750cb3dccb33148d4ffe005e3e03080fad13e01671efbb74c9a68813bfef8", size = 273711, upload-time = "2025-12-16T17:35:37.165Z" },
-]
-
-[[package]]
-name = "loguru"
-version = "0.7.3"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "colorama", marker = "sys_platform == 'win32'" },
- { name = "win32-setctime", marker = "sys_platform == 'win32'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/0c/8e/3ea7a8e9ce8c530204964207af7f7778597f5a548dc1a489c0c0940561f3/langsmith-0.6.2.tar.gz", hash = "sha256:c2efd7ed61eed3b6fdbf158ea2e9862bc2636f2edc95e90d2faad9462773d097", size = 1739277, upload-time = "2026-01-08T23:17:40.504Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/e0/9d173dd2fa7f85d9ec4989f6f5a1a057d281daa8dada0ff8db0de0cb68aa/langsmith-0.6.2-py3-none-any.whl", hash = "sha256:1ea1a591f52683a5aeebdaa2b58458d72ce9598105dd8b29e16f7373631a6434", size = 282918, upload-time = "2026-01-08T23:17:38.858Z" },
]
[[package]]
@@ -860,14 +840,14 @@ wheels = [
[[package]]
name = "marshmallow"
-version = "3.26.1"
+version = "3.26.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "packaging" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825, upload-time = "2025-02-03T15:32:25.093Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/55/79/de6c16cc902f4fc372236926b0ce2ab7845268dcc30fb2fbb7f71b418631/marshmallow-3.26.2.tar.gz", hash = "sha256:bbe2adb5a03e6e3571b573f42527c6fe926e17467833660bebd11593ab8dfd57", size = 222095, upload-time = "2025-12-22T06:53:53.309Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878, upload-time = "2025-02-03T15:32:22.295Z" },
+ { url = "https://files.pythonhosted.org/packages/be/2f/5108cb3ee4ba6501748c4908b908e55f42a5b66245b4cfe0c99326e1ef6e/marshmallow-3.26.2-py3-none-any.whl", hash = "sha256:013fa8a3c4c276c24d26d84ce934dc964e2aa794345a0f8c7e5a7191482c8a73", size = 50964, upload-time = "2025-12-22T06:53:51.801Z" },
]
[[package]]
@@ -969,61 +949,68 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
]
+[[package]]
+name = "nodeenv"
+version = "1.10.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" },
+]
+
[[package]]
name = "numpy"
-version = "2.3.5"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251, upload-time = "2025-11-16T22:50:19.013Z" },
- { url = "https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652, upload-time = "2025-11-16T22:50:21.487Z" },
- { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172, upload-time = "2025-11-16T22:50:24.562Z" },
- { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990, upload-time = "2025-11-16T22:50:26.47Z" },
- { url = "https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902, upload-time = "2025-11-16T22:50:28.861Z" },
- { url = "https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430, upload-time = "2025-11-16T22:50:31.56Z" },
- { url = "https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551, upload-time = "2025-11-16T22:50:34.242Z" },
- { url = "https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275, upload-time = "2025-11-16T22:50:37.651Z" },
- { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637, upload-time = "2025-11-16T22:50:40.11Z" },
- { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090, upload-time = "2025-11-16T22:50:42.503Z" },
- { url = "https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710, upload-time = "2025-11-16T22:50:44.971Z" },
- { url = "https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292, upload-time = "2025-11-16T22:50:47.715Z" },
- { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897, upload-time = "2025-11-16T22:50:51.327Z" },
- { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391, upload-time = "2025-11-16T22:50:54.542Z" },
- { url = "https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275, upload-time = "2025-11-16T22:50:56.794Z" },
- { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855, upload-time = "2025-11-16T22:50:59.208Z" },
- { url = "https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359, upload-time = "2025-11-16T22:51:01.991Z" },
- { url = "https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374, upload-time = "2025-11-16T22:51:05.291Z" },
- { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size = 18594587, upload-time = "2025-11-16T22:51:08.585Z" },
- { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940, upload-time = "2025-11-16T22:51:11.541Z" },
- { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341, upload-time = "2025-11-16T22:51:14.312Z" },
- { url = "https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507, upload-time = "2025-11-16T22:51:16.846Z" },
- { url = "https://files.pythonhosted.org/packages/ba/97/1a914559c19e32d6b2e233cf9a6a114e67c856d35b1d6babca571a3e880f/numpy-2.3.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:bf06bc2af43fa8d32d30fae16ad965663e966b1a3202ed407b84c989c3221e82", size = 16735706, upload-time = "2025-11-16T22:51:19.558Z" },
- { url = "https://files.pythonhosted.org/packages/57/d4/51233b1c1b13ecd796311216ae417796b88b0616cfd8a33ae4536330748a/numpy-2.3.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:052e8c42e0c49d2575621c158934920524f6c5da05a1d3b9bab5d8e259e045f0", size = 12264507, upload-time = "2025-11-16T22:51:22.492Z" },
- { url = "https://files.pythonhosted.org/packages/45/98/2fe46c5c2675b8306d0b4a3ec3494273e93e1226a490f766e84298576956/numpy-2.3.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:1ed1ec893cff7040a02c8aa1c8611b94d395590d553f6b53629a4461dc7f7b63", size = 5093049, upload-time = "2025-11-16T22:51:25.171Z" },
- { url = "https://files.pythonhosted.org/packages/ce/0e/0698378989bb0ac5f1660c81c78ab1fe5476c1a521ca9ee9d0710ce54099/numpy-2.3.5-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:2dcd0808a421a482a080f89859a18beb0b3d1e905b81e617a188bd80422d62e9", size = 6626603, upload-time = "2025-11-16T22:51:27Z" },
- { url = "https://files.pythonhosted.org/packages/5e/a6/9ca0eecc489640615642a6cbc0ca9e10df70df38c4d43f5a928ff18d8827/numpy-2.3.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:727fd05b57df37dc0bcf1a27767a3d9a78cbbc92822445f32cc3436ba797337b", size = 14262696, upload-time = "2025-11-16T22:51:29.402Z" },
- { url = "https://files.pythonhosted.org/packages/c8/f6/07ec185b90ec9d7217a00eeeed7383b73d7e709dae2a9a021b051542a708/numpy-2.3.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fffe29a1ef00883599d1dc2c51aa2e5d80afe49523c261a74933df395c15c520", size = 16597350, upload-time = "2025-11-16T22:51:32.167Z" },
- { url = "https://files.pythonhosted.org/packages/75/37/164071d1dde6a1a84c9b8e5b414fa127981bad47adf3a6b7e23917e52190/numpy-2.3.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f7f0e05112916223d3f438f293abf0727e1181b5983f413dfa2fefc4098245c", size = 16040190, upload-time = "2025-11-16T22:51:35.403Z" },
- { url = "https://files.pythonhosted.org/packages/08/3c/f18b82a406b04859eb026d204e4e1773eb41c5be58410f41ffa511d114ae/numpy-2.3.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2e2eb32ddb9ccb817d620ac1d8dae7c3f641c1e5f55f531a33e8ab97960a75b8", size = 18536749, upload-time = "2025-11-16T22:51:39.698Z" },
- { url = "https://files.pythonhosted.org/packages/40/79/f82f572bf44cf0023a2fe8588768e23e1592585020d638999f15158609e1/numpy-2.3.5-cp314-cp314-win32.whl", hash = "sha256:66f85ce62c70b843bab1fb14a05d5737741e74e28c7b8b5a064de10142fad248", size = 6335432, upload-time = "2025-11-16T22:51:42.476Z" },
- { url = "https://files.pythonhosted.org/packages/a3/2e/235b4d96619931192c91660805e5e49242389742a7a82c27665021db690c/numpy-2.3.5-cp314-cp314-win_amd64.whl", hash = "sha256:e6a0bc88393d65807d751a614207b7129a310ca4fe76a74e5c7da5fa5671417e", size = 12919388, upload-time = "2025-11-16T22:51:45.275Z" },
- { url = "https://files.pythonhosted.org/packages/07/2b/29fd75ce45d22a39c61aad74f3d718e7ab67ccf839ca8b60866054eb15f8/numpy-2.3.5-cp314-cp314-win_arm64.whl", hash = "sha256:aeffcab3d4b43712bb7a60b65f6044d444e75e563ff6180af8f98dd4b905dfd2", size = 10476651, upload-time = "2025-11-16T22:51:47.749Z" },
- { url = "https://files.pythonhosted.org/packages/17/e1/f6a721234ebd4d87084cfa68d081bcba2f5cfe1974f7de4e0e8b9b2a2ba1/numpy-2.3.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:17531366a2e3a9e30762c000f2c43a9aaa05728712e25c11ce1dbe700c53ad41", size = 16834503, upload-time = "2025-11-16T22:51:50.443Z" },
- { url = "https://files.pythonhosted.org/packages/5c/1c/baf7ffdc3af9c356e1c135e57ab7cf8d247931b9554f55c467efe2c69eff/numpy-2.3.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d21644de1b609825ede2f48be98dfde4656aefc713654eeee280e37cadc4e0ad", size = 12381612, upload-time = "2025-11-16T22:51:53.609Z" },
- { url = "https://files.pythonhosted.org/packages/74/91/f7f0295151407ddc9ba34e699013c32c3c91944f9b35fcf9281163dc1468/numpy-2.3.5-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:c804e3a5aba5460c73955c955bdbd5c08c354954e9270a2c1565f62e866bdc39", size = 5210042, upload-time = "2025-11-16T22:51:56.213Z" },
- { url = "https://files.pythonhosted.org/packages/2e/3b/78aebf345104ec50dd50a4d06ddeb46a9ff5261c33bcc58b1c4f12f85ec2/numpy-2.3.5-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:cc0a57f895b96ec78969c34f682c602bf8da1a0270b09bc65673df2e7638ec20", size = 6724502, upload-time = "2025-11-16T22:51:58.584Z" },
- { url = "https://files.pythonhosted.org/packages/02/c6/7c34b528740512e57ef1b7c8337ab0b4f0bddf34c723b8996c675bc2bc91/numpy-2.3.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:900218e456384ea676e24ea6a0417f030a3b07306d29d7ad843957b40a9d8d52", size = 14308962, upload-time = "2025-11-16T22:52:01.698Z" },
- { url = "https://files.pythonhosted.org/packages/80/35/09d433c5262bc32d725bafc619e095b6a6651caf94027a03da624146f655/numpy-2.3.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a1bea522b25109bf8e6f3027bd810f7c1085c64a0c7ce050c1676ad0ba010b", size = 16655054, upload-time = "2025-11-16T22:52:04.267Z" },
- { url = "https://files.pythonhosted.org/packages/7a/ab/6a7b259703c09a88804fa2430b43d6457b692378f6b74b356155283566ac/numpy-2.3.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04822c00b5fd0323c8166d66c701dc31b7fbd252c100acd708c48f763968d6a3", size = 16091613, upload-time = "2025-11-16T22:52:08.651Z" },
- { url = "https://files.pythonhosted.org/packages/c2/88/330da2071e8771e60d1038166ff9d73f29da37b01ec3eb43cb1427464e10/numpy-2.3.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6889ec4ec662a1a37eb4b4fb26b6100841804dac55bd9df579e326cdc146227", size = 18591147, upload-time = "2025-11-16T22:52:11.453Z" },
- { url = "https://files.pythonhosted.org/packages/51/41/851c4b4082402d9ea860c3626db5d5df47164a712cb23b54be028b184c1c/numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5", size = 6479806, upload-time = "2025-11-16T22:52:14.641Z" },
- { url = "https://files.pythonhosted.org/packages/90/30/d48bde1dfd93332fa557cff1972fbc039e055a52021fbef4c2c4b1eefd17/numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf", size = 13105760, upload-time = "2025-11-16T22:52:17.975Z" },
- { url = "https://files.pythonhosted.org/packages/2d/fd/4b5eb0b3e888d86aee4d198c23acec7d214baaf17ea93c1adec94c9518b9/numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42", size = 10545459, upload-time = "2025-11-16T22:52:20.55Z" },
+version = "2.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a4/7a/6a3d14e205d292b738db449d0de649b373a59edb0d0b4493821d0a3e8718/numpy-2.4.0.tar.gz", hash = "sha256:6e504f7b16118198f138ef31ba24d985b124c2c469fe8467007cf30fd992f934", size = 20685720, upload-time = "2025-12-20T16:18:19.023Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a7/0d/853fd96372eda07c824d24adf02e8bc92bb3731b43a9b2a39161c3667cc4/numpy-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a152d86a3ae00ba5f47b3acf3b827509fd0b6cb7d3259665e63dafbad22a75ea", size = 16649088, upload-time = "2025-12-20T16:16:31.421Z" },
+ { url = "https://files.pythonhosted.org/packages/e3/37/cc636f1f2a9f585434e20a3e6e63422f70bfe4f7f6698e941db52ea1ac9a/numpy-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:39b19251dec4de8ff8496cd0806cbe27bf0684f765abb1f4809554de93785f2d", size = 12364065, upload-time = "2025-12-20T16:16:33.491Z" },
+ { url = "https://files.pythonhosted.org/packages/ed/69/0b78f37ca3690969beee54103ce5f6021709134e8020767e93ba691a72f1/numpy-2.4.0-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:009bd0ea12d3c784b6639a8457537016ce5172109e585338e11334f6a7bb88ee", size = 5192640, upload-time = "2025-12-20T16:16:35.636Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/2a/08569f8252abf590294dbb09a430543ec8f8cc710383abfb3e75cc73aeda/numpy-2.4.0-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5fe44e277225fd3dff6882d86d3d447205d43532c3627313d17e754fb3905a0e", size = 6541556, upload-time = "2025-12-20T16:16:37.276Z" },
+ { url = "https://files.pythonhosted.org/packages/93/e9/a949885a4e177493d61519377952186b6cbfdf1d6002764c664ba28349b5/numpy-2.4.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f935c4493eda9069851058fa0d9e39dbf6286be690066509305e52912714dbb2", size = 14396562, upload-time = "2025-12-20T16:16:38.953Z" },
+ { url = "https://files.pythonhosted.org/packages/99/98/9d4ad53b0e9ef901c2ef1d550d2136f5ac42d3fd2988390a6def32e23e48/numpy-2.4.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8cfa5f29a695cb7438965e6c3e8d06e0416060cf0d709c1b1c1653a939bf5c2a", size = 16351719, upload-time = "2025-12-20T16:16:41.503Z" },
+ { url = "https://files.pythonhosted.org/packages/28/de/5f3711a38341d6e8dd619f6353251a0cdd07f3d6d101a8fd46f4ef87f895/numpy-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba0cb30acd3ef11c94dc27fbfba68940652492bc107075e7ffe23057f9425681", size = 16176053, upload-time = "2025-12-20T16:16:44.552Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/5b/2a3753dc43916501b4183532e7ace862e13211042bceafa253afb5c71272/numpy-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60e8c196cd82cbbd4f130b5290007e13e6de3eca79f0d4d38014769d96a7c475", size = 18277859, upload-time = "2025-12-20T16:16:47.174Z" },
+ { url = "https://files.pythonhosted.org/packages/2c/c5/a18bcdd07a941db3076ef489d036ab16d2bfc2eae0cf27e5a26e29189434/numpy-2.4.0-cp313-cp313-win32.whl", hash = "sha256:5f48cb3e88fbc294dc90e215d86fbaf1c852c63dbdb6c3a3e63f45c4b57f7344", size = 5953849, upload-time = "2025-12-20T16:16:49.554Z" },
+ { url = "https://files.pythonhosted.org/packages/4f/f1/719010ff8061da6e8a26e1980cf090412d4f5f8060b31f0c45d77dd67a01/numpy-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:a899699294f28f7be8992853c0c60741f16ff199205e2e6cdca155762cbaa59d", size = 12302840, upload-time = "2025-12-20T16:16:51.227Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/5a/b3d259083ed8b4d335270c76966cb6cf14a5d1b69e1a608994ac57a659e6/numpy-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:9198f447e1dc5647d07c9a6bbe2063cc0132728cc7175b39dbc796da5b54920d", size = 10308509, upload-time = "2025-12-20T16:16:53.313Z" },
+ { url = "https://files.pythonhosted.org/packages/31/01/95edcffd1bb6c0633df4e808130545c4f07383ab629ac7e316fb44fff677/numpy-2.4.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74623f2ab5cc3f7c886add4f735d1031a1d2be4a4ae63c0546cfd74e7a31ddf6", size = 12491815, upload-time = "2025-12-20T16:16:55.496Z" },
+ { url = "https://files.pythonhosted.org/packages/59/ea/5644b8baa92cc1c7163b4b4458c8679852733fa74ca49c942cfa82ded4e0/numpy-2.4.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:0804a8e4ab070d1d35496e65ffd3cf8114c136a2b81f61dfab0de4b218aacfd5", size = 5320321, upload-time = "2025-12-20T16:16:57.468Z" },
+ { url = "https://files.pythonhosted.org/packages/26/4e/e10938106d70bc21319bd6a86ae726da37edc802ce35a3a71ecdf1fdfe7f/numpy-2.4.0-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:02a2038eb27f9443a8b266a66911e926566b5a6ffd1a689b588f7f35b81e7dc3", size = 6641635, upload-time = "2025-12-20T16:16:59.379Z" },
+ { url = "https://files.pythonhosted.org/packages/b3/8d/a8828e3eaf5c0b4ab116924df82f24ce3416fa38d0674d8f708ddc6c8aac/numpy-2.4.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1889b3a3f47a7b5bee16bc25a2145bd7cb91897f815ce3499db64c7458b6d91d", size = 14456053, upload-time = "2025-12-20T16:17:01.768Z" },
+ { url = "https://files.pythonhosted.org/packages/68/a1/17d97609d87d4520aa5ae2dcfb32305654550ac6a35effb946d303e594ce/numpy-2.4.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85eef4cb5625c47ee6425c58a3502555e10f45ee973da878ac8248ad58c136f3", size = 16401702, upload-time = "2025-12-20T16:17:04.235Z" },
+ { url = "https://files.pythonhosted.org/packages/18/32/0f13c1b2d22bea1118356b8b963195446f3af124ed7a5adfa8fdecb1b6ca/numpy-2.4.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6dc8b7e2f4eb184b37655195f421836cfae6f58197b67e3ffc501f1333d993fa", size = 16242493, upload-time = "2025-12-20T16:17:06.856Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/23/48f21e3d309fbc137c068a1475358cbd3a901b3987dcfc97a029ab3068e2/numpy-2.4.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:44aba2f0cafd287871a495fb3163408b0bd25bbce135c6f621534a07f4f7875c", size = 18324222, upload-time = "2025-12-20T16:17:09.392Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/52/41f3d71296a3dcaa4f456aaa3c6fc8e745b43d0552b6bde56571bb4b4a0f/numpy-2.4.0-cp313-cp313t-win32.whl", hash = "sha256:20c115517513831860c573996e395707aa9fb691eb179200125c250e895fcd93", size = 6076216, upload-time = "2025-12-20T16:17:11.437Z" },
+ { url = "https://files.pythonhosted.org/packages/35/ff/46fbfe60ab0710d2a2b16995f708750307d30eccbb4c38371ea9e986866e/numpy-2.4.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b48e35f4ab6f6a7597c46e301126ceba4c44cd3280e3750f85db48b082624fa4", size = 12444263, upload-time = "2025-12-20T16:17:13.182Z" },
+ { url = "https://files.pythonhosted.org/packages/a3/e3/9189ab319c01d2ed556c932ccf55064c5d75bb5850d1df7a482ce0badead/numpy-2.4.0-cp313-cp313t-win_arm64.whl", hash = "sha256:4d1cfce39e511069b11e67cd0bd78ceff31443b7c9e5c04db73c7a19f572967c", size = 10378265, upload-time = "2025-12-20T16:17:15.211Z" },
+ { url = "https://files.pythonhosted.org/packages/ab/ed/52eac27de39d5e5a6c9aadabe672bc06f55e24a3d9010cd1183948055d76/numpy-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c95eb6db2884917d86cde0b4d4cf31adf485c8ec36bf8696dd66fa70de96f36b", size = 16647476, upload-time = "2025-12-20T16:17:17.671Z" },
+ { url = "https://files.pythonhosted.org/packages/77/c0/990ce1b7fcd4e09aeaa574e2a0a839589e4b08b2ca68070f1acb1fea6736/numpy-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:65167da969cd1ec3a1df31cb221ca3a19a8aaa25370ecb17d428415e93c1935e", size = 12374563, upload-time = "2025-12-20T16:17:20.216Z" },
+ { url = "https://files.pythonhosted.org/packages/37/7c/8c5e389c6ae8f5fd2277a988600d79e9625db3fff011a2d87ac80b881a4c/numpy-2.4.0-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3de19cfecd1465d0dcf8a5b5ea8b3155b42ed0b639dba4b71e323d74f2a3be5e", size = 5203107, upload-time = "2025-12-20T16:17:22.47Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/94/ca5b3bd6a8a70a5eec9a0b8dd7f980c1eff4b8a54970a9a7fef248ef564f/numpy-2.4.0-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6c05483c3136ac4c91b4e81903cb53a8707d316f488124d0398499a4f8e8ef51", size = 6538067, upload-time = "2025-12-20T16:17:24.001Z" },
+ { url = "https://files.pythonhosted.org/packages/79/43/993eb7bb5be6761dde2b3a3a594d689cec83398e3f58f4758010f3b85727/numpy-2.4.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36667db4d6c1cea79c8930ab72fadfb4060feb4bfe724141cd4bd064d2e5f8ce", size = 14411926, upload-time = "2025-12-20T16:17:25.822Z" },
+ { url = "https://files.pythonhosted.org/packages/03/75/d4c43b61de473912496317a854dac54f1efec3eeb158438da6884b70bb90/numpy-2.4.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9a818668b674047fd88c4cddada7ab8f1c298812783e8328e956b78dc4807f9f", size = 16354295, upload-time = "2025-12-20T16:17:28.308Z" },
+ { url = "https://files.pythonhosted.org/packages/b8/0a/b54615b47ee8736a6461a4bb6749128dd3435c5a759d5663f11f0e9af4ac/numpy-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1ee32359fb7543b7b7bd0b2f46294db27e29e7bbdf70541e81b190836cd83ded", size = 16190242, upload-time = "2025-12-20T16:17:30.993Z" },
+ { url = "https://files.pythonhosted.org/packages/98/ce/ea207769aacad6246525ec6c6bbd66a2bf56c72443dc10e2f90feed29290/numpy-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e493962256a38f58283de033d8af176c5c91c084ea30f15834f7545451c42059", size = 18280875, upload-time = "2025-12-20T16:17:33.327Z" },
+ { url = "https://files.pythonhosted.org/packages/17/ef/ec409437aa962ea372ed601c519a2b141701683ff028f894b7466f0ab42b/numpy-2.4.0-cp314-cp314-win32.whl", hash = "sha256:6bbaebf0d11567fa8926215ae731e1d58e6ec28a8a25235b8a47405d301332db", size = 6002530, upload-time = "2025-12-20T16:17:35.729Z" },
+ { url = "https://files.pythonhosted.org/packages/5f/4a/5cb94c787a3ed1ac65e1271b968686521169a7b3ec0b6544bb3ca32960b0/numpy-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d857f55e7fdf7c38ab96c4558c95b97d1c685be6b05c249f5fdafcbd6f9899e", size = 12435890, upload-time = "2025-12-20T16:17:37.599Z" },
+ { url = "https://files.pythonhosted.org/packages/48/a0/04b89db963af9de1104975e2544f30de89adbf75b9e75f7dd2599be12c79/numpy-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:bb50ce5fb202a26fd5404620e7ef820ad1ab3558b444cb0b55beb7ef66cd2d63", size = 10591892, upload-time = "2025-12-20T16:17:39.649Z" },
+ { url = "https://files.pythonhosted.org/packages/53/e5/d74b5ccf6712c06c7a545025a6a71bfa03bdc7e0568b405b0d655232fd92/numpy-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:355354388cba60f2132df297e2d53053d4063f79077b67b481d21276d61fc4df", size = 12494312, upload-time = "2025-12-20T16:17:41.714Z" },
+ { url = "https://files.pythonhosted.org/packages/c2/08/3ca9cc2ddf54dfee7ae9a6479c071092a228c68aef08252aa08dac2af002/numpy-2.4.0-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:1d8f9fde5f6dc1b6fc34df8162f3b3079365468703fee7f31d4e0cc8c63baed9", size = 5322862, upload-time = "2025-12-20T16:17:44.145Z" },
+ { url = "https://files.pythonhosted.org/packages/87/74/0bb63a68394c0c1e52670cfff2e309afa41edbe11b3327d9af29e4383f34/numpy-2.4.0-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e0434aa22c821f44eeb4c650b81c7fbdd8c0122c6c4b5a576a76d5a35625ecd9", size = 6644986, upload-time = "2025-12-20T16:17:46.203Z" },
+ { url = "https://files.pythonhosted.org/packages/06/8f/9264d9bdbcf8236af2823623fe2f3981d740fc3461e2787e231d97c38c28/numpy-2.4.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:40483b2f2d3ba7aad426443767ff5632ec3156ef09742b96913787d13c336471", size = 14457958, upload-time = "2025-12-20T16:17:48.017Z" },
+ { url = "https://files.pythonhosted.org/packages/8c/d9/f9a69ae564bbc7236a35aa883319364ef5fd41f72aa320cc1cbe66148fe2/numpy-2.4.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6a7664ddd9746e20b7325351fe1a8408d0a2bf9c63b5e898290ddc8f09544", size = 16398394, upload-time = "2025-12-20T16:17:50.409Z" },
+ { url = "https://files.pythonhosted.org/packages/34/c7/39241501408dde7f885d241a98caba5421061a2c6d2b2197ac5e3aa842d8/numpy-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ecb0019d44f4cdb50b676c5d0cb4b1eae8e15d1ed3d3e6639f986fc92b2ec52c", size = 16241044, upload-time = "2025-12-20T16:17:52.661Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/95/cae7effd90e065a95e59fe710eeee05d7328ed169776dfdd9f789e032125/numpy-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d0ffd9e2e4441c96a9c91ec1783285d80bf835b677853fc2770a89d50c1e48ac", size = 18321772, upload-time = "2025-12-20T16:17:54.947Z" },
+ { url = "https://files.pythonhosted.org/packages/96/df/3c6c279accd2bfb968a76298e5b276310bd55d243df4fa8ac5816d79347d/numpy-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:77f0d13fa87036d7553bf81f0e1fe3ce68d14c9976c9851744e4d3e91127e95f", size = 6148320, upload-time = "2025-12-20T16:17:57.249Z" },
+ { url = "https://files.pythonhosted.org/packages/92/8d/f23033cce252e7a75cae853d17f582e86534c46404dea1c8ee094a9d6d84/numpy-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b1f5b45829ac1848893f0ddf5cb326110604d6df96cdc255b0bf9edd154104d4", size = 12623460, upload-time = "2025-12-20T16:17:58.963Z" },
+ { url = "https://files.pythonhosted.org/packages/a4/4f/1f8475907d1a7c4ef9020edf7f39ea2422ec896849245f00688e4b268a71/numpy-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:23a3e9d1a6f360267e8fbb38ba5db355a6a7e9be71d7fce7ab3125e88bb646c8", size = 10661799, upload-time = "2025-12-20T16:18:01.078Z" },
]
[[package]]
name = "openai"
-version = "2.13.0"
+version = "2.14.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -1035,9 +1022,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/0f/39/8e347e9fda125324d253084bb1b82407e5e3c7777a03dc398f79b2d95626/openai-2.13.0.tar.gz", hash = "sha256:9ff633b07a19469ec476b1e2b5b26c5ef700886524a7a72f65e6f0b5203142d5", size = 626583, upload-time = "2025-12-16T18:19:44.387Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/b1/12fe1c196bea326261718eb037307c1c1fe1dedc2d2d4de777df822e6238/openai-2.14.0.tar.gz", hash = "sha256:419357bedde9402d23bf8f2ee372fca1985a73348debba94bddff06f19459952", size = 626938, upload-time = "2025-12-19T03:28:45.742Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/bb/d5/eb52edff49d3d5ea116e225538c118699ddeb7c29fa17ec28af14bc10033/openai-2.13.0-py3-none-any.whl", hash = "sha256:746521065fed68df2f9c2d85613bb50844343ea81f60009b60e6a600c9352c79", size = 1066837, upload-time = "2025-12-16T18:19:43.124Z" },
+ { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" },
]
[[package]]
@@ -1306,6 +1293,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
]
+[[package]]
+name = "pyright"
+version = "1.1.408"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "nodeenv" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = "sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" },
+]
+
[[package]]
name = "pytest"
version = "9.0.2"
@@ -1460,12 +1460,14 @@ wheels = [
[[package]]
name = "reviewbot"
-version = "0.1.0"
+version = "0.3.0"
source = { editable = "." }
dependencies = [
{ name = "dotenv" },
{ name = "faiss-cpu" },
{ name = "fastapi" },
+ { name = "httpx" },
+ { name = "ido-agents" },
{ name = "langchain" },
{ name = "langchain-community" },
{ name = "langchain-google-genai" },
@@ -1478,7 +1480,6 @@ dependencies = [
{ name = "transformers" },
{ name = "typer" },
{ name = "uvicorn" },
- { name = "xai-review" },
]
[package.optional-dependencies]
@@ -1488,6 +1489,7 @@ examples = [
[package.dev-dependencies]
dev = [
+ { name = "pyright" },
{ name = "ruff" },
{ name = "ty" },
]
@@ -1498,6 +1500,8 @@ requires-dist = [
{ name = "faiss-cpu", specifier = ">=1.13.1" },
{ name = "fastapi", specifier = ">=0.125.0" },
{ name = "fastapi", marker = "extra == 'examples'" },
+ { name = "httpx", specifier = ">=0.28.1" },
+ { name = "ido-agents", git = "https://github.com/canefe/ido-agents.git?rev=v0.1.4" },
{ name = "langchain", specifier = ">=1.2.0" },
{ name = "langchain-community", specifier = ">=0.4.1" },
{ name = "langchain-google-genai", specifier = ">=4.1.2" },
@@ -1510,12 +1514,12 @@ requires-dist = [
{ name = "transformers", specifier = ">=4.57.3" },
{ name = "typer", specifier = ">=0.20.0" },
{ name = "uvicorn", specifier = ">=0.40.0" },
- { name = "xai-review", specifier = ">=0.48.0" },
]
provides-extras = ["examples"]
[package.metadata.requires-dev]
dev = [
+ { name = "pyright", specifier = ">=1.1.408" },
{ name = "ruff", specifier = ">=0.8.6" },
{ name = "ty", specifier = ">=0.0.4" },
]
@@ -1547,28 +1551,28 @@ wheels = [
[[package]]
name = "ruff"
-version = "0.14.10"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" },
- { url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" },
- { url = "https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" },
- { url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" },
- { url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" },
- { url = "https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" },
- { url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" },
- { url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" },
- { url = "https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" },
- { url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" },
- { url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" },
- { url = "https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" },
- { url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" },
- { url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" },
- { url = "https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = "2025-12-18T19:29:16.531Z" },
- { url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" },
- { url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" },
- { url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = "sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" },
+version = "0.14.11"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d4/77/9a7fe084d268f8855d493e5031ea03fa0af8cc05887f638bf1c4e3363eb8/ruff-0.14.11.tar.gz", hash = "sha256:f6dc463bfa5c07a59b1ff2c3b9767373e541346ea105503b4c0369c520a66958", size = 5993417, upload-time = "2026-01-08T19:11:58.322Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/f0/a6/a4c40a5aaa7e331f245d2dc1ac8ece306681f52b636b40ef87c88b9f7afd/ruff-0.14.11-py3-none-linux_armv6l.whl", hash = "sha256:f6ff2d95cbd335841a7217bdfd9c1d2e44eac2c584197ab1385579d55ff8830e", size = 12951208, upload-time = "2026-01-08T19:12:09.218Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/5c/360a35cb7204b328b685d3129c08aca24765ff92b5a7efedbdd6c150d555/ruff-0.14.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f6eb5c1c8033680f4172ea9c8d3706c156223010b8b97b05e82c59bdc774ee6", size = 13330075, upload-time = "2026-01-08T19:12:02.549Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/9e/0cc2f1be7a7d33cae541824cf3f95b4ff40d03557b575912b5b70273c9ec/ruff-0.14.11-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f2fc34cc896f90080fca01259f96c566f74069a04b25b6205d55379d12a6855e", size = 12257809, upload-time = "2026-01-08T19:12:00.366Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/e5/5faab97c15bb75228d9f74637e775d26ac703cc2b4898564c01ab3637c02/ruff-0.14.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53386375001773ae812b43205d6064dae49ff0968774e6befe16a994fc233caa", size = 12678447, upload-time = "2026-01-08T19:12:13.899Z" },
+ { url = "https://files.pythonhosted.org/packages/1b/33/e9767f60a2bef779fb5855cab0af76c488e0ce90f7bb7b8a45c8a2ba4178/ruff-0.14.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a697737dce1ca97a0a55b5ff0434ee7205943d4874d638fe3ae66166ff46edbe", size = 12758560, upload-time = "2026-01-08T19:11:42.55Z" },
+ { url = "https://files.pythonhosted.org/packages/eb/84/4c6cf627a21462bb5102f7be2a320b084228ff26e105510cd2255ea868e5/ruff-0.14.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6845ca1da8ab81ab1dce755a32ad13f1db72e7fba27c486d5d90d65e04d17b8f", size = 13599296, upload-time = "2026-01-08T19:11:30.371Z" },
+ { url = "https://files.pythonhosted.org/packages/88/e1/92b5ed7ea66d849f6157e695dc23d5d6d982bd6aa8d077895652c38a7cae/ruff-0.14.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e36ce2fd31b54065ec6f76cb08d60159e1b32bdf08507862e32f47e6dde8bcbf", size = 15048981, upload-time = "2026-01-08T19:12:04.742Z" },
+ { url = "https://files.pythonhosted.org/packages/61/df/c1bd30992615ac17c2fb64b8a7376ca22c04a70555b5d05b8f717163cf9f/ruff-0.14.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590bcc0e2097ecf74e62a5c10a6b71f008ad82eb97b0a0079e85defe19fe74d9", size = 14633183, upload-time = "2026-01-08T19:11:40.069Z" },
+ { url = "https://files.pythonhosted.org/packages/04/e9/fe552902f25013dd28a5428a42347d9ad20c4b534834a325a28305747d64/ruff-0.14.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53fe71125fc158210d57fe4da26e622c9c294022988d08d9347ec1cf782adafe", size = 14050453, upload-time = "2026-01-08T19:11:37.555Z" },
+ { url = "https://files.pythonhosted.org/packages/ae/93/f36d89fa021543187f98991609ce6e47e24f35f008dfe1af01379d248a41/ruff-0.14.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a35c9da08562f1598ded8470fcfef2afb5cf881996e6c0a502ceb61f4bc9c8a3", size = 13757889, upload-time = "2026-01-08T19:12:07.094Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/9f/c7fb6ecf554f28709a6a1f2a7f74750d400979e8cd47ed29feeaa1bd4db8/ruff-0.14.11-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0f3727189a52179393ecf92ec7057c2210203e6af2676f08d92140d3e1ee72c1", size = 13955832, upload-time = "2026-01-08T19:11:55.064Z" },
+ { url = "https://files.pythonhosted.org/packages/db/a0/153315310f250f76900a98278cf878c64dfb6d044e184491dd3289796734/ruff-0.14.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:eb09f849bd37147a789b85995ff734a6c4a095bed5fd1608c4f56afc3634cde2", size = 12586522, upload-time = "2026-01-08T19:11:35.356Z" },
+ { url = "https://files.pythonhosted.org/packages/2f/2b/a73a2b6e6d2df1d74bf2b78098be1572191e54bec0e59e29382d13c3adc5/ruff-0.14.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:c61782543c1231bf71041461c1f28c64b961d457d0f238ac388e2ab173d7ecb7", size = 12724637, upload-time = "2026-01-08T19:11:47.796Z" },
+ { url = "https://files.pythonhosted.org/packages/f0/41/09100590320394401cd3c48fc718a8ba71c7ddb1ffd07e0ad6576b3a3df2/ruff-0.14.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:82ff352ea68fb6766140381748e1f67f83c39860b6446966cff48a315c3e2491", size = 13145837, upload-time = "2026-01-08T19:11:32.87Z" },
+ { url = "https://files.pythonhosted.org/packages/3b/d8/e035db859d1d3edf909381eb8ff3e89a672d6572e9454093538fe6f164b0/ruff-0.14.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:728e56879df4ca5b62a9dde2dd0eb0edda2a55160c0ea28c4025f18c03f86984", size = 13850469, upload-time = "2026-01-08T19:12:11.694Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/02/bb3ff8b6e6d02ce9e3740f4c17dfbbfb55f34c789c139e9cd91985f356c7/ruff-0.14.11-py3-none-win32.whl", hash = "sha256:337c5dd11f16ee52ae217757d9b82a26400be7efac883e9e852646f1557ed841", size = 12851094, upload-time = "2026-01-08T19:11:45.163Z" },
+ { url = "https://files.pythonhosted.org/packages/58/f1/90ddc533918d3a2ad628bc3044cdfc094949e6d4b929220c3f0eb8a1c998/ruff-0.14.11-py3-none-win_amd64.whl", hash = "sha256:f981cea63d08456b2c070e64b79cb62f951aa1305282974d4d5216e6e0178ae6", size = 14001379, upload-time = "2026-01-08T19:11:52.591Z" },
+ { url = "https://files.pythonhosted.org/packages/c4/1c/1dbe51782c0e1e9cfce1d1004752672d2d4629ea46945d19d731ad772b3b/ruff-0.14.11-py3-none-win_arm64.whl", hash = "sha256:649fb6c9edd7f751db276ef42df1f3df41c38d67d199570ae2a7bd6cbc3590f0", size = 12938644, upload-time = "2026-01-08T19:11:50.027Z" },
]
[[package]]
@@ -1703,27 +1707,28 @@ wheels = [
[[package]]
name = "tokenizers"
-version = "0.22.1"
+version = "0.22.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "huggingface-hub" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/73/6f/f80cfef4a312e1fb34baf7d85c72d4411afde10978d4657f8cdd811d3ccc/tokenizers-0.22.2.tar.gz", hash = "sha256:473b83b915e547aa366d1eee11806deaf419e17be16310ac0a14077f1e28f917", size = 372115, upload-time = "2026-01-05T10:45:15.988Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" },
- { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" },
- { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" },
- { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" },
- { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" },
- { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" },
- { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" },
- { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" },
- { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" },
- { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" },
- { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" },
- { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" },
- { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" },
- { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" },
+ { url = "https://files.pythonhosted.org/packages/92/97/5dbfabf04c7e348e655e907ed27913e03db0923abb5dfdd120d7b25630e1/tokenizers-0.22.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c", size = 3100275, upload-time = "2026-01-05T10:41:02.158Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/47/174dca0502ef88b28f1c9e06b73ce33500eedfac7a7692108aec220464e7/tokenizers-0.22.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001", size = 2981472, upload-time = "2026-01-05T10:41:00.276Z" },
+ { url = "https://files.pythonhosted.org/packages/d6/84/7990e799f1309a8b87af6b948f31edaa12a3ed22d11b352eaf4f4b2e5753/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249487018adec45d6e3554c71d46eb39fa8ea67156c640f7513eb26f318cec7", size = 3290736, upload-time = "2026-01-05T10:40:32.165Z" },
+ { url = "https://files.pythonhosted.org/packages/78/59/09d0d9ba94dcd5f4f1368d4858d24546b4bdc0231c2354aa31d6199f0399/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b85325d0815e86e0bac263506dd114578953b7b53d7de09a6485e4a160a7dd", size = 3168835, upload-time = "2026-01-05T10:40:38.847Z" },
+ { url = "https://files.pythonhosted.org/packages/47/50/b3ebb4243e7160bda8d34b731e54dd8ab8b133e50775872e7a434e524c28/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfb88f22a209ff7b40a576d5324bf8286b519d7358663db21d6246fb17eea2d5", size = 3521673, upload-time = "2026-01-05T10:40:56.614Z" },
+ { url = "https://files.pythonhosted.org/packages/e0/fa/89f4cb9e08df770b57adb96f8cbb7e22695a4cb6c2bd5f0c4f0ebcf33b66/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c774b1276f71e1ef716e5486f21e76333464f47bece56bbd554485982a9e03e", size = 3724818, upload-time = "2026-01-05T10:40:44.507Z" },
+ { url = "https://files.pythonhosted.org/packages/64/04/ca2363f0bfbe3b3d36e95bf67e56a4c88c8e3362b658e616d1ac185d47f2/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df6c4265b289083bf710dff49bc51ef252f9d5be33a45ee2bed151114a56207b", size = 3379195, upload-time = "2026-01-05T10:40:51.139Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/76/932be4b50ef6ccedf9d3c6639b056a967a86258c6d9200643f01269211ca/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369cc9fc8cc10cb24143873a0d95438bb8ee257bb80c71989e3ee290e8d72c67", size = 3274982, upload-time = "2026-01-05T10:40:58.331Z" },
+ { url = "https://files.pythonhosted.org/packages/1d/28/5f9f5a4cc211b69e89420980e483831bcc29dade307955cc9dc858a40f01/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:29c30b83d8dcd061078b05ae0cb94d3c710555fbb44861139f9f83dcca3dc3e4", size = 9478245, upload-time = "2026-01-05T10:41:04.053Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/fb/66e2da4704d6aadebf8cb39f1d6d1957df667ab24cff2326b77cda0dcb85/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:37ae80a28c1d3265bb1f22464c856bd23c02a05bb211e56d0c5301a435be6c1a", size = 9560069, upload-time = "2026-01-05T10:45:10.673Z" },
+ { url = "https://files.pythonhosted.org/packages/16/04/fed398b05caa87ce9b1a1bb5166645e38196081b225059a6edaff6440fac/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:791135ee325f2336f498590eb2f11dc5c295232f288e75c99a36c5dbce63088a", size = 9899263, upload-time = "2026-01-05T10:45:12.559Z" },
+ { url = "https://files.pythonhosted.org/packages/05/a1/d62dfe7376beaaf1394917e0f8e93ee5f67fea8fcf4107501db35996586b/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38337540fbbddff8e999d59970f3c6f35a82de10053206a7562f1ea02d046fa5", size = 10033429, upload-time = "2026-01-05T10:45:14.333Z" },
+ { url = "https://files.pythonhosted.org/packages/fd/18/a545c4ea42af3df6effd7d13d250ba77a0a86fb20393143bbb9a92e434d4/tokenizers-0.22.2-cp39-abi3-win32.whl", hash = "sha256:a6bf3f88c554a2b653af81f3204491c818ae2ac6fbc09e76ef4773351292bc92", size = 2502363, upload-time = "2026-01-05T10:45:20.593Z" },
+ { url = "https://files.pythonhosted.org/packages/65/71/0670843133a43d43070abeb1949abfdef12a86d490bea9cd9e18e37c5ff7/tokenizers-0.22.2-cp39-abi3-win_amd64.whl", hash = "sha256:c9ea31edff2968b44a88f97d784c2f16dc0729b8b143ed004699ebca91f05c48", size = 2747786, upload-time = "2026-01-05T10:45:18.411Z" },
+ { url = "https://files.pythonhosted.org/packages/72/f4/0de46cfa12cdcbcd464cc59fde36912af405696f687e53a091fb432f694c/tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc", size = 2612133, upload-time = "2026-01-05T10:45:17.232Z" },
]
[[package]]
@@ -1761,32 +1766,31 @@ wheels = [
[[package]]
name = "ty"
-version = "0.0.4"
+version = "0.0.10"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/48/d9/97d5808e851f790e58f8a54efb5c7b9f404640baf9e295f424846040b316/ty-0.0.4.tar.gz", hash = "sha256:2ea47a0089d74730658ec4e988c8ef476a1e9bd92df3e56709c4003c2895ff3b", size = 4780289, upload-time = "2025-12-19T00:13:53.12Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/b7/85/97b5276baa217e05db2fe3d5c61e4dfd35d1d3d0ec95bfca1986820114e0/ty-0.0.10.tar.gz", hash = "sha256:0a1f9f7577e56cd508a8f93d0be2a502fdf33de6a7d65a328a4c80b784f4ac5f", size = 4892892, upload-time = "2026-01-07T23:00:23.572Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/b1/94/b32a962243cc8a16e8dc74cf1fe75e8bb013d0e13e71bb540e2c86214b61/ty-0.0.4-py3-none-linux_armv6l.whl", hash = "sha256:5225da65a8d1defeb21ee9d74298b1b97c6cbab36e235a310c1430d9079e4b6a", size = 9762399, upload-time = "2025-12-19T00:14:11.261Z" },
- { url = "https://files.pythonhosted.org/packages/d1/d2/7c76e0c22ddfc2fcd4a3458a65f87ce074070eb1c68c07ee475cc2b6ea68/ty-0.0.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f87770d7988f470b795a2043185082fa959dbe1979a11b4bfe20f1214d37bd6e", size = 9590410, upload-time = "2025-12-19T00:13:55.759Z" },
- { url = "https://files.pythonhosted.org/packages/a5/84/de4b1fc85669faca3622071d5a3f3ec7bfb239971f368c28fae461d3398a/ty-0.0.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ecf68b8ea48674a289d733b4786aecc259242a2d9a920b3ec8583db18c67496a", size = 9131113, upload-time = "2025-12-19T00:14:08.593Z" },
- { url = "https://files.pythonhosted.org/packages/a7/ff/b5bf385b6983be56a470856bbcbac1b7e816bcd765a7e9d39ab2399e387d/ty-0.0.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efc396d76a57e527393cae4ee8faf23b93be3df9e93202f39925721a7a2bb7b8", size = 9599152, upload-time = "2025-12-19T00:13:40.484Z" },
- { url = "https://files.pythonhosted.org/packages/36/d6/9880ba106f2f20d13e6a5dca5d5ca44bfb3782936ee67ff635f89a2959c0/ty-0.0.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c893b968d2f9964a4d4db9992c9ba66b01f411b1f48dffcde08622e19cd6ab97", size = 9585368, upload-time = "2025-12-19T00:14:00.994Z" },
- { url = "https://files.pythonhosted.org/packages/3f/53/503cfc18bc4c7c4e02f89dd43debc41a6e343b41eb43df658dfb493a386d/ty-0.0.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:526c925b80d68a53c165044d2370fcfc0def1f119f7b7e483ee61d24da6fb891", size = 9998412, upload-time = "2025-12-19T00:14:18.653Z" },
- { url = "https://files.pythonhosted.org/packages/1d/bd/dd2d3e29834da5add2eda0ab5b433171ce9ce9a248c364d2e237f82073d7/ty-0.0.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:857f605a7fa366b6c6e6f38abc311d0606be513c2bee8977b5c8fd4bde1a82d5", size = 10853890, upload-time = "2025-12-19T00:13:50.891Z" },
- { url = "https://files.pythonhosted.org/packages/07/fe/28ba3be1672e6b8df46e43de66a02dc076ffba7853d391a5466421886225/ty-0.0.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4cc981aa3ebdac2c233421b1e58c80b0df6a8e6e6fa8b9e69fbdfd2f82768af", size = 10587263, upload-time = "2025-12-19T00:14:21.577Z" },
- { url = "https://files.pythonhosted.org/packages/26/9c/bb598772043f686afe5bc26cb386020709c1a0bcc164bc22ad9da2b4f55d/ty-0.0.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b03b2708b0bf67c76424a860f848aebaa4772c05529170c3761bfcaea93ec199", size = 10401204, upload-time = "2025-12-19T00:13:43.453Z" },
- { url = "https://files.pythonhosted.org/packages/ac/18/71765e9d63669bf09461c3fea84a7a63232ccb0e83b84676f07b987fc217/ty-0.0.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:469890e885544beb129c21e2f8f15321f0573d094aec13da68593c5f86389ff9", size = 10129713, upload-time = "2025-12-19T00:14:13.725Z" },
- { url = "https://files.pythonhosted.org/packages/c3/2d/c03eba570aa85e9c361de5ed36d60b9ab139e93ee91057f455ab4af48e54/ty-0.0.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:abfd928d09567e12068aeca875e920def3badf1978896f474aa4b85b552703c4", size = 9586203, upload-time = "2025-12-19T00:14:03.423Z" },
- { url = "https://files.pythonhosted.org/packages/61/f1/8c3c82a8df69bd4417c77be4f895d043db26dd47bfcc90b33dc109cd0096/ty-0.0.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:44b8e94f9d64df12eae4cf8031c5ca9a4c610b57092b26ad3d68d91bcc7af122", size = 9608230, upload-time = "2025-12-19T00:13:58.252Z" },
- { url = "https://files.pythonhosted.org/packages/51/0c/d8ba3a85c089c246ef6bd49d0f0b40bc0f9209bb819e8c02ccbea5cb4d57/ty-0.0.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9d6a439813e21a06769daf858105818c385d88018929d4a56970d4ddd5cd3df2", size = 9725125, upload-time = "2025-12-19T00:14:05.996Z" },
- { url = "https://files.pythonhosted.org/packages/4d/38/e30f64ad1e40905c766576ec70cffc69163591a5842ce14652672f6ab394/ty-0.0.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c3cfcf26cfe6c828e91d7a529cc2dda37bc3b51ba06909c9be07002a6584af52", size = 10237174, upload-time = "2025-12-19T00:14:23.858Z" },
- { url = "https://files.pythonhosted.org/packages/cb/d7/8d650aa0be8936dd3ed74e2b0655230e2904caa6077c30c16a089b523cff/ty-0.0.4-py3-none-win32.whl", hash = "sha256:58bbf70dd27af6b00dedbdebeec92d5993aa238664f96fa5c0064930f7a0d30b", size = 9188434, upload-time = "2025-12-19T00:13:45.875Z" },
- { url = "https://files.pythonhosted.org/packages/82/d7/9fc0c81cf0b0d281ac9c18bfbdb4d6bae2173503ba79e40b210ab41c2c8b/ty-0.0.4-py3-none-win_amd64.whl", hash = "sha256:7c2db0f96218f08c140bd9d3fcbb1b3c8c5c4f0c9b0a5624487f0a2bf4b76163", size = 10019313, upload-time = "2025-12-19T00:14:15.968Z" },
- { url = "https://files.pythonhosted.org/packages/5f/b8/3e3246738eed1cd695c5964a401f3b9c757d20ac21fdae06281af9f40ef6/ty-0.0.4-py3-none-win_arm64.whl", hash = "sha256:69f14fc98e4a847afa9f8c5d5234d008820dbc09c7dcdb3ac1ba16628f5132df", size = 9561857, upload-time = "2025-12-19T00:13:48.382Z" },
+ { url = "https://files.pythonhosted.org/packages/0c/7a/5a7147ce5231c3ccc55d6f945dabd7412e233e755d28093bfdec988ba595/ty-0.0.10-py3-none-linux_armv6l.whl", hash = "sha256:406a8ea4e648551f885629b75dc3f070427de6ed099af45e52051d4c68224829", size = 9835881, upload-time = "2026-01-07T22:08:17.492Z" },
+ { url = "https://files.pythonhosted.org/packages/3e/7d/89f4d2277c938332d047237b47b11b82a330dbff4fff0de8574cba992128/ty-0.0.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d6e0a733e3d6d3bce56d6766bc61923e8b130241088dc2c05e3c549487190096", size = 9696404, upload-time = "2026-01-07T22:08:37.965Z" },
+ { url = "https://files.pythonhosted.org/packages/e8/cd/9dd49e6d40e54d4b7d563f9e2a432c4ec002c0673a81266e269c4bc194ce/ty-0.0.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e4832f8879cb95fc725f7e7fcab4f22be0cf2550f3a50641d5f4409ee04176d4", size = 9181195, upload-time = "2026-01-07T22:59:07.187Z" },
+ { url = "https://files.pythonhosted.org/packages/d2/b8/3e7c556654ba0569ed5207138d318faf8633d87e194760fc030543817c26/ty-0.0.10-py3-none-manylinux_2_24_aarch64.whl", hash = "sha256:6b58cc78e5865bc908f053559a80bb77cab0dc168aaad2e88f2b47955694b138", size = 9665002, upload-time = "2026-01-07T22:08:30.782Z" },
+ { url = "https://files.pythonhosted.org/packages/98/96/410a483321406c932c4e3aa1581d1072b72cdcde3ae83cd0664a65c7b254/ty-0.0.10-py3-none-manylinux_2_24_armv7l.whl", hash = "sha256:83c6a514bb86f05005fa93e3b173ae3fde94d291d994bed6fe1f1d2e5c7331cf", size = 9664948, upload-time = "2026-01-07T23:04:14.655Z" },
+ { url = "https://files.pythonhosted.org/packages/1f/5d/cba2ab3e2f660763a72ad12620d0739db012e047eaa0ceaa252bf5e94ebb/ty-0.0.10-py3-none-manylinux_2_24_i686.whl", hash = "sha256:2e43f71e357f8a4f7fc75e4753b37beb2d0f297498055b1673a9306aa3e21897", size = 10125401, upload-time = "2026-01-07T22:08:28.171Z" },
+ { url = "https://files.pythonhosted.org/packages/a7/67/29536e0d97f204a2933122239298e754db4564f4ed7f34e2153012b954be/ty-0.0.10-py3-none-manylinux_2_24_ppc64le.whl", hash = "sha256:18be3c679965c23944c8e574be0635504398c64c55f3f0c46259464e10c0a1c7", size = 10714052, upload-time = "2026-01-07T22:08:20.098Z" },
+ { url = "https://files.pythonhosted.org/packages/63/c8/82ac83b79a71c940c5dcacb644f526f0c8fdf4b5e9664065ab7ee7c0e4ec/ty-0.0.10-py3-none-manylinux_2_24_s390x.whl", hash = "sha256:5477981681440a35acdf9b95c3097410c547abaa32b893f61553dbc3b0096fff", size = 10395924, upload-time = "2026-01-07T22:08:22.839Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/4c/2f9ac5edbd0e67bf82f5cd04275c4e87cbbf69a78f43e5dcf90c1573d44e/ty-0.0.10-py3-none-manylinux_2_24_x86_64.whl", hash = "sha256:e206a23bd887574302138b33383ae1edfcc39d33a06a12a5a00803b3f0287a45", size = 10220096, upload-time = "2026-01-07T22:08:13.171Z" },
+ { url = "https://files.pythonhosted.org/packages/04/13/3be2b7bfd53b9952b39b6f2c2ef55edeb1a2fea3bf0285962736ee26731c/ty-0.0.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4e09ddb0d3396bd59f645b85eab20f9a72989aa8b736b34338dcb5ffecfe77b6", size = 9649120, upload-time = "2026-01-07T22:08:34.003Z" },
+ { url = "https://files.pythonhosted.org/packages/93/e3/edd58547d9fd01e4e584cec9dced4f6f283506b422cdd953e946f6a8e9f0/ty-0.0.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:139d2a741579ad86a044233b5d7e189bb81f427eebce3464202f49c3ec0eba3b", size = 9686033, upload-time = "2026-01-07T22:08:40.967Z" },
+ { url = "https://files.pythonhosted.org/packages/cc/bc/9d2f5fec925977446d577fb9b322d0e7b1b1758709f23a6cfc10231e9b84/ty-0.0.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6bae10420c0abfe4601fbbc6ce637b67d0b87a44fa520283131a26da98f2e74c", size = 9841905, upload-time = "2026-01-07T23:04:21.694Z" },
+ { url = "https://files.pythonhosted.org/packages/7c/b8/5acd3492b6a4ef255ace24fcff0d4b1471a05b7f3758d8910a681543f899/ty-0.0.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7358bbc5d037b9c59c3a48895206058bcd583985316c4125a74dd87fd1767adb", size = 10320058, upload-time = "2026-01-07T22:08:25.645Z" },
+ { url = "https://files.pythonhosted.org/packages/35/67/5b6906fccef654c7e801d6ac8dcbe0d493e1f04c38127f82a5e6d7e0aa0e/ty-0.0.10-py3-none-win32.whl", hash = "sha256:f51b6fd485bc695d0fdf555e69e6a87d1c50f14daef6cb980c9c941e12d6bcba", size = 9271806, upload-time = "2026-01-07T22:08:10.08Z" },
+ { url = "https://files.pythonhosted.org/packages/42/36/82e66b9753a76964d26fd9bc3514ea0abce0a5ba5ad7d5f084070c6981da/ty-0.0.10-py3-none-win_amd64.whl", hash = "sha256:16deb77a72cf93b89b4d29577829613eda535fbe030513dfd9fba70fe38bc9f5", size = 10130520, upload-time = "2026-01-07T23:04:11.759Z" },
+ { url = "https://files.pythonhosted.org/packages/63/52/89da123f370e80b587d2db8551ff31562c882d87b32b0e92b59504b709ae/ty-0.0.10-py3-none-win_arm64.whl", hash = "sha256:7495288bca7afba9a4488c9906466d648ffd3ccb6902bc3578a6dbd91a8f05f0", size = 9626026, upload-time = "2026-01-07T23:04:17.91Z" },
]
[[package]]
name = "typer"
-version = "0.20.0"
+version = "0.21.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "click" },
@@ -1794,9 +1798,9 @@ dependencies = [
{ name = "shellingham" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/8f/28/7c85c8032b91dbe79725b6f17d2fffc595dff06a35c7a30a37bef73a1ab4/typer-0.20.0.tar.gz", hash = "sha256:1aaf6494031793e4876fb0bacfa6a912b551cf43c1e63c800df8b1a866720c37", size = 106492, upload-time = "2025-10-20T17:03:49.445Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/36/bf/8825b5929afd84d0dabd606c67cd57b8388cb3ec385f7ef19c5cc2202069/typer-0.21.1.tar.gz", hash = "sha256:ea835607cd752343b6b2b7ce676893e5a0324082268b48f27aa058bdb7d2145d", size = 110371, upload-time = "2026-01-06T11:21:10.989Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/78/64/7713ffe4b5983314e9d436a90d5bd4f63b6054e2aca783a3cfc44cb95bbf/typer-0.20.0-py3-none-any.whl", hash = "sha256:5b463df6793ec1dca6213a3cf4c0f03bc6e322ac5e16e13ddd622a889489784a", size = 47028, upload-time = "2025-10-20T17:03:47.617Z" },
+ { url = "https://files.pythonhosted.org/packages/a0/1d/d9257dd49ff2ca23ea5f132edf1281a0c4f9de8a762b9ae399b670a59235/typer-0.21.1-py3-none-any.whl", hash = "sha256:7985e89081c636b88d172c2ee0cfe33c253160994d47bdfdc302defd7d1f1d01", size = 47381, upload-time = "2026-01-06T11:21:09.824Z" },
]
[[package]]
@@ -1835,33 +1839,33 @@ wheels = [
[[package]]
name = "urllib3"
-version = "2.6.2"
+version = "2.6.3"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1e/24/a2a2ed9addd907787d7aa0355ba36a6cadf1768b934c652ea78acbd59dcd/urllib3-2.6.2.tar.gz", hash = "sha256:016f9c98bb7e98085cb2b4b17b87d2c702975664e4f060c6532e64d1c1a5e797", size = 432930, upload-time = "2025-12-11T15:56:40.252Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/6d/b9/4095b668ea3678bf6a0af005527f39de12fb026516fb3df17495a733b7f8/urllib3-2.6.2-py3-none-any.whl", hash = "sha256:ec21cddfe7724fc7cb4ba4bea7aa8e2ef36f607a4bab81aa6ce42a13dc3f03dd", size = 131182, upload-time = "2025-12-11T15:56:38.584Z" },
+ { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" },
]
[[package]]
name = "uuid-utils"
-version = "0.12.0"
+version = "0.13.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/0b/0e/512fb221e4970c2f75ca9dae412d320b7d9ddc9f2b15e04ea8e44710396c/uuid_utils-0.12.0.tar.gz", hash = "sha256:252bd3d311b5d6b7f5dfce7a5857e27bb4458f222586bb439463231e5a9cbd64", size = 20889, upload-time = "2025-12-01T17:29:55.494Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/8a/17b11768dcb473d3a255c02ffdd94fbd1b345c906efea0a39124dcbaed52/uuid_utils-0.13.0.tar.gz", hash = "sha256:4c17df6427a9e23a4cd7fb9ee1efb53b8abb078660b9bdb2524ca8595022dfe1", size = 21921, upload-time = "2026-01-08T15:48:10.841Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/8a/43/de5cd49a57b6293b911b6a9a62fc03e55db9f964da7d5882d9edbee1e9d2/uuid_utils-0.12.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3b9b30707659292f207b98f294b0e081f6d77e1fbc760ba5b41331a39045f514", size = 603197, upload-time = "2025-12-01T17:29:30.104Z" },
- { url = "https://files.pythonhosted.org/packages/02/fa/5fd1d8c9234e44f0c223910808cde0de43bb69f7df1349e49b1afa7f2baa/uuid_utils-0.12.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:add3d820c7ec14ed37317375bea30249699c5d08ff4ae4dbee9fc9bce3bfbf65", size = 305168, upload-time = "2025-12-01T17:29:31.384Z" },
- { url = "https://files.pythonhosted.org/packages/c8/c6/8633ac9942bf9dc97a897b5154e5dcffa58816ec4dd780b3b12b559ff05c/uuid_utils-0.12.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8fce83ecb3b16af29c7809669056c4b6e7cc912cab8c6d07361645de12dd79", size = 340580, upload-time = "2025-12-01T17:29:32.362Z" },
- { url = "https://files.pythonhosted.org/packages/f3/88/8a61307b04b4da1c576373003e6d857a04dade52ab035151d62cb84d5cb5/uuid_utils-0.12.0-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec921769afcb905035d785582b0791d02304a7850fbd6ce924c1a8976380dfc6", size = 346771, upload-time = "2025-12-01T17:29:33.708Z" },
- { url = "https://files.pythonhosted.org/packages/1c/fb/aab2dcf94b991e62aa167457c7825b9b01055b884b888af926562864398c/uuid_utils-0.12.0-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f3b060330f5899a92d5c723547dc6a95adef42433e9748f14c66859a7396664", size = 474781, upload-time = "2025-12-01T17:29:35.237Z" },
- { url = "https://files.pythonhosted.org/packages/5a/7a/dbd5e49c91d6c86dba57158bbfa0e559e1ddf377bb46dcfd58aea4f0d567/uuid_utils-0.12.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:908dfef7f0bfcf98d406e5dc570c25d2f2473e49b376de41792b6e96c1d5d291", size = 343685, upload-time = "2025-12-01T17:29:36.677Z" },
- { url = "https://files.pythonhosted.org/packages/1a/19/8c4b1d9f450159733b8be421a4e1fb03533709b80ed3546800102d085572/uuid_utils-0.12.0-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4c6a24148926bd0ca63e8a2dabf4cc9dc329a62325b3ad6578ecd60fbf926506", size = 366482, upload-time = "2025-12-01T17:29:37.979Z" },
- { url = "https://files.pythonhosted.org/packages/82/43/c79a6e45687647f80a159c8ba34346f287b065452cc419d07d2212d38420/uuid_utils-0.12.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:64a91e632669f059ef605f1771d28490b1d310c26198e46f754e8846dddf12f4", size = 523132, upload-time = "2025-12-01T17:29:39.293Z" },
- { url = "https://files.pythonhosted.org/packages/5a/a2/b2d75a621260a40c438aa88593827dfea596d18316520a99e839f7a5fb9d/uuid_utils-0.12.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:93c082212470bb4603ca3975916c205a9d7ef1443c0acde8fbd1e0f5b36673c7", size = 614218, upload-time = "2025-12-01T17:29:40.315Z" },
- { url = "https://files.pythonhosted.org/packages/13/6b/ba071101626edd5a6dabf8525c9a1537ff3d885dbc210540574a03901fef/uuid_utils-0.12.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:431b1fb7283ba974811b22abd365f2726f8f821ab33f0f715be389640e18d039", size = 546241, upload-time = "2025-12-01T17:29:41.656Z" },
- { url = "https://files.pythonhosted.org/packages/01/12/9a942b81c0923268e6d85bf98d8f0a61fcbcd5e432fef94fdf4ce2ef8748/uuid_utils-0.12.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2ffd7838c40149100299fa37cbd8bab5ee382372e8e65a148002a37d380df7c8", size = 511842, upload-time = "2025-12-01T17:29:43.107Z" },
- { url = "https://files.pythonhosted.org/packages/a9/a7/c326f5163dd48b79368b87d8a05f5da4668dd228a3f5ca9d79d5fee2fc40/uuid_utils-0.12.0-cp39-abi3-win32.whl", hash = "sha256:487f17c0fee6cbc1d8b90fe811874174a9b1b5683bf2251549e302906a50fed3", size = 179088, upload-time = "2025-12-01T17:29:44.492Z" },
- { url = "https://files.pythonhosted.org/packages/38/92/41c8734dd97213ee1d5ae435cf4499705dc4f2751e3b957fd12376f61784/uuid_utils-0.12.0-cp39-abi3-win_amd64.whl", hash = "sha256:9598e7c9da40357ae8fffc5d6938b1a7017f09a1acbcc95e14af8c65d48c655a", size = 183003, upload-time = "2025-12-01T17:29:45.47Z" },
- { url = "https://files.pythonhosted.org/packages/c9/f9/52ab0359618987331a1f739af837d26168a4b16281c9c3ab46519940c628/uuid_utils-0.12.0-cp39-abi3-win_arm64.whl", hash = "sha256:c9bea7c5b2aa6f57937ebebeee4d4ef2baad10f86f1b97b58a3f6f34c14b4e84", size = 182975, upload-time = "2025-12-01T17:29:46.444Z" },
+ { url = "https://files.pythonhosted.org/packages/85/b8/d40848ca22781f206c60a1885fc737d2640392bd6b5792d455525accd89c/uuid_utils-0.13.0-cp39-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:83628283e977fb212e756bc055df8fdd2f9f589a2e539ba1abe755b8ce8df7a4", size = 602130, upload-time = "2026-01-08T15:47:34.877Z" },
+ { url = "https://files.pythonhosted.org/packages/40/b9/00a944b8096632ea12638181f8e294abcde3e3b8b5e29b777f809896f6ae/uuid_utils-0.13.0-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c47638ed6334ab19d80f73664f153b04bbb04ab8ce4298d10da6a292d4d21c47", size = 304213, upload-time = "2026-01-08T15:47:36.807Z" },
+ { url = "https://files.pythonhosted.org/packages/da/d7/07b36c33aef683b81c9afff3ec178d5eb39d325447a68c3c68a62e4abb32/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_aarch64.whl", hash = "sha256:b276b538c57733ed406948584912da422a604313c71479654848b84b9e19c9b0", size = 340624, upload-time = "2026-01-08T15:47:38.821Z" },
+ { url = "https://files.pythonhosted.org/packages/7d/55/fcff2fff02a27866cb1a6614c9df2b3ace721f0a0aab2b7b8f5a7d4e4221/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_armv7l.whl", hash = "sha256:bdaf2b77e34b199cf04cde28399495fd1ed951de214a4ece1f3919b2f945bb06", size = 346705, upload-time = "2026-01-08T15:47:40.397Z" },
+ { url = "https://files.pythonhosted.org/packages/41/48/67438506c2bb8bee1b4b00d7c0b3ff866401b4790849bf591d654d4ea0bc/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_i686.whl", hash = "sha256:eb2f0baf81e82f9769a7684022dca8f3bf801ca1574a3e94df1876e9d6f9271e", size = 366023, upload-time = "2026-01-08T15:47:42.662Z" },
+ { url = "https://files.pythonhosted.org/packages/8b/d7/2d91ce17f62fd764d593430de296b70843cc25229c772453f7261de9e6a8/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_ppc64le.whl", hash = "sha256:6be6c4d11275f5cc402a4fdba6c2b1ce45fd3d99bb78716cd1cc2cbf6802b2ce", size = 471149, upload-time = "2026-01-08T15:47:44.963Z" },
+ { url = "https://files.pythonhosted.org/packages/2e/9a/aa0756186073ba84daf5704c150d41ede10eb3185d510e02532e2071550e/uuid_utils-0.13.0-cp39-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:77621cf6ceca7f42173a642a01c01c216f9eaec3b7b65d093d2d6a433ca0a83d", size = 342130, upload-time = "2026-01-08T15:47:46.331Z" },
+ { url = "https://files.pythonhosted.org/packages/74/b4/3191789f4dc3bed59d79cec90559821756297a25d7dc34d1bf7781577a75/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a5a9eb06c2bb86dd876cd7b2fe927fc8543d14c90d971581db6ffda4a02526f", size = 524128, upload-time = "2026-01-08T15:47:47.628Z" },
+ { url = "https://files.pythonhosted.org/packages/b2/30/29839210a8fff9fc219bfa7c8d8cd115324e92618cba0cda090d54d3d321/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:775347c6110fb71360df17aac74132d8d47c1dbe71233ac98197fc872a791fd2", size = 615872, upload-time = "2026-01-08T15:47:50.61Z" },
+ { url = "https://files.pythonhosted.org/packages/99/ed/15000c96a8bd8f5fd8efd622109bf52549ea0b366f8ce71c45580fa55878/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf95f6370ad1a0910ee7b5ad5228fd19c4ae32fe3627389006adaf519408c41e", size = 581023, upload-time = "2026-01-08T15:47:52.776Z" },
+ { url = "https://files.pythonhosted.org/packages/67/c8/3f809fa2dc2ca4bd331c792a3c7d3e45ae2b709d85847a12b8b27d1d5f19/uuid_utils-0.13.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a88e23e0b2f4203fefe2ccbca5736ee06fcad10e61b5e7e39c8d7904bc13300", size = 546715, upload-time = "2026-01-08T15:47:54.415Z" },
+ { url = "https://files.pythonhosted.org/packages/f5/80/4f7c7efd734d1494397c781bd3d421688e9c187ae836e3174625b1ddf8b0/uuid_utils-0.13.0-cp39-abi3-win32.whl", hash = "sha256:3e4f2cc54e6a99c0551158100ead528479ad2596847478cbad624977064ffce3", size = 177650, upload-time = "2026-01-08T15:47:55.679Z" },
+ { url = "https://files.pythonhosted.org/packages/6c/94/d05ab68622e66ad787a241dfe5ccc649b3af09f30eae977b9ee8f7046aaa/uuid_utils-0.13.0-cp39-abi3-win_amd64.whl", hash = "sha256:046cb2756e1597b3de22d24851b769913e192135830486a0a70bf41327f0360c", size = 183211, upload-time = "2026-01-08T15:47:57.604Z" },
+ { url = "https://files.pythonhosted.org/packages/69/37/674b3ce25cd715b831ea8ebbd828b74c40159f04c95d1bb963b2c876fe79/uuid_utils-0.13.0-cp39-abi3-win_arm64.whl", hash = "sha256:5447a680df6ef8a5a353976aaf4c97cc3a3a22b1ee13671c44227b921e3ae2a9", size = 183518, upload-time = "2026-01-08T15:47:59.148Z" },
]
[[package]]
@@ -1897,33 +1901,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
]
-[[package]]
-name = "win32-setctime"
-version = "1.2.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
-]
-
-[[package]]
-name = "xai-review"
-version = "0.48.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "aiofiles" },
- { name = "httpx" },
- { name = "loguru" },
- { name = "pydantic" },
- { name = "pydantic-settings" },
- { name = "pyyaml" },
- { name = "typer" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/dc/56/84a9efb4239c706953180649cdd8255dcb331d2274a8b93e267c4debe3b8/xai_review-0.48.0.tar.gz", hash = "sha256:6c403c7f4cd0b1aecd413acfc3ced8a26a92b89f114538a3f6f7aa7cc79b49b1", size = 148434, upload-time = "2025-12-16T18:03:13.722Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/e6/f3/4e5f4acc136a26f59a95d92dc8e93048889e293a870dfbaa19e1ac6e87a8/xai_review-0.48.0-py3-none-any.whl", hash = "sha256:beed9ee3eae117394ef50cb9a65497ce23d057485a9e608bd6bb16a590cb5f11", size = 292712, upload-time = "2025-12-16T18:03:12.656Z" },
-]
-
[[package]]
name = "xxhash"
version = "3.6.0"