From 395194a960c02d513d16e15bbf031be9302d7f36 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 19 Sep 2025 12:50:57 -0700 Subject: [PATCH 01/41] Implement new data models for async tools (SEP-1391) --- src/mcp/types.py | 81 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) diff --git a/src/mcp/types.py b/src/mcp/types.py index 62feda87a..f9f62cf88 100644 --- a/src/mcp/types.py +++ b/src/mcp/types.py @@ -24,6 +24,7 @@ """ LATEST_PROTOCOL_VERSION = "2025-06-18" +NEXT_PROTOCOL_VERSION = "next" # Development version with async tool support """ The default negotiated version of the Model Context Protocol when no version is specified. @@ -852,6 +853,12 @@ class Tool(BaseMetadata): An optional JSON Schema object defining the structure of the tool's output returned in the structuredContent field of a CallToolResult. """ + invocationMode: Literal["sync", "async"] | None = None + """ + Optional invocation mode for the tool. If not specified, defaults to sync-only. 
+ - "sync": Tool supports synchronous execution only + - "async": Tool supports asynchronous execution only + """ annotations: ToolAnnotations | None = None """Optional additional tool information.""" meta: dict[str, Any] | None = Field(alias="_meta", default=None) @@ -868,11 +875,79 @@ class ListToolsResult(PaginatedResult): tools: list[Tool] +class AsyncRequestProperties(BaseModel): + """Properties for async tool execution requests.""" + + keepAlive: int | None = None + """Number of seconds the client wants the result to be kept available upon completion.""" + model_config = ConfigDict(extra="allow") + + +class AsyncResultProperties(BaseModel): + """Properties for async tool execution results.""" + + token: str + """Server-generated token to use for checking status and retrieving results.""" + keepAlive: int + """Number of seconds the result will be kept available upon completion.""" + message: str | None = None + """Optional message to immediately provide to the client.""" + model_config = ConfigDict(extra="allow") + + +# Async status checking types +class CheckToolAsyncStatusParams(RequestParams): + """Parameters for checking async tool status.""" + + token: str + """Token from the original async tool call.""" + + +class CheckToolAsyncStatusRequest(Request[CheckToolAsyncStatusParams, Literal["tools/async/status"]]): + """Request to check the status of an async tool call.""" + + method: Literal["tools/async/status"] = "tools/async/status" + params: CheckToolAsyncStatusParams + + +class CheckToolAsyncStatusResult(Result): + """Result of checking async tool status.""" + + status: Literal["submitted", "working", "completed", "canceled", "failed", "unknown"] + """Current status of the async operation.""" + error: str | None = None + """Error message if status is 'failed'.""" + + +# Async payload retrieval types +class GetToolAsyncPayloadParams(RequestParams): + """Parameters for getting async tool payload.""" + + token: str + """Token from the original async tool 
call.""" + + +class GetToolAsyncPayloadRequest(Request[GetToolAsyncPayloadParams, Literal["tools/async/result"]]): + """Request to get the result of a completed async tool call.""" + + method: Literal["tools/async/result"] = "tools/async/result" + params: GetToolAsyncPayloadParams + + +class GetToolAsyncPayloadResult(Result): + """Result containing the final async tool call result.""" + + result: "CallToolResult" + """The result of the tool call.""" + + class CallToolRequestParams(RequestParams): """Parameters for calling a tool.""" name: str arguments: dict[str, Any] | None = None + async_properties: AsyncRequestProperties | None = Field(serialization_alias="async", default=None) + """Optional async execution parameters.""" model_config = ConfigDict(extra="allow") @@ -890,6 +965,8 @@ class CallToolResult(Result): structuredContent: dict[str, Any] | None = None """An optional JSON object that represents the structured result of the tool call.""" isError: bool = False + async_properties: AsyncResultProperties | None = Field(serialization_alias="async", default=None) + """Optional async execution information. 
Present when tool is executed asynchronously.""" class ToolListChangedNotification(Notification[NotificationParams | None, Literal["notifications/tools/list_changed"]]): @@ -1232,6 +1309,8 @@ class ClientRequest( | UnsubscribeRequest | CallToolRequest | ListToolsRequest + | CheckToolAsyncStatusRequest + | GetToolAsyncPayloadRequest ] ): pass @@ -1315,6 +1394,8 @@ class ServerResult( | ReadResourceResult | CallToolResult | ListToolsResult + | CheckToolAsyncStatusResult + | GetToolAsyncPayloadResult ] ): pass From cbda6e336e935c7e29a8cae906a9c691291af31c Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 19 Sep 2025 13:58:10 -0700 Subject: [PATCH 02/41] Add "next" protocol version to isolate async tools from existing clients --- src/mcp/shared/version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/mcp/shared/version.py b/src/mcp/shared/version.py index 23c46d04b..12d7df67a 100644 --- a/src/mcp/shared/version.py +++ b/src/mcp/shared/version.py @@ -1,3 +1,3 @@ -from mcp.types import LATEST_PROTOCOL_VERSION +from mcp.types import LATEST_PROTOCOL_VERSION, NEXT_PROTOCOL_VERSION -SUPPORTED_PROTOCOL_VERSIONS: list[str] = ["2024-11-05", "2025-03-26", LATEST_PROTOCOL_VERSION] +SUPPORTED_PROTOCOL_VERSIONS: list[str] = ["2024-11-05", "2025-03-26", LATEST_PROTOCOL_VERSION, NEXT_PROTOCOL_VERSION] From e5e4078bedc1e7b9c5efb443ba45d7232f30de72 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 19 Sep 2025 14:00:15 -0700 Subject: [PATCH 03/41] Implement session functions for async tools --- src/mcp/client/session.py | 50 +++++++++- src/mcp/server/fastmcp/server.py | 56 ++++++++++- src/mcp/server/fastmcp/tools/base.py | 13 ++- src/mcp/server/fastmcp/tools/tool_manager.py | 8 +- tests/server/fastmcp/test_server.py | 48 ++++++++++ tests/server/fastmcp/test_tool_manager.py | 99 ++++++++++++++++++++ 6 files changed, 270 insertions(+), 4 deletions(-) diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index bcf80d62a..854cff8c0 100644 --- 
a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -2,6 +2,7 @@ from datetime import timedelta from typing import Any, Protocol +import anyio import anyio.lowlevel from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from jsonschema import SchemaError, ValidationError, validate @@ -273,8 +274,18 @@ async def call_tool( arguments: dict[str, Any] | None = None, read_timeout_seconds: timedelta | None = None, progress_callback: ProgressFnT | None = None, + *, + async_properties: types.AsyncRequestProperties | None = None, ) -> types.CallToolResult: - """Send a tools/call request with optional progress callback support.""" + """Send a tools/call request with optional progress callback support. + + Args: + name: Name of the tool to call + arguments: Arguments to pass to the tool + read_timeout_seconds: Read timeout for the request + progress_callback: Optional progress callback + async_properties: Optional async parameters for async tool execution + """ result = await self.send_request( types.ClientRequest( @@ -282,6 +293,7 @@ async def call_tool( params=types.CallToolRequestParams( name=name, arguments=arguments, + async_properties=async_properties, ), ) ), @@ -295,6 +307,42 @@ async def call_tool( return result + async def check_tool_async_status(self, token: str) -> types.CheckToolAsyncStatusResult: + """Check the status of an async tool operation. + + Args: + token: Token returned from async call_tool + + Returns: + Status result with current operation state + """ + return await self.send_request( + types.ClientRequest( + types.CheckToolAsyncStatusRequest( + params=types.CheckToolAsyncStatusParams(token=token), + ) + ), + types.CheckToolAsyncStatusResult, + ) + + async def get_tool_async_result(self, token: str) -> types.GetToolAsyncPayloadResult: + """Get the result of a completed async tool operation. 
+ + Args: + token: Token returned from async call_tool + + Returns: + The final tool result + """ + return await self.send_request( + types.ClientRequest( + types.GetToolAsyncPayloadRequest( + params=types.GetToolAsyncPayloadParams(token=token), + ) + ), + types.GetToolAsyncPayloadResult, + ) + async def _validate_tool_result(self, name: str, result: types.CallToolResult) -> None: """Validate the structured content of a tool result against its output schema.""" if name not in self._tool_output_schemas: diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index d86fa85e3..c16baa45a 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -30,6 +30,7 @@ from mcp.server.fastmcp.prompts import Prompt, PromptManager from mcp.server.fastmcp.resources import FunctionResource, Resource, ResourceManager from mcp.server.fastmcp.tools import Tool, ToolManager +from mcp.server.fastmcp.tools.base import InvocationMode from mcp.server.fastmcp.utilities.context_injection import find_context_parameter from mcp.server.fastmcp.utilities.logging import configure_logging, get_logger from mcp.server.lowlevel.helper_types import ReadResourceContents @@ -43,7 +44,7 @@ from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from mcp.server.transport_security import TransportSecuritySettings from mcp.shared.context import LifespanContextT, RequestContext, RequestT -from mcp.types import AnyFunction, ContentBlock, GetPromptResult, ToolAnnotations +from mcp.types import NEXT_PROTOCOL_VERSION, AnyFunction, ContentBlock, GetPromptResult, ToolAnnotations from mcp.types import Prompt as MCPPrompt from mcp.types import PromptArgument as MCPPromptArgument from mcp.types import Resource as MCPResource @@ -266,9 +267,39 @@ def _setup_handlers(self) -> None: self._mcp_server.get_prompt()(self.get_prompt) self._mcp_server.list_resource_templates()(self.list_resource_templates) + def _client_supports_async(self) -> bool: + 
"""Check if the current client supports async tools based on protocol version.""" + try: + context = self.get_context() + if context.request_context and context.request_context.session.client_params: + client_version = str(context.request_context.session.client_params.protocolVersion) + # Only "next" version supports async tools for now + return client_version == NEXT_PROTOCOL_VERSION + except ValueError: + # Context not available (outside of request), assume no async support + pass + return False + + def _get_invocation_mode(self, info: Tool, client_supports_async: bool) -> Literal["sync", "async"] | None: + """Determine invocationMode field based on client support.""" + if not client_supports_async: + return None # Old clients don't see invocationMode field + + # New clients see the invocationMode field + if "async" in info.invocation_modes and len(info.invocation_modes) == 1: + return "async" # Async-only + elif len(info.invocation_modes) > 1 or info.invocation_modes == ["sync"]: + return "sync" # Hybrid or explicit sync + return None + async def list_tools(self) -> list[MCPTool]: """List all available tools.""" tools = self._tool_manager.list_tools() + + # Check if client supports async tools based on protocol version + client_supports_async = self._client_supports_async() + + # Filter out async-only tools for old clients and set invocationMode based on client support return [ MCPTool( name=info.name, @@ -277,8 +308,10 @@ async def list_tools(self) -> list[MCPTool]: inputSchema=info.parameters, outputSchema=info.output_schema, annotations=info.annotations, + invocationMode=self._get_invocation_mode(info, client_supports_async), ) for info in tools + if client_supports_async or info.invocation_modes != ["async"] ] def get_context(self) -> Context[ServerSession, LifespanResultT, Request]: @@ -348,6 +381,7 @@ def add_tool( description: str | None = None, annotations: ToolAnnotations | None = None, structured_output: bool | None = None, + invocation_modes: 
list[InvocationMode] | None = None, ) -> None: """Add a tool to the server. @@ -364,6 +398,8 @@ def add_tool( - If None, auto-detects based on the function's return type annotation - If True, unconditionally creates a structured tool (return type annotation permitting) - If False, unconditionally creates an unstructured tool + invocation_modes: List of supported invocation modes (e.g., ["sync", "async"]) + - If None, defaults to ["sync"] for backwards compatibility """ self._tool_manager.add_tool( fn, @@ -372,6 +408,7 @@ def add_tool( description=description, annotations=annotations, structured_output=structured_output, + invocation_modes=invocation_modes, ) def tool( @@ -381,6 +418,7 @@ def tool( description: str | None = None, annotations: ToolAnnotations | None = None, structured_output: bool | None = None, + invocation_modes: list[InvocationMode] | None = None, ) -> Callable[[AnyFunction], AnyFunction]: """Decorator to register a tool. @@ -397,6 +435,10 @@ def tool( - If None, auto-detects based on the function's return type annotation - If True, unconditionally creates a structured tool (return type annotation permitting) - If False, unconditionally creates an unstructured tool + invocation_modes: List of supported invocation modes (e.g., ["sync", "async"]) + - If None, defaults to ["sync"] for backwards compatibility + - Supports "sync" for synchronous execution and "async" for asynchronous execution + - Tools with "async" mode will be hidden from clients that don't support async execution Example: @server.tool() @@ -412,6 +454,17 @@ def tool_with_context(x: int, ctx: Context) -> str: async def async_tool(x: int, context: Context) -> str: await context.report_progress(50, 100) return str(x) + + @server.tool(invocation_modes=["async"]) + async def async_only_tool(data: str, ctx: Context) -> str: + # This tool only supports async execution + await ctx.info("Starting long-running analysis...") + return await analyze_data(data) + + 
@server.tool(invocation_modes=["sync", "async"]) + def hybrid_tool(x: int) -> str: + # This tool supports both sync and async execution + return str(x) """ # Check if user passed function directly instead of calling decorator if callable(name): @@ -427,6 +480,7 @@ def decorator(fn: AnyFunction) -> AnyFunction: description=description, annotations=annotations, structured_output=structured_output, + invocation_modes=invocation_modes, ) return fn diff --git a/src/mcp/server/fastmcp/tools/base.py b/src/mcp/server/fastmcp/tools/base.py index bb5003de3..2491b9e94 100644 --- a/src/mcp/server/fastmcp/tools/base.py +++ b/src/mcp/server/fastmcp/tools/base.py @@ -4,7 +4,7 @@ import inspect from collections.abc import Callable from functools import cached_property -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Literal from pydantic import BaseModel, Field @@ -18,6 +18,8 @@ from mcp.server.session import ServerSessionT from mcp.shared.context import LifespanContextT, RequestT +InvocationMode = Literal["sync", "async"] + class Tool(BaseModel): """Internal tool registration info.""" @@ -33,6 +35,9 @@ class Tool(BaseModel): is_async: bool = Field(description="Whether the tool is async") context_kwarg: str | None = Field(None, description="Name of the kwarg that should receive context") annotations: ToolAnnotations | None = Field(None, description="Optional annotations for the tool") + invocation_modes: list[InvocationMode] = Field( + default=["sync"], description="Supported invocation modes (sync/async)" + ) @cached_property def output_schema(self) -> dict[str, Any] | None: @@ -48,6 +53,7 @@ def from_function( context_kwarg: str | None = None, annotations: ToolAnnotations | None = None, structured_output: bool | None = None, + invocation_modes: list[InvocationMode] | None = None, ) -> Tool: """Create a Tool from a function.""" func_name = name or fn.__name__ @@ -68,6 +74,10 @@ def from_function( ) parameters = 
func_arg_metadata.arg_model.model_json_schema(by_alias=True) + # Default to sync mode if no invocation modes specified + if invocation_modes is None: + invocation_modes = ["sync"] + return cls( fn=fn, name=func_name, @@ -78,6 +88,7 @@ def from_function( is_async=is_async, context_kwarg=context_kwarg, annotations=annotations, + invocation_modes=invocation_modes, ) async def run( diff --git a/src/mcp/server/fastmcp/tools/tool_manager.py b/src/mcp/server/fastmcp/tools/tool_manager.py index bfa8b2382..cc8866dd9 100644 --- a/src/mcp/server/fastmcp/tools/tool_manager.py +++ b/src/mcp/server/fastmcp/tools/tool_manager.py @@ -4,7 +4,7 @@ from typing import TYPE_CHECKING, Any from mcp.server.fastmcp.exceptions import ToolError -from mcp.server.fastmcp.tools.base import Tool +from mcp.server.fastmcp.tools.base import InvocationMode, Tool from mcp.server.fastmcp.utilities.logging import get_logger from mcp.shared.context import LifespanContextT, RequestT from mcp.types import ToolAnnotations @@ -50,8 +50,13 @@ def add_tool( description: str | None = None, annotations: ToolAnnotations | None = None, structured_output: bool | None = None, + invocation_modes: list[InvocationMode] | None = None, ) -> Tool: """Add a tool to the server.""" + # Default to sync mode if no invocation modes specified + if invocation_modes is None: + invocation_modes = ["sync"] + tool = Tool.from_function( fn, name=name, @@ -59,6 +64,7 @@ def add_tool( description=description, annotations=annotations, structured_output=structured_output, + invocation_modes=invocation_modes, ) existing = self._tools.get(tool.name) if existing: diff --git a/tests/server/fastmcp/test_server.py b/tests/server/fastmcp/test_server.py index 5e34ba1b1..dbb1eaeb5 100644 --- a/tests/server/fastmcp/test_server.py +++ b/tests/server/fastmcp/test_server.py @@ -603,6 +603,54 @@ def get_settings() -> dict[str, str]: assert result.isError is False assert result.structuredContent == {"theme": "dark", "language": "en", "timezone": "UTC"} 
+ @pytest.mark.anyio + async def test_list_tools_invocation_mode_sync(self): + """Test that sync tools have proper invocationMode field.""" + mcp = FastMCP() + + @mcp.tool() + def sync_tool(x: int) -> int: + """A sync tool.""" + return x * 2 + + async with client_session(mcp._mcp_server) as client: + tools = await client.list_tools() + tool = next(t for t in tools.tools if t.name == "sync_tool") + # Sync tools should not have invocationMode field (None) for old clients + assert tool.invocationMode is None + + @pytest.mark.anyio + async def test_list_tools_invocation_mode_async_only(self): + """Test that async-only tools have proper invocationMode field.""" + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"]) + async def async_only_tool(x: int) -> int: + """An async-only tool.""" + return x * 2 + + async with client_session(mcp._mcp_server) as client: + tools = await client.list_tools() + # Async-only tools should be filtered out for old clients + async_tools = [t for t in tools.tools if t.name == "async_only_tool"] + assert len(async_tools) == 0 + + @pytest.mark.anyio + async def test_list_tools_invocation_mode_hybrid(self): + """Test that hybrid tools have proper invocationMode field.""" + mcp = FastMCP() + + @mcp.tool(invocation_modes=["sync", "async"]) + def hybrid_tool(x: int) -> int: + """A hybrid tool.""" + return x * 2 + + async with client_session(mcp._mcp_server) as client: + tools = await client.list_tools() + tool = next(t for t in tools.tools if t.name == "hybrid_tool") + # Hybrid tools should not have invocationMode field (None) for old clients + assert tool.invocationMode is None + class TestServerResources: @pytest.mark.anyio diff --git a/tests/server/fastmcp/test_tool_manager.py b/tests/server/fastmcp/test_tool_manager.py index 8b6168275..82439eb8a 100644 --- a/tests/server/fastmcp/test_tool_manager.py +++ b/tests/server/fastmcp/test_tool_manager.py @@ -178,6 +178,55 @@ def f(x: int) -> int: manager.add_tool(f) assert "Tool already exists: f" 
not in caplog.text + def test_invocation_modes_default(self): + """Test that tools default to sync mode when no invocation_modes specified.""" + + def sync_tool(x: int) -> int: + """A sync tool.""" + return x * 2 + + manager = ToolManager() + tool = manager.add_tool(sync_tool) + + assert tool.invocation_modes == ["sync"] + + def test_invocation_modes_async_only(self): + """Test async-only tool creation.""" + + async def async_tool(x: int) -> int: + """An async-only tool.""" + return x * 2 + + manager = ToolManager() + tool = manager.add_tool(async_tool, invocation_modes=["async"]) + + assert tool.invocation_modes == ["async"] + assert tool.is_async is True + + def test_invocation_modes_hybrid(self): + """Test hybrid sync/async tool creation.""" + + def hybrid_tool(x: int) -> int: + """A hybrid tool that supports both modes.""" + return x * 2 + + manager = ToolManager() + tool = manager.add_tool(hybrid_tool, invocation_modes=["sync", "async"]) + + assert tool.invocation_modes == ["sync", "async"] + + def test_invocation_modes_explicit_sync(self): + """Test explicitly setting sync mode.""" + + def explicit_sync_tool(x: int) -> int: + """An explicitly sync tool.""" + return x * 2 + + manager = ToolManager() + tool = manager.add_tool(explicit_sync_tool, invocation_modes=["sync"]) + + assert tool.invocation_modes == ["sync"] + class TestCallTools: @pytest.mark.anyio @@ -633,3 +682,53 @@ def get_scores() -> dict[str, int]: # Test converted result result = await manager.call_tool("get_scores", {}) assert result == expected + + +class TestInvocationModes: + """Test invocation modes functionality.""" + + def test_invocation_mode_type_safety(self): + """Test InvocationMode literal type validation.""" + from mcp.server.fastmcp.tools.base import InvocationMode + + # Valid modes should work + valid_modes: list[InvocationMode] = ["sync", "async"] + assert valid_modes == ["sync", "async"] + + def test_tool_from_function_with_invocation_modes(self): + """Test Tool.from_function 
with invocation_modes parameter.""" + from mcp.server.fastmcp.tools.base import Tool + + def test_tool(x: int) -> int: + return x + + # Test default behavior + tool_default = Tool.from_function(test_tool) + assert tool_default.invocation_modes == ["sync"] + + # Test explicit sync + tool_sync = Tool.from_function(test_tool, invocation_modes=["sync"]) + assert tool_sync.invocation_modes == ["sync"] + + # Test async only + tool_async = Tool.from_function(test_tool, invocation_modes=["async"]) + assert tool_async.invocation_modes == ["async"] + + # Test hybrid + tool_hybrid = Tool.from_function(test_tool, invocation_modes=["sync", "async"]) + assert tool_hybrid.invocation_modes == ["sync", "async"] + + def test_tool_manager_invocation_modes_parameter(self): + """Test ToolManager.add_tool with invocation_modes parameter.""" + manager = ToolManager() + + def test_tool(x: int) -> int: + return x + + # Test that invocation_modes parameter is passed through + tool = manager.add_tool(test_tool, invocation_modes=["async"]) + assert tool.invocation_modes == ["async"] + + # Test default behavior when None + tool_default = manager.add_tool(test_tool, name="test_tool_default") + assert tool_default.invocation_modes == ["sync"] From 7dd550b40867d58571e910235aa4838890908dbd Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 22 Sep 2025 16:01:47 -0700 Subject: [PATCH 04/41] Implement server-side handling for async tool calls --- pyproject.toml | 2 +- src/mcp/server/fastmcp/server.py | 3 + src/mcp/server/lowlevel/async_operations.py | 202 ++++++++++++ src/mcp/server/lowlevel/server.py | 91 +++++- src/mcp/types.py | 6 +- tests/server/test_async_operations.py | 291 ++++++++++++++++++ .../server/test_lowlevel_async_operations.py | 249 +++++++++++++++ 7 files changed, 832 insertions(+), 12 deletions(-) create mode 100644 src/mcp/server/lowlevel/async_operations.py create mode 100644 tests/server/test_async_operations.py create mode 100644 tests/server/test_lowlevel_async_operations.py 
diff --git a/pyproject.toml b/pyproject.toml index c6119867e..1ab47bf0d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -128,7 +128,7 @@ mccabe.max-complexity = 24 # Default is 10 [tool.ruff.lint.pylint] allow-magic-value-types = ["bytes", "float", "int", "str"] -max-args = 23 # Default is 5 +max-args = 24 # Default is 5 max-branches = 23 # Default is 12 max-returns = 13 # Default is 6 max-statements = 102 # Default is 50 diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index c16baa45a..63fa2d1c9 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -33,6 +33,7 @@ from mcp.server.fastmcp.tools.base import InvocationMode from mcp.server.fastmcp.utilities.context_injection import find_context_parameter from mcp.server.fastmcp.utilities.logging import configure_logging, get_logger +from mcp.server.lowlevel.async_operations import AsyncOperationManager from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.lowlevel.server import LifespanResultT from mcp.server.lowlevel.server import Server as MCPServer @@ -129,6 +130,7 @@ def __init__( token_verifier: TokenVerifier | None = None, event_store: EventStore | None = None, *, + async_operations: AsyncOperationManager | None = None, tools: list[Tool] | None = None, debug: bool = False, log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", @@ -178,6 +180,7 @@ def __init__( self._tool_manager = ToolManager(tools=tools, warn_on_duplicate_tools=self.settings.warn_on_duplicate_tools) self._resource_manager = ResourceManager(warn_on_duplicate_resources=self.settings.warn_on_duplicate_resources) self._prompt_manager = PromptManager(warn_on_duplicate_prompts=self.settings.warn_on_duplicate_prompts) + self.async_operations = async_operations or AsyncOperationManager() # Validate auth configuration if self.settings.auth is not None: if auth_server_provider and token_verifier: diff --git 
a/src/mcp/server/lowlevel/async_operations.py b/src/mcp/server/lowlevel/async_operations.py new file mode 100644 index 000000000..9d3f78dc7 --- /dev/null +++ b/src/mcp/server/lowlevel/async_operations.py @@ -0,0 +1,202 @@ +"""Async operations management for FastMCP servers.""" + +from __future__ import annotations + +import asyncio +import secrets +import time +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +import mcp.types as types +from mcp.types import AsyncOperationStatus + + +@dataclass +class AsyncOperation: + """Represents an async tool operation.""" + + token: str + tool_name: str + arguments: dict[str, Any] + session_id: str + status: AsyncOperationStatus + created_at: float + keep_alive: int + result: types.CallToolResult | None = None + error: str | None = None + + @property + def is_expired(self) -> bool: + """Check if operation has expired based on keepAlive.""" + if self.status in ("completed", "failed", "canceled"): + return time.time() > (self.created_at + self.keep_alive) + return False + + @property + def is_terminal(self) -> bool: + """Check if operation is in a terminal state.""" + return self.status in ("completed", "failed", "canceled", "unknown") + + +class AsyncOperationManager: + """Manages async tool operations with token-based tracking.""" + + def __init__(self, *, token_generator: Callable[[str], str] | None = None): + self._operations: dict[str, AsyncOperation] = {} + self._cleanup_task: asyncio.Task[None] | None = None + self._cleanup_interval = 60 # Cleanup every 60 seconds + self._token_generator = token_generator or self._default_token_generator + + def _default_token_generator(self, session_id: str) -> str: + """Default token generation using random tokens.""" + return secrets.token_urlsafe(32) + + def generate_token(self, session_id: str) -> str: + """Generate a token.""" + return self._token_generator(session_id) + + def create_operation( + self, + tool_name: str, + arguments: 
dict[str, Any], + session_id: str, + keep_alive: int = 3600, + ) -> AsyncOperation: + """Create a new async operation.""" + token = self.generate_token(session_id) + operation = AsyncOperation( + token=token, + tool_name=tool_name, + arguments=arguments, + session_id=session_id, + status="submitted", + created_at=time.time(), + keep_alive=keep_alive, + ) + self._operations[token] = operation + return operation + + def get_operation(self, token: str) -> AsyncOperation | None: + """Get operation by token.""" + return self._operations.get(token) + + def mark_working(self, token: str) -> bool: + """Mark operation as working.""" + operation = self._operations.get(token) + if not operation: + return False + + # Can only transition to working from submitted + if operation.status != "submitted": + return False + + operation.status = "working" + return True + + def complete_operation(self, token: str, result: types.CallToolResult) -> bool: + """Complete operation with result.""" + operation = self._operations.get(token) + if not operation: + return False + + # Can only complete from submitted or working states + if operation.status not in ("submitted", "working"): + return False + + operation.status = "completed" + operation.result = result + return True + + def fail_operation(self, token: str, error: str) -> bool: + """Fail operation with error.""" + operation = self._operations.get(token) + if not operation: + return False + + # Can only fail from submitted or working states + if operation.status not in ("submitted", "working"): + return False + + operation.status = "failed" + operation.error = error + return True + + def get_operation_result(self, token: str) -> types.CallToolResult | None: + """Get result for completed operation.""" + operation = self._operations.get(token) + if not operation or operation.status != "completed": + return None + return operation.result + + def cancel_operation(self, token: str) -> bool: + """Cancel operation.""" + operation = 
self._operations.get(token) + if not operation: + return False + + # Can only cancel from submitted or working states + if operation.status not in ("submitted", "working"): + return False + + operation.status = "canceled" + return True + + def remove_operation(self, token: str) -> bool: + """Remove operation by token.""" + return self._operations.pop(token, None) is not None + + def cleanup_expired_operations(self) -> int: + """Remove expired operations and return count removed.""" + expired_tokens = [token for token, op in self._operations.items() if op.is_expired] + + for token in expired_tokens: + del self._operations[token] + + return len(expired_tokens) + + def get_session_operations(self, session_id: str) -> list[AsyncOperation]: + """Get all operations for a session.""" + return [op for op in self._operations.values() if op.session_id == session_id] + + def cancel_session_operations(self, session_id: str) -> int: + """Cancel all operations for a session.""" + session_ops = self.get_session_operations(session_id) + canceled_count = 0 + + for op in session_ops: + if not op.is_terminal: + op.status = "canceled" + canceled_count += 1 + + return canceled_count + + async def start_cleanup_task(self) -> None: + """Start the background cleanup task.""" + if self._cleanup_task is not None: + return + + self._cleanup_task = asyncio.create_task(self._cleanup_loop()) + + async def stop_cleanup_task(self) -> None: + """Stop the background cleanup task.""" + if self._cleanup_task is not None: + self._cleanup_task.cancel() + try: + await self._cleanup_task + except asyncio.CancelledError: + pass + self._cleanup_task = None + + async def _cleanup_loop(self) -> None: + """Background cleanup loop.""" + while True: + try: + await asyncio.sleep(self._cleanup_interval) + self.cleanup_expired_operations() + except asyncio.CancelledError: + break + except Exception: + # Log error but continue cleanup loop + pass diff --git a/src/mcp/server/lowlevel/server.py 
b/src/mcp/server/lowlevel/server.py index 3076e283e..1e909d0c1 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -82,6 +82,7 @@ async def main(): from typing_extensions import TypeVar import mcp.types as types +from mcp.server.lowlevel.async_operations import AsyncOperation, AsyncOperationManager from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.models import InitializationOptions from mcp.server.session import ServerSession @@ -135,6 +136,7 @@ def __init__( name: str, version: str | None = None, instructions: str | None = None, + async_operations: AsyncOperationManager | None = None, lifespan: Callable[ [Server[LifespanResultT, RequestT]], AbstractAsyncContextManager[LifespanResultT], @@ -144,6 +146,7 @@ def __init__( self.version = version self.instructions = instructions self.lifespan = lifespan + self.async_operations = async_operations or AsyncOperationManager() self.request_handlers: dict[type, Callable[..., Awaitable[types.ServerResult]]] = { types.PingRequest: _ping_handler, } @@ -554,6 +557,64 @@ async def handler(req: types.CompleteRequest): return decorator + def _validate_operation_token(self, token: str) -> AsyncOperation: + """Validate operation token and return operation if valid.""" + operation = self.async_operations.get_operation(token) + if not operation: + raise McpError(types.ErrorData(code=-32602, message="Invalid token")) + + if operation.is_expired: + raise McpError(types.ErrorData(code=-32602, message="Token expired")) + + return operation + + def check_tool_async_status(self): + """Register a handler for checking async tool execution status.""" + + def decorator(func: Callable[[str], Awaitable[types.CheckToolAsyncStatusResult]]): + logger.debug("Registering handler for CheckToolAsyncStatusRequest") + + async def handler(req: types.CheckToolAsyncStatusRequest): + # Validate token and get operation + operation = self._validate_operation_token(req.params.token) + + return 
types.ServerResult( + types.CheckToolAsyncStatusResult( + status=operation.status, + error=operation.error, + ) + ) + + self.request_handlers[types.CheckToolAsyncStatusRequest] = handler + return func + + return decorator + + def get_tool_async_result(self): + """Register a handler for retrieving async tool execution results.""" + + def decorator(func: Callable[[str], Awaitable[types.GetToolAsyncPayloadResult]]): + logger.debug("Registering handler for GetToolAsyncPayloadRequest") + + async def handler(req: types.GetToolAsyncPayloadRequest): + # Validate token and get operation + operation = self._validate_operation_token(req.params.token) + + if operation.status != "completed": + raise McpError( + types.ErrorData(code=-32600, message=f"Operation not completed (status: {operation.status})") + ) + + if not operation.result: + raise McpError(types.ErrorData(code=-32600, message="No result available for completed operation")) + + return types.ServerResult(types.GetToolAsyncPayloadResult(result=operation.result)) + + self.request_handlers[types.GetToolAsyncPayloadRequest] = handler + return func + + return decorator + async def run( self, read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], @@ -581,17 +642,27 @@ async def run( ) ) - async with anyio.create_task_group() as tg: - async for message in session.incoming_messages: - logger.debug("Received message: %s", message) + # Start async operations cleanup task + await self.async_operations.start_cleanup_task() - tg.start_soon( - self._handle_message, - message, - session, - lifespan_context, - raise_exceptions, - ) + try: + async with anyio.create_task_group() as tg: + async for message in session.incoming_messages: + logger.debug("Received message: %s", message) + + tg.start_soon( + self._handle_message, + message, + session, + lifespan_context, + raise_exceptions, + ) + finally: + # Cancel session operations and stop cleanup task + session_id = getattr(session, "session_id", None) + if session_id is 
class TestAsyncOperationManager:
    """Test AsyncOperationManager functionality."""

    def _create_manager_with_operation(
        self, session_id: str = "session1", **kwargs: Any
    ) -> tuple[AsyncOperationManager, AsyncOperation]:
        """Helper to create a fresh manager holding one test operation."""
        manager = AsyncOperationManager()
        operation = manager.create_operation("test_tool", {"arg": "value"}, session_id, **kwargs)
        return manager, operation

    def test_token_generation(self):
        """Test token generation with default and custom generators."""
        # Default generator: unique, opaque, not prefixed with the session id.
        manager = AsyncOperationManager()
        token1 = manager.generate_token("test_session")
        token2 = manager.generate_token("test_session")
        assert token1 != token2
        assert len(token1) > 20
        assert not token1.startswith("test_session_")

        # Custom token generator
        custom_manager = AsyncOperationManager(token_generator=lambda sid: f"custom_{sid}_token")
        assert custom_manager.generate_token("test") == "custom_test_token"

        # Session-scoped token generator
        scoped_manager = AsyncOperationManager(token_generator=lambda sid: f"{sid}_{secrets.token_urlsafe(16)}")
        scoped1 = scoped_manager.generate_token("s1")
        scoped2 = scoped_manager.generate_token("s2")
        assert scoped1.startswith("s1_")
        assert scoped2.startswith("s2_")
        assert scoped1 != scoped2

    def test_operation_lifecycle(self):
        """Test complete operation lifecycle including direct transitions."""
        manager, operation = self._create_manager_with_operation()
        token = operation.token

        # New operations start "submitted" with no result attached.
        assert operation.status == "submitted"
        assert operation.result is None

        # submitted -> working
        assert manager.mark_working(token)
        working_op = manager.get_operation(token)
        assert working_op is not None
        assert working_op.status == "working"

        # working -> completed
        result = types.CallToolResult(content=[types.TextContent(type="text", text="success")])
        assert manager.complete_operation(token, result)
        completed_op = manager.get_operation(token)
        assert completed_op is not None
        assert completed_op.status == "completed"
        assert completed_op.result == result
        assert manager.get_operation_result(token) == result

        # submitted -> completed directly (fresh manager to avoid interference)
        direct_manager, direct_op = self._create_manager_with_operation()
        assert direct_manager.complete_operation(direct_op.token, result)
        direct_completed = direct_manager.get_operation(direct_op.token)
        assert direct_completed is not None
        assert direct_completed.status == "completed"

        # submitted -> failed directly (fresh manager to avoid interference)
        fail_manager, fail_op = self._create_manager_with_operation()
        assert fail_manager.fail_operation(fail_op.token, "immediate error")
        failed = fail_manager.get_operation(fail_op.token)
        assert failed is not None
        assert failed.status == "failed"
        assert failed.error == "immediate error"

    def test_operation_failure_and_cancellation(self):
        """Test operation failure and cancellation."""
        manager, operation = self._create_manager_with_operation()

        # Failure from the working state
        manager.mark_working(operation.token)
        assert manager.fail_operation(operation.token, "Something went wrong")
        failed_op = manager.get_operation(operation.token)
        assert failed_op is not None
        assert failed_op.status == "failed"
        assert failed_op.error == "Something went wrong"
        assert manager.get_operation_result(operation.token) is None

        # Cancellation (fresh manager to avoid interference)
        cancel_manager, cancel_op = self._create_manager_with_operation()
        assert cancel_manager.cancel_operation(cancel_op.token)
        canceled_op = cancel_manager.get_operation(cancel_op.token)
        assert canceled_op is not None
        assert canceled_op.status == "canceled"

    def test_state_transitions_and_terminal_states(self):
        """Test state transition validation and terminal state immutability."""
        manager, operation = self._create_manager_with_operation()
        token = operation.token
        result = Mock()

        # Valid transitions all succeed.
        assert manager.mark_working(token)
        assert manager.complete_operation(token, result)

        # A terminal state rejects every further transition.
        assert not manager.mark_working(token)
        assert not manager.fail_operation(token, "error")
        assert not manager.cancel_operation(token)
        completed_check = manager.get_operation(token)
        assert completed_check is not None
        assert completed_check.status == "completed"

        # Other terminal states behave the same way (fresh manager each time,
        # since the operation above is already completed).
        def fail_action(m: AsyncOperationManager, t: str) -> bool:
            return m.fail_operation(t, "err")

        def cancel_action(m: AsyncOperationManager, t: str) -> bool:
            return m.cancel_operation(t)

        for status, action in [
            ("failed", fail_action),
            ("canceled", cancel_action),
        ]:
            test_manager, test_op = self._create_manager_with_operation()
            action(test_manager, test_op.token)
            terminal_op = test_manager.get_operation(test_op.token)
            assert terminal_op is not None
            assert terminal_op.status == status
            assert terminal_op.is_terminal

    def test_nonexistent_token_operations(self):
        """Test operations on nonexistent tokens."""
        manager = AsyncOperationManager()
        fake_token = "fake_token"

        # Every token-keyed API must report "not found" (None or False).
        for method, args in [
            ("get_operation", ()),
            ("mark_working", ()),
            ("complete_operation", (Mock(),)),
            ("fail_operation", ("error",)),
            ("cancel_operation", ()),
            ("get_operation_result", ()),
            ("remove_operation", ()),
        ]:
            assert getattr(manager, method)(fake_token, *args) in (None, False)

    def test_session_management(self):
        """Test session-based operation management and termination."""
        manager = AsyncOperationManager()

        # Two operations per session (sessions alternate session0/session1).
        ops = [manager.create_operation(f"tool{i}", {}, f"session{i % 2}") for i in range(4)]

        assert len(manager.get_session_operations("session0")) == 2
        assert len(manager.get_session_operations("session1")) == 2

        # ops[0] and ops[2] belong to session0.
        manager.mark_working(ops[0].token)  # should be canceled on termination
        manager.complete_operation(ops[2].token, Mock())  # terminal; must survive

        canceled_count = manager.cancel_session_operations("session0")
        assert canceled_count == 1  # only the working operation was canceled

        s0_after = manager.get_session_operations("session0")
        # Find the operations by token since order might vary
        working_op = next(op for op in s0_after if op.token == ops[0].token)
        completed_op = next(op for op in s0_after if op.token == ops[2].token)
        assert working_op.status == "canceled"
        assert completed_op.status == "completed"

    def test_expiration_and_cleanup(self):
        """Test operation expiration and cleanup."""
        manager = AsyncOperationManager()

        # Operations with different keep-alive windows
        short_op = manager.create_operation("tool1", {}, "session1", keep_alive=1)
        long_op = manager.create_operation("tool2", {}, "session1", keep_alive=10)

        # Complete both, then backdate the short-lived one past its window.
        for op in (short_op, long_op):
            manager.complete_operation(op.token, Mock())
        short_op.created_at = time.time() - 2

        assert short_op.is_expired
        assert not long_op.is_expired

        # Cleanup removes only the expired operation.
        removed_count = manager.cleanup_expired_operations()
        assert removed_count == 1
        assert manager.get_operation(short_op.token) is None
        assert manager.get_operation(long_op.token) is not None

    def test_concurrent_operations(self):
        """Test concurrent operation handling and memory management."""
        manager = AsyncOperationManager()

        # Many operations spread across three sessions
        operations = [
            manager.create_operation(f"tool_{i}", {"data": "x" * 100}, f"session_{i % 3}") for i in range(50)
        ]

        assert len(operations) == 50
        assert len({op.token for op in operations}) == 50  # tokens are unique

        # Complete half with a short keep_alive, backdated past expiry.
        for op in operations[:25]:
            manager.complete_operation(op.token, Mock())
            op.keep_alive = 1
            op.created_at = time.time() - 2

        removed_count = manager.cleanup_expired_operations()
        assert removed_count == 25
        assert len(manager._operations) == 25

    @pytest.mark.anyio
    async def test_cleanup_task_lifecycle(self):
        """Test background cleanup task management."""
        manager = AsyncOperationManager()

        await manager.start_cleanup_task()
        assert manager._cleanup_task is not None
        assert not manager._cleanup_task.done()

        # Starting again should be a no-op.
        await manager.start_cleanup_task()

        await manager.stop_cleanup_task()
        assert manager._cleanup_task is None

    def test_dependency_injection_and_integration(self):
        """Test AsyncOperationManager dependency injection and server integration."""
        from mcp.server.fastmcp import FastMCP
        from mcp.server.lowlevel import Server

        custom_manager = AsyncOperationManager()
        operation = custom_manager.create_operation("shared_tool", {"data": "shared"}, "session1")

        # FastMCP accepts an injected manager.
        fastmcp = FastMCP("FastMCP", async_operations=custom_manager)
        assert fastmcp.async_operations is custom_manager
        assert fastmcp.async_operations.get_operation(operation.token) is operation

        # The lowlevel Server accepts the same injected manager.
        lowlevel = Server("LowLevel", async_operations=custom_manager)
        assert lowlevel.async_operations is custom_manager
        assert lowlevel.async_operations.get_operation(operation.token) is operation

        # Without injection each server builds its own default manager.
        default_fastmcp = FastMCP("Default")
        default_server = Server("Default")
        assert isinstance(default_fastmcp.async_operations, AsyncOperationManager)
        assert isinstance(default_server.async_operations, AsyncOperationManager)
        assert default_fastmcp.async_operations is not custom_manager

        # A shared manager is visible from both servers.
        new_op = fastmcp.async_operations.create_operation("new_tool", {}, "session2")
        assert lowlevel.async_operations.get_operation(new_op.token) is new_op


class TestAsyncOperation:
    """Test AsyncOperation dataclass."""

    def test_terminal_and_expiration_logic(self):
        """Test terminal state detection and expiration logic."""
        now = time.time()
        operation = AsyncOperation("test", "test", {}, "session", "submitted", now, 3600)

        # Terminal-state detection per status value
        for status_str, expected_terminal in [
            ("submitted", False),
            ("working", False),
            ("completed", True),
            ("failed", True),
            ("canceled", True),
            ("unknown", True),
        ]:
            operation.status = cast(AsyncOperationStatus, status_str)
            assert operation.is_terminal == expected_terminal

        # Non-terminal operations never expire.
        operation.status = cast(AsyncOperationStatus, "working")
        assert not operation.is_expired

        operation.status = cast(AsyncOperationStatus, "completed")
        operation.created_at = now - 1800  # 30 minutes ago: within keep-alive
        assert not operation.is_expired

        operation.created_at = now - 7200  # 2 hours ago: past keep-alive
        assert operation.is_expired
class TestLowlevelServerAsyncOperations:
    """Test lowlevel Server async operations integration."""

    def test_check_async_status_invalid_token(self):
        """Test check_tool_async_status handler with invalid token."""
        manager = AsyncOperationManager()
        server = Server("Test", async_operations=manager)

        # Register the handler
        @server.check_tool_async_status()
        async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult:
            # This function is not actually called due to built-in logic
            return types.CheckToolAsyncStatusResult(status="unknown")

        invalid_request = types.CheckToolAsyncStatusRequest(
            params=types.CheckToolAsyncStatusParams(token="invalid_token")
        )
        handler = server.request_handlers[types.CheckToolAsyncStatusRequest]

        # Drive the coroutine directly; no nested wrapper function is needed.
        with pytest.raises(McpError) as exc_info:
            asyncio.run(handler(invalid_request))

        assert exc_info.value.error.code == -32602
        assert exc_info.value.error.message == "Invalid token"

    def test_check_async_status_expired_token(self):
        """Test check_tool_async_status handler with expired token."""
        manager = AsyncOperationManager()
        server = Server("Test", async_operations=manager)

        @server.check_tool_async_status()
        async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult:
            return types.CheckToolAsyncStatusResult(status="unknown")

        # Complete an operation with a short keep-alive, then backdate it.
        operation = manager.create_operation("test_tool", {}, "session1", keep_alive=1)
        manager.complete_operation(operation.token, types.CallToolResult(content=[]))
        operation.created_at = time.time() - 2

        expired_request = types.CheckToolAsyncStatusRequest(
            params=types.CheckToolAsyncStatusParams(token=operation.token)
        )
        handler = server.request_handlers[types.CheckToolAsyncStatusRequest]

        with pytest.raises(McpError) as exc_info:
            asyncio.run(handler(expired_request))

        assert exc_info.value.error.code == -32602
        assert exc_info.value.error.message == "Token expired"

    def test_check_async_status_valid_operation(self):
        """Test check_tool_async_status handler with valid operation."""
        manager = AsyncOperationManager()
        server = Server("Test", async_operations=manager)

        @server.check_tool_async_status()
        async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult:
            return types.CheckToolAsyncStatusResult(status="unknown")

        # A live, in-progress operation
        operation = manager.create_operation("test_tool", {}, "session1")
        manager.mark_working(operation.token)

        valid_request = types.CheckToolAsyncStatusRequest(
            params=types.CheckToolAsyncStatusParams(token=operation.token)
        )
        handler = server.request_handlers[types.CheckToolAsyncStatusRequest]

        result = asyncio.run(handler(valid_request))

        assert isinstance(result, types.ServerResult)
        status_result = cast(types.CheckToolAsyncStatusResult, result.root)
        assert status_result.status == "working"
        assert status_result.error is None

    def test_check_async_status_failed_operation(self):
        """Test check_tool_async_status handler with failed operation."""
        manager = AsyncOperationManager()
        server = Server("Test", async_operations=manager)

        @server.check_tool_async_status()
        async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult:
            return types.CheckToolAsyncStatusResult(status="unknown")

        operation = manager.create_operation("test_tool", {}, "session1")
        manager.fail_operation(operation.token, "Something went wrong")

        failed_request = types.CheckToolAsyncStatusRequest(
            params=types.CheckToolAsyncStatusParams(token=operation.token)
        )
        handler = server.request_handlers[types.CheckToolAsyncStatusRequest]

        result = asyncio.run(handler(failed_request))

        assert isinstance(result, types.ServerResult)
        status_result = cast(types.CheckToolAsyncStatusResult, result.root)
        assert status_result.status == "failed"
        assert status_result.error == "Something went wrong"

    def test_get_async_result_invalid_token(self):
        """Test get_tool_async_result handler with invalid token."""
        manager = AsyncOperationManager()
        server = Server("Test", async_operations=manager)

        @server.get_tool_async_result()
        async def get_result_handler(token: str) -> types.GetToolAsyncPayloadResult:
            return types.GetToolAsyncPayloadResult(result=types.CallToolResult(content=[]))

        invalid_request = types.GetToolAsyncPayloadRequest(
            params=types.GetToolAsyncPayloadParams(token="invalid_token")
        )
        handler = server.request_handlers[types.GetToolAsyncPayloadRequest]

        with pytest.raises(McpError) as exc_info:
            asyncio.run(handler(invalid_request))

        assert exc_info.value.error.code == -32602
        assert exc_info.value.error.message == "Invalid token"

    def test_get_async_result_expired_token(self):
        """Test get_tool_async_result handler with expired token."""
        manager = AsyncOperationManager()
        server = Server("Test", async_operations=manager)

        @server.get_tool_async_result()
        async def get_result_handler(token: str) -> types.GetToolAsyncPayloadResult:
            return types.GetToolAsyncPayloadResult(result=types.CallToolResult(content=[]))

        # Complete an operation with a short keep-alive, then backdate it.
        operation = manager.create_operation("test_tool", {}, "session1", keep_alive=1)
        manager.complete_operation(operation.token, types.CallToolResult(content=[]))
        operation.created_at = time.time() - 2

        expired_request = types.GetToolAsyncPayloadRequest(
            params=types.GetToolAsyncPayloadParams(token=operation.token)
        )
        handler = server.request_handlers[types.GetToolAsyncPayloadRequest]

        with pytest.raises(McpError) as exc_info:
            asyncio.run(handler(expired_request))

        assert exc_info.value.error.code == -32602
        assert exc_info.value.error.message == "Token expired"

    def test_get_async_result_not_completed(self):
        """Test get_tool_async_result handler with non-completed operation."""
        manager = AsyncOperationManager()
        server = Server("Test", async_operations=manager)

        @server.get_tool_async_result()
        async def get_result_handler(token: str) -> types.GetToolAsyncPayloadResult:
            return types.GetToolAsyncPayloadResult(result=types.CallToolResult(content=[]))

        # An operation that is still in progress
        operation = manager.create_operation("test_tool", {}, "session1")
        manager.mark_working(operation.token)

        working_request = types.GetToolAsyncPayloadRequest(
            params=types.GetToolAsyncPayloadParams(token=operation.token)
        )
        handler = server.request_handlers[types.GetToolAsyncPayloadRequest]

        with pytest.raises(McpError) as exc_info:
            asyncio.run(handler(working_request))

        assert exc_info.value.error.code == -32600
        assert exc_info.value.error.message == "Operation not completed (status: working)"

    def test_get_async_result_completed_with_result(self):
        """Test get_tool_async_result handler with completed operation."""
        manager = AsyncOperationManager()
        server = Server("Test", async_operations=manager)

        @server.get_tool_async_result()
        async def get_result_handler(token: str) -> types.GetToolAsyncPayloadResult:
            return types.GetToolAsyncPayloadResult(result=types.CallToolResult(content=[]))

        # A completed operation carrying a concrete result payload
        operation = manager.create_operation("test_tool", {}, "session1")
        result = types.CallToolResult(content=[types.TextContent(type="text", text="success")])
        manager.complete_operation(operation.token, result)

        completed_request = types.GetToolAsyncPayloadRequest(
            params=types.GetToolAsyncPayloadParams(token=operation.token)
        )
        handler = server.request_handlers[types.GetToolAsyncPayloadRequest]

        response = asyncio.run(handler(completed_request))

        assert isinstance(response, types.ServerResult)
        payload_result = cast(types.GetToolAsyncPayloadResult, response.root)
        assert payload_result.result == result
src/mcp/server/lowlevel/server.py | 24 ++-- src/mcp/types.py | 53 +++++--- .../server/test_lowlevel_async_operations.py | 118 ++++++++---------- 4 files changed, 114 insertions(+), 99 deletions(-) diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index 854cff8c0..73aecc584 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -293,7 +293,7 @@ async def call_tool( params=types.CallToolRequestParams( name=name, arguments=arguments, - async_properties=async_properties, + operation_params=async_properties, ), ) ), @@ -307,7 +307,7 @@ async def call_tool( return result - async def check_tool_async_status(self, token: str) -> types.CheckToolAsyncStatusResult: + async def get_operation_status(self, token: str) -> types.GetOperationStatusResult: """Check the status of an async tool operation. Args: @@ -318,14 +318,14 @@ async def check_tool_async_status(self, token: str) -> types.CheckToolAsyncStatu """ return await self.send_request( types.ClientRequest( - types.CheckToolAsyncStatusRequest( - params=types.CheckToolAsyncStatusParams(token=token), + types.GetOperationStatusRequest( + params=types.GetOperationStatusParams(token=token), ) ), - types.CheckToolAsyncStatusResult, + types.GetOperationStatusResult, ) - async def get_tool_async_result(self, token: str) -> types.GetToolAsyncPayloadResult: + async def get_operation_result(self, token: str) -> types.GetOperationPayloadResult: """Get the result of a completed async tool operation. 
Args: @@ -336,11 +336,11 @@ async def get_tool_async_result(self, token: str) -> types.GetToolAsyncPayloadRe """ return await self.send_request( types.ClientRequest( - types.GetToolAsyncPayloadRequest( - params=types.GetToolAsyncPayloadParams(token=token), + types.GetOperationPayloadRequest( + params=types.GetOperationPayloadParams(token=token), ) ), - types.GetToolAsyncPayloadResult, + types.GetOperationPayloadResult, ) async def _validate_tool_result(self, name: str, result: types.CallToolResult) -> None: diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 1e909d0c1..78494c6f7 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -568,35 +568,35 @@ def _validate_operation_token(self, token: str) -> AsyncOperation: return operation - def check_tool_async_status(self): + def get_operation_status(self): """Register a handler for checking async tool execution status.""" - def decorator(func: Callable[[str], Awaitable[types.CheckToolAsyncStatusResult]]): - logger.debug("Registering handler for CheckToolAsyncStatusRequest") + def decorator(func: Callable[[str], Awaitable[types.GetOperationStatusResult]]): + logger.debug("Registering handler for GetOperationStatusRequest") - async def handler(req: types.CheckToolAsyncStatusRequest): + async def handler(req: types.GetOperationStatusRequest): # Validate token and get operation operation = self._validate_operation_token(req.params.token) return types.ServerResult( - types.CheckToolAsyncStatusResult( + types.GetOperationStatusResult( status=operation.status, error=operation.error, ) ) - self.request_handlers[types.CheckToolAsyncStatusRequest] = handler + self.request_handlers[types.GetOperationStatusRequest] = handler return func return decorator - def get_tool_async_result(self): + def get_operation_result(self): """Register a handler for retrieving async tool execution results.""" - def decorator(func: Callable[[str], 
Awaitable[types.GetToolAsyncPayloadResult]]): - logger.debug("Registering handler for GetToolAsyncPayloadRequest") + def decorator(func: Callable[[str], Awaitable[types.GetOperationPayloadResult]]): + logger.debug("Registering handler for GetOperationPayloadRequest") - async def handler(req: types.GetToolAsyncPayloadRequest): + async def handler(req: types.GetOperationPayloadRequest): # Validate token and get operation operation = self._validate_operation_token(req.params.token) @@ -608,9 +608,9 @@ async def handler(req: types.GetToolAsyncPayloadRequest): if not operation.result: raise McpError(types.ErrorData(code=-32600, message="No result available for completed operation")) - return types.ServerResult(types.GetToolAsyncPayloadResult(result=operation.result)) + return types.ServerResult(types.GetOperationPayloadResult(result=operation.result)) - self.request_handlers[types.GetToolAsyncPayloadRequest] = handler + self.request_handlers[types.GetOperationPayloadRequest] = handler return func return decorator diff --git a/src/mcp/types.py b/src/mcp/types.py index e8c4c68f2..c5b1c632e 100644 --- a/src/mcp/types.py +++ b/src/mcp/types.py @@ -53,7 +53,14 @@ class Meta(BaseModel): model_config = ConfigDict(extra="allow") + class Operation(BaseModel): + token: str + """The token associated with the originating asynchronous tool call.""" + model_config = ConfigDict(extra="allow") + meta: Meta | None = Field(alias="_meta", default=None) + operation: Operation | None = Field(alias="_operation", default=None) + """Async operation parameters, only used when a request is sent during an asynchronous tool call.""" class PaginatedRequestParams(RequestParams): @@ -68,11 +75,18 @@ class NotificationParams(BaseModel): class Meta(BaseModel): model_config = ConfigDict(extra="allow") + class Operation(BaseModel): + token: str + """The token associated with the originating asynchronous tool call.""" + model_config = ConfigDict(extra="allow") + meta: Meta | None = Field(alias="_meta", 
# Async status checking types
class GetOperationStatusParams(RequestParams):
    """Parameters for checking async tool status."""

    token: str
    """Token from the original async tool call."""


class GetOperationStatusRequest(Request[GetOperationStatusParams, Literal["tools/async/status"]]):
    """Request to check the status of an async tool call."""

    method: Literal["tools/async/status"] = "tools/async/status"
    params: GetOperationStatusParams


# PEP 257 attribute-docstring convention: the documenting string must FOLLOW
# the assignment; placed before it, it is a dead expression that no tool
# associates with the alias.
AsyncOperationStatus = Literal["submitted", "working", "input_required", "completed", "canceled", "failed", "unknown"]
"""Status values for async operations."""


class GetOperationStatusResult(Result):
    """Result of checking async tool status."""

    status: AsyncOperationStatus
    """Current status of the async operation."""
    error: str | None = None
    """Error message if status is 'failed'."""


# Async payload retrieval types
class GetOperationPayloadParams(RequestParams):
    """Parameters for getting async tool payload."""

    token: str
    """Token from the original async tool call."""


class GetOperationPayloadRequest(Request[GetOperationPayloadParams, Literal["tools/async/result"]]):
    """Request to get the result of a completed async tool call."""

    method: Literal["tools/async/result"] = "tools/async/result"
    params: GetOperationPayloadParams


class GetOperationPayloadResult(Result):
    """Result containing the final async tool call result."""

    result: "CallToolResult"
Server("Test", async_operations=manager) # Register the handler - @server.check_tool_async_status() - async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult: + @server.get_operation_status() + async def check_status_handler(token: str) -> types.GetOperationStatusResult: # This function is not actually called due to built-in logic - return types.CheckToolAsyncStatusResult(status="unknown") + return types.GetOperationStatusResult(status="unknown") # Test invalid token - invalid_request = types.CheckToolAsyncStatusRequest( - params=types.CheckToolAsyncStatusParams(token="invalid_token") - ) + invalid_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token="invalid_token")) - handler = server.request_handlers[types.CheckToolAsyncStatusRequest] + handler = server.request_handlers[types.GetOperationStatusRequest] with pytest.raises(McpError) as exc_info: @@ -44,13 +42,13 @@ async def run_handler(): assert exc_info.value.error.message == "Invalid token" def test_check_async_status_expired_token(self): - """Test check_tool_async_status handler with expired token.""" + """Test get_operation_status handler with expired token.""" manager = AsyncOperationManager() server = Server("Test", async_operations=manager) - @server.check_tool_async_status() - async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult: - return types.CheckToolAsyncStatusResult(status="unknown") + @server.get_operation_status() + async def check_status_handler(token: str) -> types.GetOperationStatusResult: + return types.GetOperationStatusResult(status="unknown") # Create and complete operation with short keepAlive operation = manager.create_operation("test_tool", {}, "session1", keep_alive=1) @@ -59,11 +57,9 @@ async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult: # Make it expired operation.created_at = time.time() - 2 - expired_request = types.CheckToolAsyncStatusRequest( - 
params=types.CheckToolAsyncStatusParams(token=operation.token) - ) + expired_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) - handler = server.request_handlers[types.CheckToolAsyncStatusRequest] + handler = server.request_handlers[types.GetOperationStatusRequest] with pytest.raises(McpError) as exc_info: @@ -76,23 +72,21 @@ async def run_handler(): assert exc_info.value.error.message == "Token expired" def test_check_async_status_valid_operation(self): - """Test check_tool_async_status handler with valid operation.""" + """Test get_operation_status handler with valid operation.""" manager = AsyncOperationManager() server = Server("Test", async_operations=manager) - @server.check_tool_async_status() - async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult: - return types.CheckToolAsyncStatusResult(status="unknown") + @server.get_operation_status() + async def check_status_handler(token: str) -> types.GetOperationStatusResult: + return types.GetOperationStatusResult(status="unknown") # Create valid operation operation = manager.create_operation("test_tool", {}, "session1") manager.mark_working(operation.token) - valid_request = types.CheckToolAsyncStatusRequest( - params=types.CheckToolAsyncStatusParams(token=operation.token) - ) + valid_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) - handler = server.request_handlers[types.CheckToolAsyncStatusRequest] + handler = server.request_handlers[types.GetOperationStatusRequest] async def run_handler(): return await handler(valid_request) @@ -100,28 +94,26 @@ async def run_handler(): result = asyncio.run(run_handler()) assert isinstance(result, types.ServerResult) - status_result = cast(types.CheckToolAsyncStatusResult, result.root) + status_result = cast(types.GetOperationStatusResult, result.root) assert status_result.status == "working" assert status_result.error is None def 
test_check_async_status_failed_operation(self): - """Test check_tool_async_status handler with failed operation.""" + """Test get_operation_status handler with failed operation.""" manager = AsyncOperationManager() server = Server("Test", async_operations=manager) - @server.check_tool_async_status() - async def check_status_handler(token: str) -> types.CheckToolAsyncStatusResult: - return types.CheckToolAsyncStatusResult(status="unknown") + @server.get_operation_status() + async def check_status_handler(token: str) -> types.GetOperationStatusResult: + return types.GetOperationStatusResult(status="unknown") # Create and fail operation operation = manager.create_operation("test_tool", {}, "session1") manager.fail_operation(operation.token, "Something went wrong") - failed_request = types.CheckToolAsyncStatusRequest( - params=types.CheckToolAsyncStatusParams(token=operation.token) - ) + failed_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) - handler = server.request_handlers[types.CheckToolAsyncStatusRequest] + handler = server.request_handlers[types.GetOperationStatusRequest] async def run_handler(): return await handler(failed_request) @@ -129,24 +121,24 @@ async def run_handler(): result = asyncio.run(run_handler()) assert isinstance(result, types.ServerResult) - status_result = cast(types.CheckToolAsyncStatusResult, result.root) + status_result = cast(types.GetOperationStatusResult, result.root) assert status_result.status == "failed" assert status_result.error == "Something went wrong" def test_get_async_result_invalid_token(self): - """Test get_tool_async_result handler with invalid token.""" + """Test get_operation_result handler with invalid token.""" manager = AsyncOperationManager() server = Server("Test", async_operations=manager) - @server.get_tool_async_result() - async def get_result_handler(token: str) -> types.GetToolAsyncPayloadResult: - return 
types.GetToolAsyncPayloadResult(result=types.CallToolResult(content=[])) + @server.get_operation_result() + async def get_result_handler(token: str) -> types.GetOperationPayloadResult: + return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) - invalid_request = types.GetToolAsyncPayloadRequest( - params=types.GetToolAsyncPayloadParams(token="invalid_token") + invalid_request = types.GetOperationPayloadRequest( + params=types.GetOperationPayloadParams(token="invalid_token") ) - handler = server.request_handlers[types.GetToolAsyncPayloadRequest] + handler = server.request_handlers[types.GetOperationPayloadRequest] with pytest.raises(McpError) as exc_info: @@ -159,13 +151,13 @@ async def run_handler(): assert exc_info.value.error.message == "Invalid token" def test_get_async_result_expired_token(self): - """Test get_tool_async_result handler with expired token.""" + """Test get_operation_result handler with expired token.""" manager = AsyncOperationManager() server = Server("Test", async_operations=manager) - @server.get_tool_async_result() - async def get_result_handler(token: str) -> types.GetToolAsyncPayloadResult: - return types.GetToolAsyncPayloadResult(result=types.CallToolResult(content=[])) + @server.get_operation_result() + async def get_result_handler(token: str) -> types.GetOperationPayloadResult: + return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create and complete operation with short keepAlive operation = manager.create_operation("test_tool", {}, "session1", keep_alive=1) @@ -174,11 +166,11 @@ async def get_result_handler(token: str) -> types.GetToolAsyncPayloadResult: # Make it expired operation.created_at = time.time() - 2 - expired_request = types.GetToolAsyncPayloadRequest( - params=types.GetToolAsyncPayloadParams(token=operation.token) + expired_request = types.GetOperationPayloadRequest( + params=types.GetOperationPayloadParams(token=operation.token) ) - handler = 
server.request_handlers[types.GetToolAsyncPayloadRequest] + handler = server.request_handlers[types.GetOperationPayloadRequest] with pytest.raises(McpError) as exc_info: @@ -191,23 +183,23 @@ async def run_handler(): assert exc_info.value.error.message == "Token expired" def test_get_async_result_not_completed(self): - """Test get_tool_async_result handler with non-completed operation.""" + """Test get_operation_result handler with non-completed operation.""" manager = AsyncOperationManager() server = Server("Test", async_operations=manager) - @server.get_tool_async_result() - async def get_result_handler(token: str) -> types.GetToolAsyncPayloadResult: - return types.GetToolAsyncPayloadResult(result=types.CallToolResult(content=[])) + @server.get_operation_result() + async def get_result_handler(token: str) -> types.GetOperationPayloadResult: + return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create operation that's still working operation = manager.create_operation("test_tool", {}, "session1") manager.mark_working(operation.token) - working_request = types.GetToolAsyncPayloadRequest( - params=types.GetToolAsyncPayloadParams(token=operation.token) + working_request = types.GetOperationPayloadRequest( + params=types.GetOperationPayloadParams(token=operation.token) ) - handler = server.request_handlers[types.GetToolAsyncPayloadRequest] + handler = server.request_handlers[types.GetOperationPayloadRequest] with pytest.raises(McpError) as exc_info: @@ -220,24 +212,24 @@ async def run_handler(): assert exc_info.value.error.message == "Operation not completed (status: working)" def test_get_async_result_completed_with_result(self): - """Test get_tool_async_result handler with completed operation.""" + """Test get_operation_result handler with completed operation.""" manager = AsyncOperationManager() server = Server("Test", async_operations=manager) - @server.get_tool_async_result() - async def get_result_handler(token: str) -> 
types.GetToolAsyncPayloadResult: - return types.GetToolAsyncPayloadResult(result=types.CallToolResult(content=[])) + @server.get_operation_result() + async def get_result_handler(token: str) -> types.GetOperationPayloadResult: + return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create and complete operation with result operation = manager.create_operation("test_tool", {}, "session1") result = types.CallToolResult(content=[types.TextContent(type="text", text="success")]) manager.complete_operation(operation.token, result) - completed_request = types.GetToolAsyncPayloadRequest( - params=types.GetToolAsyncPayloadParams(token=operation.token) + completed_request = types.GetOperationPayloadRequest( + params=types.GetOperationPayloadParams(token=operation.token) ) - handler = server.request_handlers[types.GetToolAsyncPayloadRequest] + handler = server.request_handlers[types.GetOperationPayloadRequest] async def run_handler(): return await handler(completed_request) @@ -245,5 +237,5 @@ async def run_handler(): response = asyncio.run(run_handler()) assert isinstance(response, types.ServerResult) - payload_result = cast(types.GetToolAsyncPayloadResult, response.root) + payload_result = cast(types.GetOperationPayloadResult, response.root) assert payload_result.result == result From 0dc8d430d614b8c7d55fb63bcfbd68f0f5eb3db7 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Tue, 23 Sep 2025 11:33:31 -0700 Subject: [PATCH 06/41] Handle cancellation notifications on async ops --- src/mcp/server/lowlevel/server.py | 46 +++++++++++-- tests/server/test_cancellation_logic.py | 86 +++++++++++++++++++++++++ 2 files changed, 127 insertions(+), 5 deletions(-) create mode 100644 tests/server/test_cancellation_logic.py diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 78494c6f7..bdd654792 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -90,6 +90,7 @@ async def main(): from 
mcp.shared.exceptions import McpError from mcp.shared.message import ServerMessageMetadata, SessionMessage from mcp.shared.session import RequestResponder +from mcp.types import RequestId logger = logging.getLogger(__name__) @@ -147,10 +148,14 @@ def __init__( self.instructions = instructions self.lifespan = lifespan self.async_operations = async_operations or AsyncOperationManager() + # Track request ID to operation token mapping for cancellation + self._request_to_operation: dict[RequestId, str] = {} self.request_handlers: dict[type, Callable[..., Awaitable[types.ServerResult]]] = { types.PingRequest: _ping_handler, } - self.notification_handlers: dict[type, Callable[..., Awaitable[None]]] = {} + self.notification_handlers: dict[type, Callable[..., Awaitable[None]]] = { + types.CancelledNotification: self._handle_cancelled_notification, + } self._tool_cache: dict[str, types.Tool] = {} logger.debug("Initializing server %r", name) @@ -566,6 +571,10 @@ def _validate_operation_token(self, token: str) -> AsyncOperation: if operation.is_expired: raise McpError(types.ErrorData(code=-32602, message="Token expired")) + # Check if operation was cancelled - ignore subsequent requests + if operation.status == "canceled": + raise McpError(types.ErrorData(code=-32602, message="Operation was cancelled")) + return operation def get_operation_status(self): @@ -615,6 +624,23 @@ async def handler(req: types.GetOperationPayloadRequest): return decorator + def handle_cancelled_notification(self, request_id: RequestId) -> None: + """Handle cancellation notification for a request.""" + # Check if this request ID corresponds to an async operation + if request_id in self._request_to_operation: + token = self._request_to_operation[request_id] + # Cancel the operation + if self.async_operations.cancel_operation(token): + logger.debug(f"Cancelled async operation {token} for request {request_id}") + # Clean up the mapping + del self._request_to_operation[request_id] + + async def 
_handle_cancelled_notification(self, notification: types.CancelledNotification) -> None: + """Handle cancelled notification from client.""" + request_id = notification.params.requestId + logger.debug(f"Received cancellation notification for request {request_id}") + self.handle_cancelled_notification(request_id) + async def run( self, read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], @@ -695,7 +721,7 @@ async def _handle_request( if handler := self.request_handlers.get(type(req)): # type: ignore logger.debug("Dispatching request of type %s", type(req).__name__) - token = None + context_token = None try: # Extract request context from message metadata request_data = None @@ -704,7 +730,7 @@ async def _handle_request( # Set our global state that can be retrieved via # app.get_request_context() - token = request_ctx.set( + context_token = request_ctx.set( RequestContext( message.request_id, message.request_meta, @@ -714,6 +740,16 @@ async def _handle_request( ) ) response = await handler(req) + + # Track async operations for cancellation + if isinstance(req, types.CallToolRequest): + result = response.root + if isinstance(result, types.CallToolResult) and result.operation_result is not None: + # This is an async operation, track the request ID to token mapping + operation_token = result.operation_result.token + self._request_to_operation[message.request_id] = operation_token + logger.debug(f"Tracking async operation {operation_token} for request {message.request_id}") + except McpError as err: response = err.error except anyio.get_cancelled_exc_class(): @@ -728,8 +764,8 @@ async def _handle_request( response = types.ErrorData(code=0, message=str(err), data=None) finally: # Reset the global state after we are done - if token is not None: - request_ctx.reset(token) + if context_token is not None: + request_ctx.reset(context_token) await message.respond(response) else: diff --git a/tests/server/test_cancellation_logic.py 
b/tests/server/test_cancellation_logic.py new file mode 100644 index 000000000..feb840acf --- /dev/null +++ b/tests/server/test_cancellation_logic.py @@ -0,0 +1,86 @@ +"""Tests for async operation cancellation logic.""" + +import pytest + +import mcp.types as types +from mcp.server.lowlevel.async_operations import AsyncOperationManager +from mcp.server.lowlevel.server import Server +from mcp.shared.exceptions import McpError + + +class TestCancellationLogic: + """Test cancellation logic for async operations.""" + + def test_handle_cancelled_notification(self): + """Test handling of cancelled notifications.""" + manager = AsyncOperationManager() + server = Server("Test", async_operations=manager) + + # Create an operation + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + + # Track the operation with a request ID + request_id = "req_123" + server._request_to_operation[request_id] = operation.token + + # Handle cancellation + server.handle_cancelled_notification(request_id) + + # Verify operation was cancelled + cancelled_op = manager.get_operation(operation.token) + assert cancelled_op is not None + assert cancelled_op.status == "canceled" + + # Verify mapping was cleaned up + assert request_id not in server._request_to_operation + + def test_cancelled_notification_handler(self): + """Test the async cancelled notification handler.""" + manager = AsyncOperationManager() + server = Server("Test", async_operations=manager) + + # Create an operation + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + + # Track the operation with a request ID + request_id = "req_456" + server._request_to_operation[request_id] = operation.token + + # Create cancelled notification + notification = types.CancelledNotification(params=types.CancelledNotificationParams(requestId=request_id)) + + # Handle the notification + import asyncio + + asyncio.run(server._handle_cancelled_notification(notification)) + + # Verify operation 
was cancelled + cancelled_op = manager.get_operation(operation.token) + assert cancelled_op is not None + assert cancelled_op.status == "canceled" + + def test_validate_operation_token_cancelled(self): + """Test that cancelled operations are rejected.""" + manager = AsyncOperationManager() + server = Server("Test", async_operations=manager) + + # Create and cancel an operation + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + manager.cancel_operation(operation.token) + + # Verify that accessing cancelled operation raises error + with pytest.raises(McpError) as exc_info: + server._validate_operation_token(operation.token) + + assert exc_info.value.error.code == -32602 + assert "cancelled" in exc_info.value.error.message.lower() + + def test_nonexistent_request_id_cancellation(self): + """Test cancellation of non-existent request ID.""" + server = Server("Test") + + # Should not raise error for non-existent request ID + server.handle_cancelled_notification("nonexistent_request") + + # Verify no operations were affected + assert len(server._request_to_operation) == 0 From e70f44190c78e83e6f6c3da5380fa0788cd616b2 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Tue, 23 Sep 2025 11:56:11 -0700 Subject: [PATCH 07/41] Implement support for input_required status --- src/mcp/server/lowlevel/async_operations.py | 26 +++ src/mcp/server/lowlevel/server.py | 39 ++++ tests/server/test_cancellation_logic.py | 86 ------- .../server/test_lowlevel_async_operations.py | 218 ++++++++++++++++++ 4 files changed, 283 insertions(+), 86 deletions(-) delete mode 100644 tests/server/test_cancellation_logic.py diff --git a/src/mcp/server/lowlevel/async_operations.py b/src/mcp/server/lowlevel/async_operations.py index 9d3f78dc7..bafa7f262 100644 --- a/src/mcp/server/lowlevel/async_operations.py +++ b/src/mcp/server/lowlevel/async_operations.py @@ -172,6 +172,32 @@ def cancel_session_operations(self, session_id: str) -> int: return canceled_count + def 
mark_input_required(self, token: str) -> bool: + """Mark operation as requiring input from client.""" + operation = self._operations.get(token) + if not operation: + return False + + # Can only move to input_required from submitted or working states + if operation.status not in ("submitted", "working"): + return False + + operation.status = "input_required" + return True + + def mark_input_completed(self, token: str) -> bool: + """Mark operation as no longer requiring input, return to working state.""" + operation = self._operations.get(token) + if not operation: + return False + + # Can only move from input_required back to working + if operation.status != "input_required": + return False + + operation.status = "working" + return True + async def start_cleanup_task(self) -> None: """Start the background cleanup task.""" if self._cleanup_task is not None: diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index bdd654792..ad7b75359 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -641,6 +641,35 @@ async def _handle_cancelled_notification(self, notification: types.CancelledNoti logger.debug(f"Received cancellation notification for request {request_id}") self.handle_cancelled_notification(request_id) + def send_request_for_operation(self, token: str, request: types.ServerRequest) -> None: + """Send a request associated with an async operation.""" + # Mark operation as requiring input + if self.async_operations.mark_input_required(token): + # Add operation token to request + if hasattr(request.root, "params") and request.root.params is not None: + if not hasattr(request.root.params, "operation") or request.root.params.operation is None: + # Create operation field if it doesn't exist + operation_data = types.RequestParams.Operation(token=token) + request.root.params.operation = operation_data + logger.debug(f"Marked operation {token} as input_required and added to request") + + def 
send_notification_for_operation(self, token: str, notification: types.ServerNotification) -> None: + """Send a notification associated with an async operation.""" + # Mark operation as requiring input + if self.async_operations.mark_input_required(token): + # Add operation token to notification + if hasattr(notification.root, "params") and notification.root.params is not None: + if not hasattr(notification.root.params, "operation") or notification.root.params.operation is None: + # Create operation field if it doesn't exist + operation_data = types.NotificationParams.Operation(token=token) + notification.root.params.operation = operation_data + logger.debug(f"Marked operation {token} as input_required and added to notification") + + def complete_request_for_operation(self, token: str) -> None: + """Mark that a request for an operation has been completed.""" + if self.async_operations.mark_input_completed(token): + logger.debug(f"Marked operation {token} as no longer requiring input") + async def run( self, read_stream: MemoryObjectReceiveStream[SessionMessage | Exception], @@ -741,6 +770,16 @@ async def _handle_request( ) response = await handler(req) + # Handle operation token in response (for input_required operations) + if ( + hasattr(req, "params") + and req.params is not None + and hasattr(req.params, "operation") + and req.params.operation is not None + ): + operation_token = req.params.operation.token + self.complete_request_for_operation(operation_token) + # Track async operations for cancellation if isinstance(req, types.CallToolRequest): result = response.root diff --git a/tests/server/test_cancellation_logic.py b/tests/server/test_cancellation_logic.py deleted file mode 100644 index feb840acf..000000000 --- a/tests/server/test_cancellation_logic.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Tests for async operation cancellation logic.""" - -import pytest - -import mcp.types as types -from mcp.server.lowlevel.async_operations import AsyncOperationManager -from 
mcp.server.lowlevel.server import Server -from mcp.shared.exceptions import McpError - - -class TestCancellationLogic: - """Test cancellation logic for async operations.""" - - def test_handle_cancelled_notification(self): - """Test handling of cancelled notifications.""" - manager = AsyncOperationManager() - server = Server("Test", async_operations=manager) - - # Create an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") - - # Track the operation with a request ID - request_id = "req_123" - server._request_to_operation[request_id] = operation.token - - # Handle cancellation - server.handle_cancelled_notification(request_id) - - # Verify operation was cancelled - cancelled_op = manager.get_operation(operation.token) - assert cancelled_op is not None - assert cancelled_op.status == "canceled" - - # Verify mapping was cleaned up - assert request_id not in server._request_to_operation - - def test_cancelled_notification_handler(self): - """Test the async cancelled notification handler.""" - manager = AsyncOperationManager() - server = Server("Test", async_operations=manager) - - # Create an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") - - # Track the operation with a request ID - request_id = "req_456" - server._request_to_operation[request_id] = operation.token - - # Create cancelled notification - notification = types.CancelledNotification(params=types.CancelledNotificationParams(requestId=request_id)) - - # Handle the notification - import asyncio - - asyncio.run(server._handle_cancelled_notification(notification)) - - # Verify operation was cancelled - cancelled_op = manager.get_operation(operation.token) - assert cancelled_op is not None - assert cancelled_op.status == "canceled" - - def test_validate_operation_token_cancelled(self): - """Test that cancelled operations are rejected.""" - manager = AsyncOperationManager() - server = Server("Test", async_operations=manager) - 
- # Create and cancel an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") - manager.cancel_operation(operation.token) - - # Verify that accessing cancelled operation raises error - with pytest.raises(McpError) as exc_info: - server._validate_operation_token(operation.token) - - assert exc_info.value.error.code == -32602 - assert "cancelled" in exc_info.value.error.message.lower() - - def test_nonexistent_request_id_cancellation(self): - """Test cancellation of non-existent request ID.""" - server = Server("Test") - - # Should not raise error for non-existent request ID - server.handle_cancelled_notification("nonexistent_request") - - # Verify no operations were affected - assert len(server._request_to_operation) == 0 diff --git a/tests/server/test_lowlevel_async_operations.py b/tests/server/test_lowlevel_async_operations.py index a1f58f653..f822a19f4 100644 --- a/tests/server/test_lowlevel_async_operations.py +++ b/tests/server/test_lowlevel_async_operations.py @@ -239,3 +239,221 @@ async def run_handler(): assert isinstance(response, types.ServerResult) payload_result = cast(types.GetOperationPayloadResult, response.root) assert payload_result.result == result + + +class TestCancellationLogic: + """Test cancellation logic for async operations.""" + + def test_handle_cancelled_notification(self): + """Test handling of cancelled notifications.""" + manager = AsyncOperationManager() + server = Server("Test", async_operations=manager) + + # Create an operation + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + + # Track the operation with a request ID + request_id = "req_123" + server._request_to_operation[request_id] = operation.token + + # Handle cancellation + server.handle_cancelled_notification(request_id) + + # Verify operation was cancelled + cancelled_op = manager.get_operation(operation.token) + assert cancelled_op is not None + assert cancelled_op.status == "canceled" + + # Verify 
mapping was cleaned up + assert request_id not in server._request_to_operation + + def test_cancelled_notification_handler(self): + """Test the async cancelled notification handler.""" + manager = AsyncOperationManager() + server = Server("Test", async_operations=manager) + + # Create an operation + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + + # Track the operation with a request ID + request_id = "req_456" + server._request_to_operation[request_id] = operation.token + + # Create cancelled notification + notification = types.CancelledNotification(params=types.CancelledNotificationParams(requestId=request_id)) + + # Handle the notification + import asyncio + + asyncio.run(server._handle_cancelled_notification(notification)) + + # Verify operation was cancelled + cancelled_op = manager.get_operation(operation.token) + assert cancelled_op is not None + assert cancelled_op.status == "canceled" + + def test_validate_operation_token_cancelled(self): + """Test that cancelled operations are rejected.""" + manager = AsyncOperationManager() + server = Server("Test", async_operations=manager) + + # Create and cancel an operation + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + manager.cancel_operation(operation.token) + + # Verify that accessing cancelled operation raises error + with pytest.raises(McpError) as exc_info: + server._validate_operation_token(operation.token) + + assert exc_info.value.error.code == -32602 + assert "cancelled" in exc_info.value.error.message.lower() + + def test_nonexistent_request_id_cancellation(self): + """Test cancellation of non-existent request ID.""" + server = Server("Test") + + # Should not raise error for non-existent request ID + server.handle_cancelled_notification("nonexistent_request") + + # Verify no operations were affected + assert len(server._request_to_operation) == 0 + + +class TestInputRequiredBehavior: + """Test input_required status handling for async 
operations.""" + + def test_mark_input_required(self): + """Test marking operation as requiring input.""" + manager = AsyncOperationManager() + + # Create operation in submitted state + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + assert operation.status == "submitted" + + # Mark as input required + result = manager.mark_input_required(operation.token) + assert result is True + + # Verify status changed + updated_op = manager.get_operation(operation.token) + assert updated_op is not None + assert updated_op.status == "input_required" + + def test_mark_input_required_from_working(self): + """Test marking working operation as requiring input.""" + manager = AsyncOperationManager() + + # Create and mark as working + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + manager.mark_working(operation.token) + assert operation.status == "working" + + # Mark as input required + result = manager.mark_input_required(operation.token) + assert result is True + assert operation.status == "input_required" + + def test_mark_input_required_invalid_states(self): + """Test that input_required can only be set from valid states.""" + manager = AsyncOperationManager() + + # Test from completed state + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + manager.complete_operation(operation.token, types.CallToolResult(content=[])) + + result = manager.mark_input_required(operation.token) + assert result is False + assert operation.status == "completed" + + def test_mark_input_completed(self): + """Test marking input as completed.""" + manager = AsyncOperationManager() + + # Create operation and mark as input required + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + manager.mark_input_required(operation.token) + assert operation.status == "input_required" + + # Mark input as completed + result = manager.mark_input_completed(operation.token) + assert result is 
True + assert operation.status == "working" + + def test_mark_input_completed_invalid_state(self): + """Test that input can only be completed from input_required state.""" + manager = AsyncOperationManager() + + # Create operation in submitted state + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + assert operation.status == "submitted" + + # Try to mark input completed from wrong state + result = manager.mark_input_completed(operation.token) + assert result is False + assert operation.status == "submitted" + + def test_nonexistent_token_operations(self): + """Test input_required operations on nonexistent tokens.""" + manager = AsyncOperationManager() + + # Test with fake token + assert manager.mark_input_required("fake_token") is False + assert manager.mark_input_completed("fake_token") is False + + def test_server_send_request_for_operation(self): + """Test server method for sending requests with operation tokens.""" + manager = AsyncOperationManager() + server = Server("Test", async_operations=manager) + + # Create operation + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + manager.mark_working(operation.token) + + # Create a mock request + request = types.ServerRequest( + types.CreateMessageRequest( + params=types.CreateMessageRequestParams( + messages=[types.SamplingMessage(role="user", content=types.TextContent(type="text", text="test"))], + maxTokens=100, + ) + ) + ) + + # Send request for operation + server.send_request_for_operation(operation.token, request) + + # Verify operation status changed + updated_op = manager.get_operation(operation.token) + assert updated_op is not None + assert updated_op.status == "input_required" + + def test_server_complete_request_for_operation(self): + """Test server method for completing requests.""" + manager = AsyncOperationManager() + server = Server("Test", async_operations=manager) + + # Create operation and mark as input required + operation = 
manager.create_operation("test_tool", {"arg": "value"}, "session1") + manager.mark_input_required(operation.token) + + # Complete request for operation + server.complete_request_for_operation(operation.token) + + # Verify operation status changed back to working + updated_op = manager.get_operation(operation.token) + assert updated_op is not None + assert updated_op.status == "working" + + def test_input_required_is_terminal_check(self): + """Test that input_required is not considered a terminal state.""" + manager = AsyncOperationManager() + + # Create operation and mark as input required + operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + manager.mark_input_required(operation.token) + + # Verify it's not terminal + assert not operation.is_terminal + + # Verify it doesn't expire while in input_required state + assert not operation.is_expired From 207923048448467d6117ba9be0d3a75730041f59 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Tue, 23 Sep 2025 17:01:57 -0700 Subject: [PATCH 08/41] Support configuring the broadcasted client version --- src/mcp/client/session.py | 4 +++- src/mcp/shared/memory.py | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index 73aecc584..f7668f5f6 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -119,6 +119,7 @@ def __init__( logging_callback: LoggingFnT | None = None, message_handler: MessageHandlerFnT | None = None, client_info: types.Implementation | None = None, + protocol_version: str | None = None, ) -> None: super().__init__( read_stream, @@ -128,6 +129,7 @@ def __init__( read_timeout_seconds=read_timeout_seconds, ) self._client_info = client_info or DEFAULT_CLIENT_INFO + self._protocol_version = protocol_version or types.LATEST_PROTOCOL_VERSION self._sampling_callback = sampling_callback or _default_sampling_callback self._elicitation_callback = elicitation_callback or _default_elicitation_callback 
self._list_roots_callback = list_roots_callback or _default_list_roots_callback @@ -153,7 +155,7 @@ async def initialize(self) -> types.InitializeResult: types.ClientRequest( types.InitializeRequest( params=types.InitializeRequestParams( - protocolVersion=types.LATEST_PROTOCOL_VERSION, + protocolVersion=self._protocol_version, capabilities=types.ClientCapabilities( sampling=sampling, elicitation=elicitation, diff --git a/src/mcp/shared/memory.py b/src/mcp/shared/memory.py index c94e5e6ac..3cce9cbff 100644 --- a/src/mcp/shared/memory.py +++ b/src/mcp/shared/memory.py @@ -61,6 +61,7 @@ async def create_connected_server_and_client_session( client_info: types.Implementation | None = None, raise_exceptions: bool = False, elicitation_callback: ElicitationFnT | None = None, + protocol_version: str | None = None, ) -> AsyncGenerator[ClientSession, None]: """Creates a ClientSession that is connected to a running MCP server.""" async with create_client_server_memory_streams() as ( @@ -92,6 +93,7 @@ async def create_connected_server_and_client_session( message_handler=message_handler, client_info=client_info, elicitation_callback=elicitation_callback, + protocol_version=protocol_version, ) as client_session: await client_session.initialize() yield client_session From 04bac419f22c2dfb3296d1f7dace776441fe1dd2 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Tue, 23 Sep 2025 17:37:38 -0700 Subject: [PATCH 09/41] Pass AsyncOperations from FastMCP to Server --- src/mcp/server/fastmcp/server.py | 53 ++++++++++++++++++++++++++++++-- 1 file changed, 51 insertions(+), 2 deletions(-) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 63fa2d1c9..9cfb2e7e5 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -45,7 +45,15 @@ from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from mcp.server.transport_security import TransportSecuritySettings from mcp.shared.context import LifespanContextT, 
RequestContext, RequestT -from mcp.types import NEXT_PROTOCOL_VERSION, AnyFunction, ContentBlock, GetPromptResult, ToolAnnotations +from mcp.types import ( + NEXT_PROTOCOL_VERSION, + AnyFunction, + ContentBlock, + GetOperationPayloadResult, + GetOperationStatusResult, + GetPromptResult, + ToolAnnotations, +) from mcp.types import Prompt as MCPPrompt from mcp.types import PromptArgument as MCPPromptArgument from mcp.types import Resource as MCPResource @@ -170,9 +178,12 @@ def __init__( transport_security=transport_security, ) + self._async_operations = async_operations or AsyncOperationManager() + self._mcp_server = MCPServer( name=name or "FastMCP", instructions=instructions, + async_operations=self._async_operations, # TODO(Marcelo): It seems there's a type mismatch between the lifespan type from an FastMCP and Server. # We need to create a Lifespan type that is a generic on the server type, like Starlette does. lifespan=(lifespan_wrapper(self, self.settings.lifespan) if self.settings.lifespan else default_lifespan), # type: ignore @@ -180,7 +191,6 @@ def __init__( self._tool_manager = ToolManager(tools=tools, warn_on_duplicate_tools=self.settings.warn_on_duplicate_tools) self._resource_manager = ResourceManager(warn_on_duplicate_resources=self.settings.warn_on_duplicate_resources) self._prompt_manager = PromptManager(warn_on_duplicate_prompts=self.settings.warn_on_duplicate_prompts) - self.async_operations = async_operations or AsyncOperationManager() # Validate auth configuration if self.settings.auth is not None: if auth_server_provider and token_verifier: @@ -270,6 +280,45 @@ def _setup_handlers(self) -> None: self._mcp_server.get_prompt()(self.get_prompt) self._mcp_server.list_resource_templates()(self.list_resource_templates) + # Register async operation handlers + logger.info(f"Async operations manager: {self._async_operations}") + logger.info("Registering async operation handlers") + self._mcp_server.get_operation_status()(self.get_operation_status) + 
self._mcp_server.get_operation_result()(self.get_operation_result) + + async def get_operation_status(self, token: str) -> GetOperationStatusResult: + """Get the status of an async operation.""" + try: + operation = self._async_operations.get_operation(token) + if not operation: + raise ValueError(f"Operation not found: {token}") + + return GetOperationStatusResult( + status=operation.status, + error=operation.error if operation.status == "failed" else None, + ) + except Exception: + logger.exception(f"Error getting operation status for token {token}") + raise + + async def get_operation_result(self, token: str) -> GetOperationPayloadResult: + """Get the result of a completed async operation.""" + try: + operation = self._async_operations.get_operation(token) + if not operation: + raise ValueError(f"Operation not found: {token}") + + if operation.status != "completed": + raise ValueError(f"Operation not completed: {operation.status}") + + if not operation.result: + raise ValueError("Operation completed but no result available") + + return GetOperationPayloadResult(result=operation.result) + except Exception: + logger.exception(f"Error getting operation result for token {token}") + raise + def _client_supports_async(self) -> bool: """Check if the current client supports async tools based on protocol version.""" try: From 2df5e7cba50450a1315b78421e4ad42181a5ec36 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Tue, 23 Sep 2025 17:50:09 -0700 Subject: [PATCH 10/41] Implement lowlevel async CallTool --- src/mcp/server/lowlevel/server.py | 143 ++++++++++++++++++++-------- src/mcp/types.py | 4 +- tests/server/fastmcp/test_server.py | 98 +++++++++++++++++++ 3 files changed, 204 insertions(+), 41 deletions(-) diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index ad7b75359..c90b0fdb8 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -67,6 +67,7 @@ async def main(): from __future__ import annotations 
as _annotations +import asyncio import contextvars import json import logging @@ -465,46 +466,55 @@ async def handler(req: types.CallToolRequest): except jsonschema.ValidationError as e: return self._make_error_result(f"Input validation error: {e.message}") - # tool call - results = await func(tool_name, arguments) + # Check for async execution + if tool and self.async_operations and self._should_execute_async(tool): + # Create async operation + session_id = f"session_{id(self.request_context.session)}" + operation = self.async_operations.create_operation( + tool_name=tool_name, + arguments=arguments, + session_id=session_id, + ) + logger.debug(f"Created async operation with token: {operation.token}") - # output normalization - unstructured_content: UnstructuredContent - maybe_structured_content: StructuredContent | None - if isinstance(results, tuple) and len(results) == 2: - # tool returned both structured and unstructured content - unstructured_content, maybe_structured_content = cast(CombinationContent, results) - elif isinstance(results, dict): - # tool returned structured content only - maybe_structured_content = cast(StructuredContent, results) - unstructured_content = [types.TextContent(type="text", text=json.dumps(results, indent=2))] - elif hasattr(results, "__iter__"): - # tool returned unstructured content only - unstructured_content = cast(UnstructuredContent, results) - maybe_structured_content = None - else: - return self._make_error_result(f"Unexpected return type from tool: {type(results).__name__}") - - # output validation - if tool and tool.outputSchema is not None: - if maybe_structured_content is None: - return self._make_error_result( - "Output validation error: outputSchema defined but no structured output returned" - ) - else: + # Start async execution in background + async def execute_async(): try: - jsonschema.validate(instance=maybe_structured_content, schema=tool.outputSchema) - except jsonschema.ValidationError as e: - return 
self._make_error_result(f"Output validation error: {e.message}") - - # result - return types.ServerResult( - types.CallToolResult( - content=list(unstructured_content), - structuredContent=maybe_structured_content, - isError=False, + logger.debug(f"Starting async execution of {tool_name}") + results = await func(tool_name, arguments) + logger.debug(f"Async execution completed for {tool_name}") + + # Process results using shared logic + result = self._process_tool_result(results, tool) + self.async_operations.complete_operation(operation.token, result) + logger.debug(f"Completed async operation {operation.token}") + except Exception as e: + logger.exception(f"Async execution failed for {tool_name}") + self.async_operations.fail_operation(operation.token, str(e)) + + asyncio.create_task(execute_async()) + + # Return operation result immediately + logger.info(f"Returning async operation result for {tool_name}") + return types.ServerResult( + types.CallToolResult( + content=[], + operation=types.AsyncResultProperties( + token=operation.token, + keepAlive=3600, + ), + ) ) - ) + + # tool call + results = await func(tool_name, arguments) + + # Process results using shared logic + try: + result = self._process_tool_result(results, tool) + return types.ServerResult(result) + except ValueError as e: + return self._make_error_result(str(e)) except Exception as e: return self._make_error_result(str(e)) @@ -513,6 +523,61 @@ async def handler(req: types.CallToolRequest): return decorator + def _process_tool_result( + self, results: UnstructuredContent | StructuredContent | CombinationContent, tool: types.Tool | None = None + ) -> types.CallToolResult: + """Process tool results and create CallToolResult with validation.""" + # output normalization + unstructured_content: UnstructuredContent + maybe_structured_content: StructuredContent | None + if isinstance(results, tuple) and len(results) == 2: + # tool returned both structured and unstructured content + unstructured_content, 
maybe_structured_content = cast(CombinationContent, results) + elif isinstance(results, dict): + # tool returned structured content only + maybe_structured_content = cast(StructuredContent, results) + unstructured_content = [types.TextContent(type="text", text=json.dumps(results, indent=2))] + elif hasattr(results, "__iter__"): + # tool returned unstructured content only + unstructured_content = cast(UnstructuredContent, results) + maybe_structured_content = None + else: + raise ValueError(f"Unexpected return type from tool: {type(results).__name__}") + + # output validation + if tool and tool.outputSchema is not None: + if maybe_structured_content is None: + raise ValueError("Output validation error: outputSchema defined but no structured output returned") + else: + try: + jsonschema.validate(instance=maybe_structured_content, schema=tool.outputSchema) + except jsonschema.ValidationError as e: + raise ValueError(f"Output validation error: {e.message}") + + # result + return types.CallToolResult( + content=list(unstructured_content), + structuredContent=maybe_structured_content, + isError=False, + ) + + def _should_execute_async(self, tool: types.Tool) -> bool: + """Check if a tool should be executed asynchronously.""" + # Check if client supports async tools (protocol version "next") + try: + if self.request_context and self.request_context.session.client_params: + client_version = str(self.request_context.session.client_params.protocolVersion) + if client_version != "next": + return False + else: + return False + except (AttributeError, ValueError): + return False + + # Check if tool is async-only + invocation_mode = getattr(tool, "invocationMode", None) + return invocation_mode == "async" + def progress_notification(self): def decorator( func: Callable[[str | int, float, float | None, str | None], Awaitable[None]], @@ -783,9 +848,9 @@ async def _handle_request( # Track async operations for cancellation if isinstance(req, types.CallToolRequest): result = 
response.root - if isinstance(result, types.CallToolResult) and result.operation_result is not None: + if isinstance(result, types.CallToolResult) and result.operation is not None: # This is an async operation, track the request ID to token mapping - operation_token = result.operation_result.token + operation_token = result.operation.token self._request_to_operation[message.request_id] = operation_token logger.debug(f"Tracking async operation {operation_token} for request {message.request_id}") diff --git a/src/mcp/types.py b/src/mcp/types.py index c5b1c632e..c2b341794 100644 --- a/src/mcp/types.py +++ b/src/mcp/types.py @@ -130,7 +130,7 @@ class Operation(BaseModel): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - operation: Operation | None = Field(alias="_operation", default=None) + _operation: Operation | None = None """ Async operation parameters, only used when a result is sent in response to a request with operation parameters. """ @@ -992,7 +992,7 @@ class CallToolResult(Result): structuredContent: dict[str, Any] | None = None """An optional JSON object that represents the structured result of the tool call.""" isError: bool = False - operation_result: AsyncResultProperties | None = Field(serialization_alias="operation", default=None) + operation: AsyncResultProperties | None = Field(default=None) """Optional async execution information. 
Present when tool is executed asynchronously.""" diff --git a/tests/server/fastmcp/test_server.py b/tests/server/fastmcp/test_server.py index dbb1eaeb5..1c5cce2ea 100644 --- a/tests/server/fastmcp/test_server.py +++ b/tests/server/fastmcp/test_server.py @@ -1,3 +1,4 @@ +import asyncio import base64 from pathlib import Path from typing import TYPE_CHECKING, Any @@ -651,6 +652,103 @@ def hybrid_tool(x: int) -> int: # Hybrid tools should not have invocationMode field (None) for old clients assert tool.invocationMode is None + @pytest.mark.anyio + async def test_async_tool_call_basic(self): + """Test basic async tool call functionality.""" + mcp = FastMCP("AsyncTest") + + @mcp.tool(invocation_modes=["async"]) + async def async_add(a: int, b: int) -> int: + """Add two numbers asynchronously.""" + await asyncio.sleep(0.01) # Simulate async work + return a + b + + async with client_session(mcp._mcp_server, protocol_version="next") as client: + result = await client.call_tool("async_add", {"a": 5, "b": 3}) + + # Should get operation token for async call + assert result.operation is not None + token = result.operation.token + + # Poll for completion + while True: + status = await client.get_operation_status(token) + if status.status == "completed": + final_result = await client.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert content.text == "8" + break + elif status.status == "failed": + pytest.fail(f"Operation failed: {status.error}") + await asyncio.sleep(0.01) + + @pytest.mark.anyio + async def test_async_tool_call_structured_output(self): + """Test async tool call with structured output.""" + mcp = FastMCP("AsyncTest") + + class AsyncResult(BaseModel): + value: int + processed: bool = True + + @mcp.tool(invocation_modes=["async"]) + async def async_structured_tool(x: int) -> AsyncResult: + """Process data and 
return structured result.""" + await asyncio.sleep(0.01) # Simulate async work + return AsyncResult(value=x * 2) + + async with client_session(mcp._mcp_server, protocol_version="next") as client: + result = await client.call_tool("async_structured_tool", {"x": 21}) + + # Should get operation token for async call + assert result.operation is not None + token = result.operation.token + + # Poll for completion + while True: + status = await client.get_operation_status(token) + if status.status == "completed": + final_result = await client.get_operation_result(token) + assert not final_result.result.isError + assert final_result.result.structuredContent is not None + assert final_result.result.structuredContent == {"value": 42, "processed": True} + break + elif status.status == "failed": + pytest.fail(f"Operation failed: {status.error}") + await asyncio.sleep(0.01) + + @pytest.mark.anyio + async def test_async_tool_call_validation_error(self): + """Test async tool call with server-side validation error.""" + mcp = FastMCP("AsyncTest") + + @mcp.tool(invocation_modes=["async"]) + async def async_invalid_tool() -> list[int]: + """Tool that returns invalid structured output.""" + await asyncio.sleep(0.01) # Simulate async work + return [1, 2, 3, [4]] # type: ignore + + async with client_session(mcp._mcp_server, protocol_version="next") as client: + result = await client.call_tool("async_invalid_tool", {}) + + # Should get operation token for async call + assert result.operation is not None + token = result.operation.token + + # Poll for completion - should fail due to validation error + while True: + status = await client.get_operation_status(token) + if status.status == "failed": + # Operation should fail due to validation error + assert status.error is not None + break + elif status.status == "completed": + pytest.fail("Operation should have failed due to validation error") + await asyncio.sleep(0.01) + class TestServerResources: @pytest.mark.anyio From 
759a9a39928808440bc78176b0c2bdc28c7769dc Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Tue, 23 Sep 2025 17:51:59 -0700 Subject: [PATCH 11/41] Implement async tools snippets --- .../snippets/clients/async_tools_client.py | 219 ++++++++++++++++++ examples/snippets/pyproject.toml | 5 +- examples/snippets/servers/__init__.py | 3 +- examples/snippets/servers/async_tools.py | 139 +++++++++++ tests/server/fastmcp/test_integration.py | 108 ++++++++- tests/server/test_async_operations.py | 10 +- 6 files changed, 474 insertions(+), 10 deletions(-) create mode 100644 examples/snippets/clients/async_tools_client.py create mode 100644 examples/snippets/servers/async_tools.py diff --git a/examples/snippets/clients/async_tools_client.py b/examples/snippets/clients/async_tools_client.py new file mode 100644 index 000000000..c4395c7c8 --- /dev/null +++ b/examples/snippets/clients/async_tools_client.py @@ -0,0 +1,219 @@ +""" +Client example showing how to use async tools. + +cd to the `examples/snippets` directory and run: + uv run async-tools-client + uv run async-tools-client --protocol=latest # backwards compatible mode + uv run async-tools-client --protocol=next # async tools mode +""" + +import asyncio +import os +import sys + +from mcp import ClientSession, StdioServerParameters, types +from mcp.client.stdio import stdio_client + +# Create server parameters for stdio connection +server_params = StdioServerParameters( + command="uv", # Using uv to run the server + args=["run", "server", "async_tools", "stdio"], + env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, +) + + +async def demonstrate_sync_tool(session: ClientSession): + """Demonstrate calling a synchronous tool.""" + print("\n=== Synchronous Tool Demo ===") + + result = await session.call_tool("sync_tool", arguments={"x": 21}) + + # Print the result + for content in result.content: + if isinstance(content, types.TextContent): + print(f"Sync tool result: {content.text}") + + +async def demonstrate_async_tool(session: 
ClientSession): + """Demonstrate calling an async-only tool.""" + print("\n=== Asynchronous Tool Demo ===") + + # Call the async tool + result = await session.call_tool("async_only_tool", arguments={"data": "sample dataset"}) + + if result.operation: + token = result.operation.token + print(f"Async operation started with token: {token}") + + # Poll for status updates + while True: + status = await session.get_operation_status(token) + print(f"Status: {status.status}") + + if status.status == "completed": + # Get the final result + final_result = await session.get_operation_result(token) + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Final result: {content.text}") + break + elif status.status == "failed": + print(f"Operation failed: {status.error}") + break + elif status.status in ("canceled", "unknown"): + print(f"Operation ended with status: {status.status}") + break + + # Wait before polling again + await asyncio.sleep(1) + else: + # Synchronous result (shouldn't happen for async-only tools) + for content in result.content: + if isinstance(content, types.TextContent): + print(f"Unexpected sync result: {content.text}") + + +async def demonstrate_hybrid_tool(session: ClientSession): + """Demonstrate calling a hybrid tool in both modes.""" + print("\n=== Hybrid Tool Demo ===") + + # Call hybrid tool (will be sync by default for compatibility) + result = await session.call_tool("hybrid_tool", arguments={"message": "hello world"}) + + for content in result.content: + if isinstance(content, types.TextContent): + print(f"Hybrid tool result: {content.text}") + + +async def demonstrate_batch_processing(session: ClientSession): + """Demonstrate batch processing with progress updates.""" + print("\n=== Batch Processing Demo ===") + + items = ["apple", "banana", "cherry", "date", "elderberry"] + result = await session.call_tool("batch_operation_tool", arguments={"items": items}) + + if result.operation: + token = 
result.operation.token + print(f"Batch operation started with token: {token}") + + # Poll for status with progress tracking + while True: + status = await session.get_operation_status(token) + print(f"Status: {status.status}") + + if status.status == "completed": + # Get the final result + final_result = await session.get_operation_result(token) + + # Check for structured result + if final_result.result.structuredContent: + print(f"Structured result: {final_result.result.structuredContent}") + + # Also show text content + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Text result: {content.text}") + break + elif status.status == "failed": + print(f"Operation failed: {status.error}") + break + elif status.status in ("canceled", "unknown"): + print(f"Operation ended with status: {status.status}") + break + + # Wait before polling again + await asyncio.sleep(0.5) + else: + print("Unexpected: batch operation returned synchronous result") + + +async def demonstrate_data_processing(session: ClientSession): + """Demonstrate complex data processing pipeline.""" + print("\n=== Data Processing Pipeline Demo ===") + + operations = ["validate", "clean", "transform", "analyze", "export"] + result = await session.call_tool( + "data_processing_tool", arguments={"dataset": "customer_data.csv", "operations": operations} + ) + + if result.operation: + token = result.operation.token + print(f"Data processing started with token: {token}") + + # Poll for completion + while True: + status = await session.get_operation_status(token) + print(f"Status: {status.status}") + + if status.status == "completed": + final_result = await session.get_operation_result(token) + + # Show structured result if available + if final_result.result.structuredContent: + print("Processing results:") + for op, result_text in final_result.result.structuredContent.items(): + print(f" {op}: {result_text}") + break + elif status.status == "failed": + 
print(f"Processing failed: {status.error}") + break + elif status.status in ("canceled", "unknown"): + print(f"Processing ended with status: {status.status}") + break + + await asyncio.sleep(0.8) + + +async def run(): + """Run all async tool demonstrations.""" + # Determine protocol version from command line + protocol_version = "next" # Default to next for async tools + if len(sys.argv) > 1: + if "--protocol=latest" in sys.argv: + protocol_version = "2025-06-18" # Latest stable protocol + elif "--protocol=next" in sys.argv: + protocol_version = "next" # Development protocol version with async tools + + print(f"Using protocol version: {protocol_version}") + print() + + async with stdio_client(server_params) as (read, write): + # Use configured protocol version + async with ClientSession(read, write, protocol_version=protocol_version) as session: + # Initialize the connection + await session.initialize() + + # List available tools to see invocation modes + tools = await session.list_tools() + print("Available tools:") + for tool in tools.tools: + invocation_mode = getattr(tool, "invocationMode", "sync") + print(f" - {tool.name}: {tool.description} (mode: {invocation_mode})") + + # Demonstrate different tool types + await demonstrate_sync_tool(session) + await demonstrate_hybrid_tool(session) + await demonstrate_async_tool(session) + await demonstrate_batch_processing(session) + await demonstrate_data_processing(session) + + print("\n=== All demonstrations complete! 
===") + + +def main(): + """Entry point for the async tools client.""" + if "--help" in sys.argv or "-h" in sys.argv: + print("Usage: async-tools-client [--protocol=latest|next]") + print() + print("Protocol versions:") + print(" --protocol=latest Use stable protocol (only sync/hybrid tools visible)") + print(" --protocol=next Use development protocol (all async tools visible)") + print() + print("Default: --protocol=next") + return + + asyncio.run(run()) + + +if __name__ == "__main__": + main() diff --git a/examples/snippets/pyproject.toml b/examples/snippets/pyproject.toml index 76791a55a..ea9c1658a 100644 --- a/examples/snippets/pyproject.toml +++ b/examples/snippets/pyproject.toml @@ -3,9 +3,7 @@ name = "mcp-snippets" version = "0.1.0" description = "MCP Example Snippets" requires-python = ">=3.10" -dependencies = [ - "mcp", -] +dependencies = ["mcp"] [build-system] requires = ["setuptools", "wheel"] @@ -21,3 +19,4 @@ completion-client = "clients.completion_client:main" direct-execution-server = "servers.direct_execution:main" display-utilities-client = "clients.display_utilities:main" oauth-client = "clients.oauth_client:run" +async-tools-client = "clients.async_tools_client:main" diff --git a/examples/snippets/servers/__init__.py b/examples/snippets/servers/__init__.py index b9865e822..a5aefd538 100644 --- a/examples/snippets/servers/__init__.py +++ b/examples/snippets/servers/__init__.py @@ -22,7 +22,8 @@ def run_server(): print("Usage: server [transport]") print("Available servers: basic_tool, basic_resource, basic_prompt, tool_progress,") print(" sampling, elicitation, completion, notifications,") - print(" fastmcp_quickstart, structured_output, images") + print(" fastmcp_quickstart, structured_output, images,") + print(" async_tools_example") print("Available transports: stdio (default), sse, streamable-http") sys.exit(1) diff --git a/examples/snippets/servers/async_tools.py b/examples/snippets/servers/async_tools.py new file mode 100644 index 
000000000..315104241 --- /dev/null +++ b/examples/snippets/servers/async_tools.py @@ -0,0 +1,139 @@ +""" +FastMCP async tools example showing different invocation modes. + +cd to the `examples/snippets/clients` directory and run: + uv run server async_tools stdio +""" + +import asyncio + +from mcp.server.fastmcp import Context, FastMCP + +# Create an MCP server with async operations support +mcp = FastMCP("Async Tools Demo") + + +@mcp.tool() +def sync_tool(x: int) -> str: + """An implicitly-synchronous tool.""" + return f"Sync result: {x * 2}" + + +@mcp.tool(invocation_modes=["async"]) +async def async_only_tool(data: str, ctx: Context) -> str: # type: ignore[type-arg] + """An async-only tool that takes time to complete.""" + await ctx.info("Starting long-running analysis...") + + # Simulate long-running work with progress updates + for i in range(5): + await asyncio.sleep(0.5) + progress = (i + 1) / 5 + await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5") + + await ctx.info("Analysis complete!") + return f"Async analysis result for: {data}" + + +@mcp.tool(invocation_modes=["sync", "async"]) +def hybrid_tool(message: str, ctx: Context | None = None) -> str: # type: ignore[type-arg] + """A hybrid tool that works both sync and async.""" + if ctx: + # Async mode - we have context for progress reporting + import asyncio + + async def async_work(): + await ctx.info(f"Processing '{message}' asynchronously...") + await asyncio.sleep(0.5) # Simulate some work + await ctx.debug("Async processing complete") + + # Run the async work (this is a bit of a hack for demo purposes) + try: + loop = asyncio.get_event_loop() + loop.create_task(async_work()) + except RuntimeError: + pass # No event loop running + + # Both sync and async modes return the same result + return f"Hybrid result: {message.upper()}" + + +@mcp.tool(invocation_modes=["async"]) +async def data_processing_tool(dataset: str, operations: list[str], ctx: Context) -> dict[str, str]: # type: 
ignore[type-arg] + """Simulate a complex data processing pipeline.""" + await ctx.info(f"Starting data processing pipeline for {dataset}") + + results: dict[str, str] = {} + total_ops = len(operations) + + for i, operation in enumerate(operations): + await ctx.debug(f"Executing operation: {operation}") + + # Simulate processing time + processing_time = 0.5 + (i * 0.2) # Increasing complexity + await asyncio.sleep(processing_time) + + # Report progress + progress = (i + 1) / total_ops + await ctx.report_progress(progress, 1.0, f"Completed {operation}") + + # Store result + results[operation] = f"Result of {operation} on {dataset}" + + await ctx.info("Data processing pipeline complete!") + return results + + +@mcp.tool(invocation_modes=["async"]) +async def file_analysis_tool(file_path: str, ctx: Context) -> str: # type: ignore[type-arg] + """Simulate file analysis with user interaction.""" + await ctx.info(f"Analyzing file: {file_path}") + + # Simulate initial analysis + await asyncio.sleep(1) + await ctx.report_progress(0.3, 1.0, "Initial scan complete") + + # Simulate finding an issue that requires user input + await ctx.warning("Found potential security issue - requires user confirmation") + + # In a real implementation, you would use ctx.elicit() here to ask the user + # For this demo, we'll just simulate the decision + await asyncio.sleep(0.5) + await ctx.info("User confirmed - continuing analysis") + + # Complete the analysis + await asyncio.sleep(1) + await ctx.report_progress(1.0, 1.0, "Analysis complete") + + return f"File analysis complete for {file_path}. No issues found after user review." 
+ + +@mcp.tool(invocation_modes=["async"]) +async def batch_operation_tool(items: list[str], ctx: Context) -> list[str]: # type: ignore[type-arg] + """Process a batch of items with detailed progress reporting.""" + await ctx.info(f"Starting batch operation on {len(items)} items") + + results: list[str] = [] + + for i, item in enumerate(items): + await ctx.debug(f"Processing item {i + 1}: {item}") + + # Simulate variable processing time + processing_time = 0.2 + (len(item) * 0.1) + await asyncio.sleep(processing_time) + + # Report progress for this item + progress = (i + 1) / len(items) + await ctx.report_progress(progress, 1.0, f"Processed {i + 1}/{len(items)}: {item}") + + # Process the item + result = f"PROCESSED_{item.upper()}" + results.append(result) + + await ctx.debug(f"Item {i + 1} result: {result}") + + await ctx.info(f"Batch operation complete! Processed {len(results)} items") + return results + + +if __name__ == "__main__": + mcp.run() diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index dc88cc025..7452b7066 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -22,6 +22,7 @@ from pydantic import AnyUrl from examples.snippets.servers import ( + async_tools, basic_prompt, basic_resource, basic_tool, @@ -104,7 +105,9 @@ def server_url(server_port: int) -> str: def run_server_with_transport(module_name: str, port: int, transport: str) -> None: """Run server with specified transport.""" # Get the MCP instance based on module name - if module_name == "basic_tool": + if module_name == "async_tools": + mcp = async_tools.mcp + elif module_name == "basic_tool": mcp = basic_tool.mcp elif module_name == "basic_resource": mcp = basic_resource.mcp @@ -663,6 +666,109 @@ async def test_fastmcp_quickstart(server_transport: str, server_url: str) -> Non assert resource_result.contents[0].text == "Hello, Alice!" 
+# Test async tools example with "next" protocol +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + # Skip SSE for async tools - SSE client has issues with long polling in test environment + # causing BrokenResourceError during async operation status polling + # ("async_tools", "sse"), + ("async_tools", "streamable-http"), + ], + indirect=True, +) +async def test_async_tools(server_transport: str, server_url: str) -> None: + """Test async tools functionality with 'next' protocol version.""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession(read_stream, write_stream, protocol_version="next") as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tools Demo" + + # Test sync tool (should work normally) + sync_result = await session.call_tool("sync_tool", {"x": 21}) + assert len(sync_result.content) == 1 + assert isinstance(sync_result.content[0], TextContent) + assert sync_result.content[0].text == "Sync result: 42" + + # Test async-only tool (should return operation token) + async_result = await session.call_tool("async_only_tool", {"data": "test data"}) + assert async_result.operation is not None + token = async_result.operation.token + + # Poll for completion + import asyncio + + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Async analysis result for: test data" in content.text + break + elif status.status == "failed": + pytest.fail(f"Async operation failed: 
{status.error}") + await asyncio.sleep(0.01) + + # Test hybrid tool (should work as sync by default) + hybrid_result = await session.call_tool("hybrid_tool", {"message": "hello"}) + assert len(hybrid_result.content) == 1 + assert isinstance(hybrid_result.content[0], TextContent) + assert "Hybrid result: HELLO" in hybrid_result.content[0].text + + +# Test async tools example with legacy protocol +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tools", "streamable-http"), + ], + indirect=True, +) +async def test_async_tools_legacy_protocol(server_transport: str, server_url: str) -> None: + """Test async tools functionality with '2025-06-18' protocol version.""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession(read_stream, write_stream, protocol_version="2025-06-18") as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tools Demo" + + # Test sync tool (should work normally) + sync_result = await session.call_tool("sync_tool", {"x": 21}) + assert len(sync_result.content) == 1 + assert isinstance(sync_result.content[0], TextContent) + assert sync_result.content[0].text == "Sync result: 42" + + # Test async-only tool (executes synchronously with legacy protocol) + async_result = await session.call_tool("async_only_tool", {"data": "test data"}) + assert async_result.operation is None # No operation token with legacy protocol + assert len(async_result.content) == 1 + content = async_result.content[0] + assert isinstance(content, TextContent) + assert "Async analysis result for: test data" in content.text + + # Test hybrid tool (should work as sync) + hybrid_result = await session.call_tool("hybrid_tool", {"message": "hello"}) + assert 
len(hybrid_result.content) == 1 + assert isinstance(hybrid_result.content[0], TextContent) + assert "Hybrid result: HELLO" in hybrid_result.content[0].text + + # Test structured output example @pytest.mark.anyio @pytest.mark.parametrize( diff --git a/tests/server/test_async_operations.py b/tests/server/test_async_operations.py index 0a02786ae..aadd086b0 100644 --- a/tests/server/test_async_operations.py +++ b/tests/server/test_async_operations.py @@ -236,8 +236,8 @@ def test_dependency_injection_and_integration(self): # Test FastMCP integration fastmcp = FastMCP("FastMCP", async_operations=custom_manager) - assert fastmcp.async_operations is custom_manager - assert fastmcp.async_operations.get_operation(operation.token) is operation + assert fastmcp._async_operations is custom_manager + assert fastmcp._async_operations.get_operation(operation.token) is operation # Test lowlevel Server integration lowlevel = Server("LowLevel", async_operations=custom_manager) @@ -247,12 +247,12 @@ def test_dependency_injection_and_integration(self): # Test default creation default_fastmcp = FastMCP("Default") default_server = Server("Default") - assert isinstance(default_fastmcp.async_operations, AsyncOperationManager) + assert isinstance(default_fastmcp._async_operations, AsyncOperationManager) assert isinstance(default_server.async_operations, AsyncOperationManager) - assert default_fastmcp.async_operations is not custom_manager + assert default_fastmcp._async_operations is not custom_manager # Test shared manager between servers - new_op = fastmcp.async_operations.create_operation("new_tool", {}, "session2") + new_op = fastmcp._async_operations.create_operation("new_tool", {}, "session2") assert lowlevel.async_operations.get_operation(new_op.token) is new_op From 011a363ea3db59baadf1d69e9859d4c0c047144f Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Tue, 23 Sep 2025 20:23:58 -0700 Subject: [PATCH 12/41] Implement optoken to tool name map on client end for validation --- 
src/mcp/client/session.py | 34 ++++- src/mcp/server/fastmcp/server.py | 6 +- src/mcp/server/lowlevel/server.py | 10 +- .../lowlevel => shared}/async_operations.py | 142 ++++++++++++++---- .../server/test_lowlevel_async_operations.py | 76 +++++----- .../test_async_operations.py | 50 +++--- 6 files changed, 218 insertions(+), 100 deletions(-) rename src/mcp/{server/lowlevel => shared}/async_operations.py (60%) rename tests/{server => shared}/test_async_operations.py (86%) diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index f7668f5f6..8f571f729 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -9,6 +9,7 @@ from pydantic import AnyUrl, TypeAdapter import mcp.types as types +from mcp.shared.async_operations import ClientAsyncOperationManager from mcp.shared.context import RequestContext from mcp.shared.message import SessionMessage from mcp.shared.session import BaseSession, ProgressFnT, RequestResponder @@ -136,6 +137,7 @@ def __init__( self._logging_callback = logging_callback or _default_logging_callback self._message_handler = message_handler or _default_message_handler self._tool_output_schemas: dict[str, dict[str, Any] | None] = {} + self._operation_manager = ClientAsyncOperationManager() async def initialize(self) -> types.InitializeResult: sampling = types.SamplingCapability() if self._sampling_callback is not _default_sampling_callback else None @@ -174,8 +176,15 @@ async def initialize(self) -> types.InitializeResult: await self.send_notification(types.ClientNotification(types.InitializedNotification())) + # Start cleanup task for operations + await self._operation_manager.start_cleanup_task() + return result + async def close(self) -> None: + """Clean up resources.""" + await self._operation_manager.stop_cleanup_task() + async def send_ping(self) -> types.EmptyResult: """Send a ping request.""" return await self.send_request( @@ -305,7 +314,14 @@ async def call_tool( ) if not result.isError: - await 
self._validate_tool_result(name, result) + # Track operation for async operations + if result.operation is not None: + self._operation_manager.track_operation( + result.operation.token, name, result.operation.keepAlive or 3600 + ) + logger.debug(f"Tracking operation for token: {result.operation.token}") + else: + await self._validate_tool_result(name, result) return result @@ -336,7 +352,7 @@ async def get_operation_result(self, token: str) -> types.GetOperationPayloadRes Returns: The final tool result """ - return await self.send_request( + result = await self.send_request( types.ClientRequest( types.GetOperationPayloadRequest( params=types.GetOperationPayloadParams(token=token), @@ -345,7 +361,18 @@ async def get_operation_result(self, token: str) -> types.GetOperationPayloadRes types.GetOperationPayloadResult, ) - async def _validate_tool_result(self, name: str, result: types.CallToolResult) -> None: + # Validate using the stored tool name + if hasattr(result, "result") and result.result: + # Clean up expired operations first + self._operation_manager.cleanup_expired() + + tool_name = self._operation_manager.get_tool_name(token) + await self._validate_tool_result(tool_name, result.result) + # Keep the operation for potential future retrievals + + return result + + async def _validate_tool_result(self, name: str | None, result: types.CallToolResult) -> None: """Validate the structured content of a tool result against its output schema.""" if name not in self._tool_output_schemas: # refresh output schema cache @@ -358,6 +385,7 @@ async def _validate_tool_result(self, name: str, result: types.CallToolResult) - logger.warning(f"Tool {name} not listed by server, cannot validate any structured content") if output_schema is not None: + logger.debug(f"Validating structured content for tool: {name}") if result.structuredContent is None: raise RuntimeError(f"Tool {name} has an output schema but did not return structured content") try: diff --git 
a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 9cfb2e7e5..6221cb7e7 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -33,7 +33,6 @@ from mcp.server.fastmcp.tools.base import InvocationMode from mcp.server.fastmcp.utilities.context_injection import find_context_parameter from mcp.server.fastmcp.utilities.logging import configure_logging, get_logger -from mcp.server.lowlevel.async_operations import AsyncOperationManager from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.lowlevel.server import LifespanResultT from mcp.server.lowlevel.server import Server as MCPServer @@ -44,6 +43,7 @@ from mcp.server.streamable_http import EventStore from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from mcp.server.transport_security import TransportSecuritySettings +from mcp.shared.async_operations import ServerAsyncOperationManager from mcp.shared.context import LifespanContextT, RequestContext, RequestT from mcp.types import ( NEXT_PROTOCOL_VERSION, @@ -138,7 +138,7 @@ def __init__( token_verifier: TokenVerifier | None = None, event_store: EventStore | None = None, *, - async_operations: AsyncOperationManager | None = None, + async_operations: ServerAsyncOperationManager | None = None, tools: list[Tool] | None = None, debug: bool = False, log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", @@ -178,7 +178,7 @@ def __init__( transport_security=transport_security, ) - self._async_operations = async_operations or AsyncOperationManager() + self._async_operations = async_operations or ServerAsyncOperationManager() self._mcp_server = MCPServer( name=name or "FastMCP", diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index c90b0fdb8..cf4bf3341 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -83,10 +83,10 @@ async def main(): from typing_extensions import TypeVar 
import mcp.types as types -from mcp.server.lowlevel.async_operations import AsyncOperation, AsyncOperationManager from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.models import InitializationOptions from mcp.server.session import ServerSession +from mcp.shared.async_operations import ServerAsyncOperation, ServerAsyncOperationManager from mcp.shared.context import RequestContext from mcp.shared.exceptions import McpError from mcp.shared.message import ServerMessageMetadata, SessionMessage @@ -138,7 +138,7 @@ def __init__( name: str, version: str | None = None, instructions: str | None = None, - async_operations: AsyncOperationManager | None = None, + async_operations: ServerAsyncOperationManager | None = None, lifespan: Callable[ [Server[LifespanResultT, RequestT]], AbstractAsyncContextManager[LifespanResultT], @@ -148,7 +148,7 @@ def __init__( self.version = version self.instructions = instructions self.lifespan = lifespan - self.async_operations = async_operations or AsyncOperationManager() + self.async_operations = async_operations or ServerAsyncOperationManager() # Track request ID to operation token mapping for cancellation self._request_to_operation: dict[RequestId, str] = {} self.request_handlers: dict[type, Callable[..., Awaitable[types.ServerResult]]] = { @@ -469,11 +469,9 @@ async def handler(req: types.CallToolRequest): # Check for async execution if tool and self.async_operations and self._should_execute_async(tool): # Create async operation - session_id = f"session_{id(self.request_context.session)}" operation = self.async_operations.create_operation( tool_name=tool_name, arguments=arguments, - session_id=session_id, ) logger.debug(f"Created async operation with token: {operation.token}") @@ -627,7 +625,7 @@ async def handler(req: types.CompleteRequest): return decorator - def _validate_operation_token(self, token: str) -> AsyncOperation: + def _validate_operation_token(self, token: str) -> ServerAsyncOperation: 
"""Validate operation token and return operation if valid.""" operation = self.async_operations.get_operation(token) if not operation: diff --git a/src/mcp/server/lowlevel/async_operations.py b/src/mcp/shared/async_operations.py similarity index 60% rename from src/mcp/server/lowlevel/async_operations.py rename to src/mcp/shared/async_operations.py index bafa7f262..f847f8f6d 100644 --- a/src/mcp/server/lowlevel/async_operations.py +++ b/src/mcp/shared/async_operations.py @@ -7,23 +7,38 @@ import time from collections.abc import Callable from dataclasses import dataclass -from typing import Any +from typing import Any, Generic, TypeVar import mcp.types as types from mcp.types import AsyncOperationStatus @dataclass -class AsyncOperation: +class ClientAsyncOperation: + """Minimal operation tracking for client-side use.""" + + token: str + tool_name: str + created_at: float + keep_alive: int + + @property + def is_expired(self) -> bool: + """Check if operation has expired based on keepAlive.""" + return time.time() > (self.created_at + self.keep_alive) + + +@dataclass +class ServerAsyncOperation: """Represents an async tool operation.""" token: str tool_name: str arguments: dict[str, Any] - session_id: str status: AsyncOperationStatus created_at: float keep_alive: int + session_id: str | None = None result: types.CallToolResult | None = None error: str | None = None @@ -40,51 +55,126 @@ def is_terminal(self) -> bool: return self.status in ("completed", "failed", "canceled", "unknown") -class AsyncOperationManager: - """Manages async tool operations with token-based tracking.""" +OperationT = TypeVar("OperationT", ClientAsyncOperation, ServerAsyncOperation) + - def __init__(self, *, token_generator: Callable[[str], str] | None = None): - self._operations: dict[str, AsyncOperation] = {} +class BaseOperationManager(Generic[OperationT]): + """Base class for operation management.""" + + def __init__(self, *, token_generator: Callable[[str | None], str] | None = None): + 
self._operations: dict[str, OperationT] = {} self._cleanup_task: asyncio.Task[None] | None = None self._cleanup_interval = 60 # Cleanup every 60 seconds self._token_generator = token_generator or self._default_token_generator - def _default_token_generator(self, session_id: str) -> str: + def _default_token_generator(self, session_id: str | None = None) -> str: """Default token generation using random tokens.""" return secrets.token_urlsafe(32) - def generate_token(self, session_id: str) -> str: + def generate_token(self, session_id: str | None = None) -> str: """Generate a token.""" return self._token_generator(session_id) + def _get_operation(self, token: str) -> OperationT | None: + """Internal method to get operation by token.""" + return self._operations.get(token) + + def _set_operation(self, token: str, operation: OperationT) -> None: + """Internal method to store an operation.""" + self._operations[token] = operation + + def _remove_operation(self, token: str) -> OperationT | None: + """Internal method to remove and return an operation.""" + return self._operations.pop(token, None) + + def get_operation(self, token: str) -> OperationT | None: + """Get operation by token.""" + return self._get_operation(token) + + def remove_operation(self, token: str) -> bool: + """Remove an operation by token.""" + return self._remove_operation(token) is not None + + def cleanup_expired(self) -> int: + """Remove expired operations and return count of removed operations.""" + expired_tokens = [token for token, operation in self._operations.items() if operation.is_expired] + for token in expired_tokens: + self._remove_operation(token) + return len(expired_tokens) + + async def start_cleanup_task(self) -> None: + """Start the background cleanup task.""" + if self._cleanup_task is None: + self._cleanup_task = asyncio.create_task(self._cleanup_loop()) + + async def stop_cleanup_task(self) -> None: + """Stop the background cleanup task.""" + if self._cleanup_task: + 
self._cleanup_task.cancel() + try: + await self._cleanup_task + except asyncio.CancelledError: + pass + self._cleanup_task = None + + async def _cleanup_loop(self) -> None: + """Background task to clean up expired operations.""" + while True: + try: + await asyncio.sleep(self._cleanup_interval) + count = self.cleanup_expired() + if count > 0: + print(f"Cleaned up {count} expired operations") + except asyncio.CancelledError: + break + + +class ClientAsyncOperationManager(BaseOperationManager[ClientAsyncOperation]): + """Manages client-side operation tracking.""" + + def track_operation(self, token: str, tool_name: str, keep_alive: int = 3600) -> None: + """Track a client operation.""" + operation = ClientAsyncOperation( + token=token, + tool_name=tool_name, + created_at=time.time(), + keep_alive=keep_alive, + ) + self._set_operation(token, operation) + + def get_tool_name(self, token: str) -> str | None: + """Get tool name for a tracked operation.""" + operation = self._get_operation(token) + return operation.tool_name if operation else None + + +class ServerAsyncOperationManager(BaseOperationManager[ServerAsyncOperation]): + """Manages async tool operations with token-based tracking.""" + def create_operation( self, tool_name: str, arguments: dict[str, Any], - session_id: str, keep_alive: int = 3600, - ) -> AsyncOperation: + session_id: str | None = None, + ) -> ServerAsyncOperation: """Create a new async operation.""" token = self.generate_token(session_id) - operation = AsyncOperation( + operation = ServerAsyncOperation( token=token, tool_name=tool_name, arguments=arguments, - session_id=session_id, status="submitted", created_at=time.time(), keep_alive=keep_alive, + session_id=session_id, ) - self._operations[token] = operation + self._set_operation(token, operation) return operation - def get_operation(self, token: str) -> AsyncOperation | None: - """Get operation by token.""" - return self._operations.get(token) - def mark_working(self, token: str) -> bool: 
"""Mark operation as working.""" - operation = self._operations.get(token) + operation = self._get_operation(token) if not operation: return False @@ -97,7 +187,7 @@ def mark_working(self, token: str) -> bool: def complete_operation(self, token: str, result: types.CallToolResult) -> bool: """Complete operation with result.""" - operation = self._operations.get(token) + operation = self._get_operation(token) if not operation: return False @@ -111,7 +201,7 @@ def complete_operation(self, token: str, result: types.CallToolResult) -> bool: def fail_operation(self, token: str, error: str) -> bool: """Fail operation with error.""" - operation = self._operations.get(token) + operation = self._get_operation(token) if not operation: return False @@ -125,14 +215,14 @@ def fail_operation(self, token: str, error: str) -> bool: def get_operation_result(self, token: str) -> types.CallToolResult | None: """Get result for completed operation.""" - operation = self._operations.get(token) + operation = self._get_operation(token) if not operation or operation.status != "completed": return None return operation.result def cancel_operation(self, token: str) -> bool: """Cancel operation.""" - operation = self._operations.get(token) + operation = self._get_operation(token) if not operation: return False @@ -156,7 +246,7 @@ def cleanup_expired_operations(self) -> int: return len(expired_tokens) - def get_session_operations(self, session_id: str) -> list[AsyncOperation]: + def get_session_operations(self, session_id: str) -> list[ServerAsyncOperation]: """Get all operations for a session.""" return [op for op in self._operations.values() if op.session_id == session_id] @@ -174,7 +264,7 @@ def cancel_session_operations(self, session_id: str) -> int: def mark_input_required(self, token: str) -> bool: """Mark operation as requiring input from client.""" - operation = self._operations.get(token) + operation = self._get_operation(token) if not operation: return False @@ -187,7 +277,7 @@ def 
mark_input_required(self, token: str) -> bool: def mark_input_completed(self, token: str) -> bool: """Mark operation as no longer requiring input, return to working state.""" - operation = self._operations.get(token) + operation = self._get_operation(token) if not operation: return False diff --git a/tests/server/test_lowlevel_async_operations.py b/tests/server/test_lowlevel_async_operations.py index f822a19f4..ff35ae2c1 100644 --- a/tests/server/test_lowlevel_async_operations.py +++ b/tests/server/test_lowlevel_async_operations.py @@ -8,7 +8,7 @@ import mcp.types as types from mcp.server.lowlevel import Server -from mcp.server.lowlevel.async_operations import AsyncOperationManager +from mcp.shared.async_operations import ServerAsyncOperationManager from mcp.shared.exceptions import McpError @@ -17,7 +17,7 @@ class TestLowlevelServerAsyncOperations: def test_check_async_status_invalid_token(self): """Test get_operation_status handler with invalid token.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) # Register the handler @@ -43,7 +43,7 @@ async def run_handler(): def test_check_async_status_expired_token(self): """Test get_operation_status handler with expired token.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @server.get_operation_status() @@ -51,7 +51,7 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: return types.GetOperationStatusResult(status="unknown") # Create and complete operation with short keepAlive - operation = manager.create_operation("test_tool", {}, "session1", keep_alive=1) + operation = manager.create_operation("test_tool", {}, keep_alive=1, session_id="session1") manager.complete_operation(operation.token, types.CallToolResult(content=[])) # Make it expired @@ -73,7 +73,7 @@ async def run_handler(): def 
test_check_async_status_valid_operation(self): """Test get_operation_status handler with valid operation.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @server.get_operation_status() @@ -81,7 +81,7 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: return types.GetOperationStatusResult(status="unknown") # Create valid operation - operation = manager.create_operation("test_tool", {}, "session1") + operation = manager.create_operation("test_tool", {}, session_id="session1") manager.mark_working(operation.token) valid_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) @@ -100,7 +100,7 @@ async def run_handler(): def test_check_async_status_failed_operation(self): """Test get_operation_status handler with failed operation.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @server.get_operation_status() @@ -108,7 +108,7 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: return types.GetOperationStatusResult(status="unknown") # Create and fail operation - operation = manager.create_operation("test_tool", {}, "session1") + operation = manager.create_operation("test_tool", {}, session_id="session1") manager.fail_operation(operation.token, "Something went wrong") failed_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) @@ -127,7 +127,7 @@ async def run_handler(): def test_get_async_result_invalid_token(self): """Test get_operation_result handler with invalid token.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @server.get_operation_result() @@ -152,7 +152,7 @@ async def run_handler(): def test_get_async_result_expired_token(self): """Test get_operation_result 
handler with expired token.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @server.get_operation_result() @@ -160,7 +160,7 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create and complete operation with short keepAlive - operation = manager.create_operation("test_tool", {}, "session1", keep_alive=1) + operation = manager.create_operation("test_tool", {}, keep_alive=1, session_id="session1") manager.complete_operation(operation.token, types.CallToolResult(content=[])) # Make it expired @@ -184,7 +184,7 @@ async def run_handler(): def test_get_async_result_not_completed(self): """Test get_operation_result handler with non-completed operation.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @server.get_operation_result() @@ -192,7 +192,7 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create operation that's still working - operation = manager.create_operation("test_tool", {}, "session1") + operation = manager.create_operation("test_tool", {}, session_id="session1") manager.mark_working(operation.token) working_request = types.GetOperationPayloadRequest( @@ -213,7 +213,7 @@ async def run_handler(): def test_get_async_result_completed_with_result(self): """Test get_operation_result handler with completed operation.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @server.get_operation_result() @@ -221,7 +221,7 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create and complete operation 
with result - operation = manager.create_operation("test_tool", {}, "session1") + operation = manager.create_operation("test_tool", {}, session_id="session1") result = types.CallToolResult(content=[types.TextContent(type="text", text="success")]) manager.complete_operation(operation.token, result) @@ -246,11 +246,11 @@ class TestCancellationLogic: def test_handle_cancelled_notification(self): """Test handling of cancelled notifications.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) # Create an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") # Track the operation with a request ID request_id = "req_123" @@ -269,11 +269,11 @@ def test_handle_cancelled_notification(self): def test_cancelled_notification_handler(self): """Test the async cancelled notification handler.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) # Create an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") # Track the operation with a request ID request_id = "req_456" @@ -294,11 +294,11 @@ def test_cancelled_notification_handler(self): def test_validate_operation_token_cancelled(self): """Test that cancelled operations are rejected.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) # Create and cancel an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") manager.cancel_operation(operation.token) # Verify that accessing cancelled operation raises error @@ 
-324,10 +324,10 @@ class TestInputRequiredBehavior: def test_mark_input_required(self): """Test marking operation as requiring input.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Create operation in submitted state - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") assert operation.status == "submitted" # Mark as input required @@ -341,10 +341,10 @@ def test_mark_input_required(self): def test_mark_input_required_from_working(self): """Test marking working operation as requiring input.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Create and mark as working - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") manager.mark_working(operation.token) assert operation.status == "working" @@ -355,10 +355,10 @@ def test_mark_input_required_from_working(self): def test_mark_input_required_invalid_states(self): """Test that input_required can only be set from valid states.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Test from completed state - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") manager.complete_operation(operation.token, types.CallToolResult(content=[])) result = manager.mark_input_required(operation.token) @@ -367,10 +367,10 @@ def test_mark_input_required_invalid_states(self): def test_mark_input_completed(self): """Test marking input as completed.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Create operation and mark as input required - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = 
manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") manager.mark_input_required(operation.token) assert operation.status == "input_required" @@ -381,10 +381,10 @@ def test_mark_input_completed(self): def test_mark_input_completed_invalid_state(self): """Test that input can only be completed from input_required state.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Create operation in submitted state - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") assert operation.status == "submitted" # Try to mark input completed from wrong state @@ -394,7 +394,7 @@ def test_mark_input_completed_invalid_state(self): def test_nonexistent_token_operations(self): """Test input_required operations on nonexistent tokens.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Test with fake token assert manager.mark_input_required("fake_token") is False @@ -402,11 +402,11 @@ def test_nonexistent_token_operations(self): def test_server_send_request_for_operation(self): """Test server method for sending requests with operation tokens.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) # Create operation - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") manager.mark_working(operation.token) # Create a mock request @@ -429,11 +429,11 @@ def test_server_send_request_for_operation(self): def test_server_complete_request_for_operation(self): """Test server method for completing requests.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) # Create operation and mark as input required - operation = 
manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") manager.mark_input_required(operation.token) # Complete request for operation @@ -446,10 +446,10 @@ def test_server_complete_request_for_operation(self): def test_input_required_is_terminal_check(self): """Test that input_required is not considered a terminal state.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Create operation and mark as input required - operation = manager.create_operation("test_tool", {"arg": "value"}, "session1") + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") manager.mark_input_required(operation.token) # Verify it's not terminal diff --git a/tests/server/test_async_operations.py b/tests/shared/test_async_operations.py similarity index 86% rename from tests/server/test_async_operations.py rename to tests/shared/test_async_operations.py index aadd086b0..ad8d0195d 100644 --- a/tests/server/test_async_operations.py +++ b/tests/shared/test_async_operations.py @@ -8,7 +8,7 @@ import pytest import mcp.types as types -from mcp.server.lowlevel.async_operations import AsyncOperation, AsyncOperationManager +from mcp.shared.async_operations import ServerAsyncOperation, ServerAsyncOperationManager from mcp.types import AsyncOperationStatus @@ -17,26 +17,26 @@ class TestAsyncOperationManager: def _create_manager_with_operation( self, session_id: str = "session1", **kwargs: Any - ) -> tuple[AsyncOperationManager, AsyncOperation]: + ) -> tuple[ServerAsyncOperationManager, ServerAsyncOperation]: """Helper to create manager with a test operation.""" - manager = AsyncOperationManager() - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id, **kwargs) + manager = ServerAsyncOperationManager() + operation = manager.create_operation("test_tool", {"arg": "value"}, session_id=session_id, **kwargs) 
return manager, operation def test_token_generation(self): """Test token generation with default and custom generators.""" # Default token generation - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() token1 = manager.generate_token("test_session") token2 = manager.generate_token("test_session") assert token1 != token2 and len(token1) > 20 and not token1.startswith("test_session_") # Custom token generator - custom_manager = AsyncOperationManager(token_generator=lambda sid: f"custom_{sid}_token") + custom_manager = ServerAsyncOperationManager(token_generator=lambda sid: f"custom_{sid}_token") assert custom_manager.generate_token("test") == "custom_test_token" # Session-scoped token generator - scoped_manager = AsyncOperationManager(token_generator=lambda sid: f"{sid}_{secrets.token_urlsafe(16)}") + scoped_manager = ServerAsyncOperationManager(token_generator=lambda sid: f"{sid}_{secrets.token_urlsafe(16)}") token1, token2 = scoped_manager.generate_token("s1"), scoped_manager.generate_token("s2") assert token1.startswith("s1_") and token2.startswith("s2_") and token1 != token2 @@ -110,10 +110,10 @@ def test_state_transitions_and_terminal_states(self): assert completed_check is not None and completed_check.status == "completed" # Test other terminal states (use separate managers since previous operation is already completed) - def fail_action(m: AsyncOperationManager, t: str) -> bool: + def fail_action(m: ServerAsyncOperationManager, t: str) -> bool: return m.fail_operation(t, "err") - def cancel_action(m: AsyncOperationManager, t: str) -> bool: + def cancel_action(m: ServerAsyncOperationManager, t: str) -> bool: return m.cancel_operation(t) for status, action in [ @@ -128,7 +128,7 @@ def cancel_action(m: AsyncOperationManager, t: str) -> bool: def test_nonexistent_token_operations(self): """Test operations on nonexistent tokens.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() fake_token = "fake_token" for 
method, args in [ @@ -144,10 +144,10 @@ def test_nonexistent_token_operations(self): def test_session_management(self): """Test session-based operation management and termination.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Create operations for different sessions - ops = [manager.create_operation(f"tool{i}", {}, f"session{i % 2}") for i in range(4)] + ops = [manager.create_operation(f"tool{i}", {}, session_id=f"session{i % 2}") for i in range(4)] # Test session filtering s0_ops = manager.get_session_operations("session0") @@ -169,11 +169,11 @@ def test_session_management(self): def test_expiration_and_cleanup(self): """Test operation expiration and cleanup.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Create operations with different expiration times - short_op = manager.create_operation("tool1", {}, "session1", keep_alive=1) - long_op = manager.create_operation("tool2", {}, "session1", keep_alive=10) + short_op = manager.create_operation("tool1", {}, keep_alive=1, session_id="session1") + long_op = manager.create_operation("tool2", {}, keep_alive=10, session_id="session1") # Complete both and make first expired for op in [short_op, long_op]: @@ -191,10 +191,12 @@ def test_expiration_and_cleanup(self): def test_concurrent_operations(self): """Test concurrent operation handling and memory management.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() # Create many operations - operations = [manager.create_operation(f"tool_{i}", {"data": "x" * 100}, f"session_{i % 3}") for i in range(50)] + operations = [ + manager.create_operation(f"tool_{i}", {"data": "x" * 100}, session_id=f"session_{i % 3}") for i in range(50) + ] # All should be created successfully with unique tokens assert len(operations) == 50 @@ -214,7 +216,7 @@ def test_concurrent_operations(self): @pytest.mark.anyio async def test_cleanup_task_lifecycle(self): """Test background cleanup task 
management.""" - manager = AsyncOperationManager() + manager = ServerAsyncOperationManager() await manager.start_cleanup_task() assert manager._cleanup_task is not None and not manager._cleanup_task.done() @@ -231,8 +233,8 @@ def test_dependency_injection_and_integration(self): from mcp.server.lowlevel import Server # Test custom manager injection - custom_manager = AsyncOperationManager() - operation = custom_manager.create_operation("shared_tool", {"data": "shared"}, "session1") + custom_manager = ServerAsyncOperationManager() + operation = custom_manager.create_operation("shared_tool", {"data": "shared"}, session_id="session1") # Test FastMCP integration fastmcp = FastMCP("FastMCP", async_operations=custom_manager) @@ -247,12 +249,12 @@ def test_dependency_injection_and_integration(self): # Test default creation default_fastmcp = FastMCP("Default") default_server = Server("Default") - assert isinstance(default_fastmcp._async_operations, AsyncOperationManager) - assert isinstance(default_server.async_operations, AsyncOperationManager) + assert isinstance(default_fastmcp._async_operations, ServerAsyncOperationManager) + assert isinstance(default_server.async_operations, ServerAsyncOperationManager) assert default_fastmcp._async_operations is not custom_manager # Test shared manager between servers - new_op = fastmcp._async_operations.create_operation("new_tool", {}, "session2") + new_op = fastmcp._async_operations.create_operation("new_tool", {}, session_id="session2") assert lowlevel.async_operations.get_operation(new_op.token) is new_op @@ -262,7 +264,7 @@ class TestAsyncOperation: def test_terminal_and_expiration_logic(self): """Test terminal state detection and expiration logic.""" now = time.time() - operation = AsyncOperation("test", "test", {}, "session", "submitted", now, 3600) + operation = ServerAsyncOperation("test", "test", {}, "submitted", now, 3600) # Test terminal state detection for status_str, is_terminal in [ From 
e40055a40f9b65376b7eaaa129feafe2192ff33a Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 24 Sep 2025 12:14:08 -0700 Subject: [PATCH 13/41] Support configuring async tool keepalives --- src/mcp/server/fastmcp/server.py | 11 ++ src/mcp/server/fastmcp/tools/base.py | 3 + src/mcp/server/fastmcp/tools/tool_manager.py | 27 +++++ src/mcp/server/lowlevel/server.py | 11 +- tests/server/fastmcp/test_server.py | 119 +++++++++++++++++++ tests/server/fastmcp/test_tool_manager.py | 78 ++++++++++++ 6 files changed, 248 insertions(+), 1 deletion(-) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 6221cb7e7..cd66ea118 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -130,6 +130,8 @@ async def wrap(_: MCPServer[LifespanResultT, Request]) -> AsyncIterator[Lifespan class FastMCP(Generic[LifespanResultT]): + _tool_manager: ToolManager + def __init__( self, name: str | None = None, @@ -361,6 +363,7 @@ async def list_tools(self) -> list[MCPTool]: outputSchema=info.output_schema, annotations=info.annotations, invocationMode=self._get_invocation_mode(info, client_supports_async), + _meta=info.meta, ) for info in tools if client_supports_async or info.invocation_modes != ["async"] @@ -434,6 +437,7 @@ def add_tool( annotations: ToolAnnotations | None = None, structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, + keep_alive: int | None = None, ) -> None: """Add a tool to the server. @@ -452,6 +456,8 @@ def add_tool( - If False, unconditionally creates an unstructured tool invocation_modes: List of supported invocation modes (e.g., ["sync", "async"]) - If None, defaults to ["sync"] for backwards compatibility + keep_alive: How long (in seconds) async operation results should be kept available. + Only applies to async tools. 
""" self._tool_manager.add_tool( fn, @@ -461,6 +467,7 @@ def add_tool( annotations=annotations, structured_output=structured_output, invocation_modes=invocation_modes, + keep_alive=keep_alive, ) def tool( @@ -471,6 +478,7 @@ def tool( annotations: ToolAnnotations | None = None, structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, + keep_alive: int | None = None, ) -> Callable[[AnyFunction], AnyFunction]: """Decorator to register a tool. @@ -491,6 +499,8 @@ def tool( - If None, defaults to ["sync"] for backwards compatibility - Supports "sync" for synchronous execution and "async" for asynchronous execution - Tools with "async" mode will be hidden from clients that don't support async execution + keep_alive: How long (in seconds) async operation results should be kept available. + Only applies to async tools. Example: @server.tool() @@ -533,6 +543,7 @@ def decorator(fn: AnyFunction) -> AnyFunction: annotations=annotations, structured_output=structured_output, invocation_modes=invocation_modes, + keep_alive=keep_alive, ) return fn diff --git a/src/mcp/server/fastmcp/tools/base.py b/src/mcp/server/fastmcp/tools/base.py index 2491b9e94..84936439b 100644 --- a/src/mcp/server/fastmcp/tools/base.py +++ b/src/mcp/server/fastmcp/tools/base.py @@ -38,6 +38,7 @@ class Tool(BaseModel): invocation_modes: list[InvocationMode] = Field( default=["sync"], description="Supported invocation modes (sync/async)" ) + meta: dict[str, Any] | None = Field(description="Optional additional tool information.", default=None) @cached_property def output_schema(self) -> dict[str, Any] | None: @@ -54,6 +55,7 @@ def from_function( annotations: ToolAnnotations | None = None, structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, + meta: dict[str, Any] | None = None, ) -> Tool: """Create a Tool from a function.""" func_name = name or fn.__name__ @@ -89,6 +91,7 @@ def from_function( context_kwarg=context_kwarg, 
annotations=annotations, invocation_modes=invocation_modes, + meta=meta, ) async def run( diff --git a/src/mcp/server/fastmcp/tools/tool_manager.py b/src/mcp/server/fastmcp/tools/tool_manager.py index cc8866dd9..e3a61ba75 100644 --- a/src/mcp/server/fastmcp/tools/tool_manager.py +++ b/src/mcp/server/fastmcp/tools/tool_manager.py @@ -51,12 +51,38 @@ def add_tool( annotations: ToolAnnotations | None = None, structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, + keep_alive: int | None = None, + meta: dict[str, Any] | None = None, ) -> Tool: """Add a tool to the server.""" # Default to sync mode if no invocation modes specified if invocation_modes is None: invocation_modes = ["sync"] + # Set appropriate default keep_alive based on async compatibility + # if user didn't specify custom keep_alive + if keep_alive is None and "async" in invocation_modes: + keep_alive = 3600 # Default for async-compatible tools + + # Validate keep_alive is only used with async-compatible tools + if keep_alive is not None and "async" not in invocation_modes: + raise ValueError( + f"keep_alive parameter can only be used with async-compatible tools. " + f"Tool '{name or fn.__name__}' has invocation_modes={invocation_modes} " + f"but specifies keep_alive={keep_alive}. " + f"Add 'async' to invocation_modes to use keep_alive." 
+ ) + + meta = meta or {} + if keep_alive is not None: + meta.update( + { + # default keepalive value is stashed in _meta to pass it to the lowlevel Server + # without adding it to the actual protocol-level tool definition + "_keep_alive": keep_alive + } + ) + tool = Tool.from_function( fn, name=name, @@ -65,6 +91,7 @@ def add_tool( annotations=annotations, structured_output=structured_output, invocation_modes=invocation_modes, + meta=meta, ) existing = self._tools.get(tool.name) if existing: diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index cf4bf3341..8b85eaab3 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -468,10 +468,13 @@ async def handler(req: types.CallToolRequest): # Check for async execution if tool and self.async_operations and self._should_execute_async(tool): + keep_alive = self._get_tool_keep_alive(tool) + # Create async operation operation = self.async_operations.create_operation( tool_name=tool_name, arguments=arguments, + keep_alive=keep_alive, ) logger.debug(f"Created async operation with token: {operation.token}") @@ -499,7 +502,7 @@ async def execute_async(): content=[], operation=types.AsyncResultProperties( token=operation.token, - keepAlive=3600, + keepAlive=operation.keep_alive, ), ) ) @@ -576,6 +579,12 @@ def _should_execute_async(self, tool: types.Tool) -> bool: invocation_mode = getattr(tool, "invocationMode", None) return invocation_mode == "async" + def _get_tool_keep_alive(self, tool: types.Tool) -> int: + """Get the keepalive value for an async tool.""" + if not tool.meta or "_keep_alive" not in tool.meta: + raise ValueError(f"_keep_alive not defined for tool {tool.name}") + return cast(int, tool.meta["_keep_alive"]) + def progress_notification(self): def decorator( func: Callable[[str | int, float, float | None, str | None], Awaitable[None]], diff --git a/tests/server/fastmcp/test_server.py b/tests/server/fastmcp/test_server.py index 1c5cce2ea..73862abd4 
100644 --- a/tests/server/fastmcp/test_server.py +++ b/tests/server/fastmcp/test_server.py @@ -749,6 +749,125 @@ async def async_invalid_tool() -> list[int]: pytest.fail("Operation should have failed due to validation error") await asyncio.sleep(0.01) + @pytest.mark.anyio + async def test_tool_keep_alive_validation_no_sync_only(self): + """Test that keep_alive validation prevents use on sync-only tools.""" + mcp = FastMCP() + + # Should raise error when keep_alive is used on sync-only tool + with pytest.raises(ValueError, match="keep_alive parameter can only be used with async-compatible tools"): + + @mcp.tool(keep_alive=1800) # Custom keep_alive on sync-only tool + def sync_only_tool(x: int) -> str: + return str(x) + + @pytest.mark.anyio + async def test_tool_keep_alive_default_async_tools(self): + """Test that async tools get correct default keep_alive.""" + mcp = FastMCP() + + # Async tools should get default keep_alive of 3600 + @mcp.tool(invocation_modes=["async"]) # No keep_alive specified + def async_tool_default(x: int) -> str: + return str(x) + + tools = mcp._tool_manager.list_tools() + tool = next(t for t in tools if t.name == "async_tool_default") + assert tool.meta is not None + assert tool.meta["_keep_alive"] == 3600 + + @pytest.mark.anyio + async def test_async_tool_keep_alive_expiry(self): + """Test that async operations expire after keep_alive duration.""" + mcp = FastMCP("AsyncKeepAliveTest") + + @mcp.tool(invocation_modes=["async"], keep_alive=1) # 1 second keep_alive + def short_lived_tool(data: str) -> str: + return f"Processed: {data}" + + # Check that the tool has correct keep_alive + tools = mcp._tool_manager.list_tools() + tool = next(t for t in tools if t.name == "short_lived_tool") + assert tool.meta is not None + assert tool.meta["_keep_alive"] == 1 + + async with client_session(mcp._mcp_server, protocol_version="next") as client: + # First list tools to populate keep_alive mapping + await client.list_tools() + + # Call the async tool + 
result = await client.call_tool("short_lived_tool", {"data": "test"}) + + # Should get operation token + assert result.operation is not None + token = result.operation.token + assert result.operation.keepAlive == 1 + + # Wait for operation to complete + while True: + status = await client.get_operation_status(token) + if status.status == "completed": + break + + # Get result while still alive + operation_result = await client.get_operation_result(token) + assert operation_result.result is not None + + # Wait for keep_alive to expire (1 second + buffer) + await asyncio.sleep(1.2) + + # Operation should now be expired/unavailable + with pytest.raises(Exception): # Should raise error for expired operation + await client.get_operation_result(token) + + @pytest.mark.anyio + async def test_async_tool_keep_alive_expiry_structured_content(self): + """Test that async operations with structured content expire correctly.""" + mcp = FastMCP("AsyncKeepAliveStructuredTest") + + class ProcessResult(BaseModel): + status: str + data: str + count: int + + @mcp.tool(invocation_modes=["async"], keep_alive=1) # 1 second keep_alive + def structured_tool(input_data: str) -> ProcessResult: + return ProcessResult(status="success", data=f"Processed: {input_data}", count=42) + + async with client_session(mcp._mcp_server, protocol_version="next") as client: + # First list tools to populate keep_alive mapping + await client.list_tools() + + # Call the async tool + result = await client.call_tool("structured_tool", {"input_data": "test"}) + + # Should get operation token + assert result.operation is not None + token = result.operation.token + assert result.operation.keepAlive == 1 + + # Wait for operation to complete + while True: + status = await client.get_operation_status(token) + if status.status == "completed": + break + + # Get structured result while still alive + operation_result = await client.get_operation_result(token) + assert operation_result.result is not None + assert 
operation_result.result.structuredContent is not None + structured_data = operation_result.result.structuredContent + assert structured_data["status"] == "success" + assert structured_data["data"] == "Processed: test" + assert structured_data["count"] == 42 + + # Wait for keep_alive to expire (1 second + buffer) + await asyncio.sleep(1.2) + + # Operation should now be expired/unavailable - validation should fail gracefully + with pytest.raises(Exception): # Should raise error for expired operation + await client.get_operation_result(token) + class TestServerResources: @pytest.mark.anyio diff --git a/tests/server/fastmcp/test_tool_manager.py b/tests/server/fastmcp/test_tool_manager.py index 82439eb8a..9db6ff3f2 100644 --- a/tests/server/fastmcp/test_tool_manager.py +++ b/tests/server/fastmcp/test_tool_manager.py @@ -633,6 +633,84 @@ def get_user() -> UserOutput: } assert tool.output_schema == expected_schema + def test_tool_meta_property(self): + """Test that Tool.meta property works correctly.""" + + def double_number(n: int) -> int: + """Double a number.""" + return 10 + + manager = ToolManager() + tool = manager.add_tool(double_number, meta={"foo": "bar"}) + + # Test that meta is populated + expected_meta = { + "foo": "bar", + } + assert tool.meta == expected_meta + + def test_tool_keep_alive_property_sync(self): + """Test that keep_alive property works correctly with sync-only tools.""" + + def double_number(n: int) -> int: + """Double a number.""" + return 10 + + manager = ToolManager() + + # Should raise error when keep_alive is used on sync-only tool + with pytest.raises(ValueError, match="keep_alive parameter can only be used with async-compatible tools"): + manager.add_tool(double_number, invocation_modes=["sync"], keep_alive=1) + + def test_tool_keep_alive_property_async(self): + """Test that keep_alive property works correctly with async-only tools.""" + + def double_number(n: int) -> int: + """Double a number.""" + return 10 + + manager = ToolManager() + 
tool = manager.add_tool(double_number, invocation_modes=["async"], keep_alive=1) + + # Test that meta is populated and has the keepalive stashed in it + expected_meta = { + "_keep_alive": 1, + } + assert tool.meta == expected_meta + + def test_tool_keep_alive_property_hybrid(self): + """Test that keep_alive property works correctly with hybrid sync/async tools.""" + + def double_number(n: int) -> int: + """Double a number.""" + return 10 + + manager = ToolManager() + tool = manager.add_tool(double_number, invocation_modes=["sync", "async"], keep_alive=1) + + # Test that meta is populated and has the keepalive stashed in it + expected_meta = { + "_keep_alive": 1, + } + assert tool.meta == expected_meta + + def test_tool_keep_alive_property_meta(self): + """Test that keep_alive property works correctly with existing metadata defined.""" + + def double_number(n: int) -> int: + """Double a number.""" + return 10 + + manager = ToolManager() + tool = manager.add_tool(double_number, invocation_modes=["async"], keep_alive=1, meta={"foo": "bar"}) + + # Test that meta is populated and has the keepalive stashed in it + expected_meta = { + "foo": "bar", + "_keep_alive": 1, + } + assert tool.meta == expected_meta + @pytest.mark.anyio async def test_tool_with_dict_str_any_output(self): """Test tool with dict[str, Any] return type.""" From 600982effc364f42233256d5bd88087f1c78ee2b Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 24 Sep 2025 12:20:23 -0700 Subject: [PATCH 14/41] Control async op expiry by resolved_at, not created_at --- src/mcp/shared/async_operations.py | 9 +++++++-- tests/server/test_lowlevel_async_operations.py | 4 ++-- tests/shared/test_async_operations.py | 8 ++++---- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/src/mcp/shared/async_operations.py b/src/mcp/shared/async_operations.py index f847f8f6d..02a9048ec 100644 --- a/src/mcp/shared/async_operations.py +++ b/src/mcp/shared/async_operations.py @@ -25,7 +25,7 @@ class 
ClientAsyncOperation: @property def is_expired(self) -> bool: """Check if operation has expired based on keepAlive.""" - return time.time() > (self.created_at + self.keep_alive) + return time.time() > (self.created_at + self.keep_alive * 2) # Give some buffer before expiration @dataclass @@ -38,6 +38,7 @@ class ServerAsyncOperation: status: AsyncOperationStatus created_at: float keep_alive: int + resolved_at: float | None = None session_id: str | None = None result: types.CallToolResult | None = None error: str | None = None @@ -45,8 +46,10 @@ class ServerAsyncOperation: @property def is_expired(self) -> bool: """Check if operation has expired based on keepAlive.""" + if not self.resolved_at: + return False if self.status in ("completed", "failed", "canceled"): - return time.time() > (self.created_at + self.keep_alive) + return time.time() > (self.resolved_at + self.keep_alive) return False @property @@ -197,6 +200,7 @@ def complete_operation(self, token: str, result: types.CallToolResult) -> bool: operation.status = "completed" operation.result = result + operation.resolved_at = time.time() return True def fail_operation(self, token: str, error: str) -> bool: @@ -211,6 +215,7 @@ def fail_operation(self, token: str, error: str) -> bool: operation.status = "failed" operation.error = error + operation.resolved_at = time.time() return True def get_operation_result(self, token: str) -> types.CallToolResult | None: diff --git a/tests/server/test_lowlevel_async_operations.py b/tests/server/test_lowlevel_async_operations.py index ff35ae2c1..c9b5151d5 100644 --- a/tests/server/test_lowlevel_async_operations.py +++ b/tests/server/test_lowlevel_async_operations.py @@ -55,7 +55,7 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: manager.complete_operation(operation.token, types.CallToolResult(content=[])) # Make it expired - operation.created_at = time.time() - 2 + operation.resolved_at = time.time() - 2 expired_request = 
types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) @@ -164,7 +164,7 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: manager.complete_operation(operation.token, types.CallToolResult(content=[])) # Make it expired - operation.created_at = time.time() - 2 + operation.resolved_at = time.time() - 2 expired_request = types.GetOperationPayloadRequest( params=types.GetOperationPayloadParams(token=operation.token) diff --git a/tests/shared/test_async_operations.py b/tests/shared/test_async_operations.py index ad8d0195d..708283cef 100644 --- a/tests/shared/test_async_operations.py +++ b/tests/shared/test_async_operations.py @@ -178,7 +178,7 @@ def test_expiration_and_cleanup(self): # Complete both and make first expired for op in [short_op, long_op]: manager.complete_operation(op.token, Mock()) - short_op.created_at = time.time() - 2 + short_op.resolved_at = time.time() - 2 # Test expiration detection assert short_op.is_expired and not long_op.is_expired @@ -207,7 +207,7 @@ def test_concurrent_operations(self): for i in range(25): manager.complete_operation(operations[i].token, Mock()) operations[i].keep_alive = 1 - operations[i].created_at = time.time() - 2 + operations[i].resolved_at = time.time() - 2 # Cleanup should remove expired operations removed_count = manager.cleanup_expired_operations() @@ -286,8 +286,8 @@ def test_terminal_and_expiration_logic(self): completed_status: AsyncOperationStatus = "completed" operation.status = completed_status - operation.created_at = now - 1800 # 30 minutes ago + operation.resolved_at = now - 1800 # 30 minutes ago assert not operation.is_expired # Within keepAlive - operation.created_at = now - 7200 # 2 hours ago + operation.resolved_at = now - 7200 # 2 hours ago assert operation.is_expired # Past keepAlive From 37fb963cf689e4bd3fe7baaa5aa0339c5e2f609c Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 24 Sep 2025 12:28:27 -0700 Subject: [PATCH 15/41] Add 
snippet for async tool with keepalive --- examples/snippets/servers/async_tools.py | 22 ++++++++++++ tests/server/fastmcp/test_integration.py | 43 +++++++++++++++++++++--- 2 files changed, 61 insertions(+), 4 deletions(-) diff --git a/examples/snippets/servers/async_tools.py b/examples/snippets/servers/async_tools.py index 315104241..3b96b98cb 100644 --- a/examples/snippets/servers/async_tools.py +++ b/examples/snippets/servers/async_tools.py @@ -135,5 +135,27 @@ async def batch_operation_tool(items: list[str], ctx: Context) -> list[str]: # return results +@mcp.tool(invocation_modes=["async"], keep_alive=1800) +async def long_running_task(task_name: str, ctx: Context) -> str: # type: ignore[type-arg] + """A long-running task with custom keep_alive duration.""" + await ctx.info(f"Starting long-running task: {task_name}") + + # Simulate extended processing + await asyncio.sleep(2) + await ctx.report_progress(0.5, 1.0, "Halfway through processing") + await asyncio.sleep(2) + + await ctx.info(f"Task '{task_name}' completed successfully") + return f"Long-running task '{task_name}' finished with 30-minute keep_alive" + + +@mcp.tool(invocation_modes=["async"], keep_alive=2) +async def quick_expiry_task(message: str, ctx: Context) -> str: # type: ignore[type-arg] + """A task with very short keep_alive for testing expiry.""" + await ctx.info(f"Quick task starting: {message}") + await asyncio.sleep(1) + return f"Quick task completed: {message} (expires in 2 seconds)" + + if __name__ == "__main__": mcp.run() diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index 7452b7066..752e553c2 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -10,6 +10,7 @@ # pyright: reportUnknownVariableType=false # pyright: reportUnknownArgumentType=false +import asyncio import json import multiprocessing import socket @@ -702,9 +703,6 @@ async def test_async_tools(server_transport: str, server_url: 
str) -> None: assert async_result.operation is not None token = async_result.operation.token - # Poll for completion - import asyncio - while True: status = await session.get_operation_status(token) if status.status == "completed": @@ -717,7 +715,6 @@ async def test_async_tools(server_transport: str, server_url: str) -> None: break elif status.status == "failed": pytest.fail(f"Async operation failed: {status.error}") - await asyncio.sleep(0.01) # Test hybrid tool (should work as sync by default) hybrid_result = await session.call_tool("hybrid_tool", {"message": "hello"}) @@ -725,6 +722,44 @@ async def test_async_tools(server_transport: str, server_url: str) -> None: assert isinstance(hybrid_result.content[0], TextContent) assert "Hybrid result: HELLO" in hybrid_result.content[0].text + # Test long-running task with custom keep_alive + long_task_result = await session.call_tool("long_running_task", {"task_name": "test_task"}) + assert long_task_result.operation is not None + long_token = long_task_result.operation.token + + while True: + status = await session.get_operation_status(long_token) + if status.status == "completed": + final_result = await session.get_operation_result(long_token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Long-running task 'test_task' finished with 30-minute keep_alive" in content.text + break + elif status.status == "failed": + pytest.fail(f"Long-running task failed: {status.error}") + + # Test quick expiry task (should complete then expire) + quick_result = await session.call_tool("quick_expiry_task", {"message": "test_expiry"}) + assert quick_result.operation is not None + quick_token = quick_result.operation.token + + # Wait for completion + while True: + status = await session.get_operation_status(quick_token) + if status.status == "completed": + break + elif status.status == "failed": + 
pytest.fail(f"Quick task failed: {status.error}") + + # Wait for expiry (2 seconds + buffer) + await asyncio.sleep(3) + + # Should now be expired + with pytest.raises(Exception): # Should raise error when trying to access expired operation + await session.get_operation_result(quick_token) + # Test async tools example with legacy protocol @pytest.mark.anyio From f8ca895454038fe58e7bcd4a230df5d9baafe586 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 24 Sep 2025 13:24:30 -0700 Subject: [PATCH 16/41] Support progress in async tools --- .../snippets/clients/async_tools_client.py | 14 ++++++-- src/mcp/shared/session.py | 27 ++++++++++++-- tests/server/fastmcp/test_integration.py | 36 +++++++++++++++++++ 3 files changed, 73 insertions(+), 4 deletions(-) diff --git a/examples/snippets/clients/async_tools_client.py b/examples/snippets/clients/async_tools_client.py index c4395c7c8..26ad7a701 100644 --- a/examples/snippets/clients/async_tools_client.py +++ b/examples/snippets/clients/async_tools_client.py @@ -90,13 +90,23 @@ async def demonstrate_batch_processing(session: ClientSession): print("\n=== Batch Processing Demo ===") items = ["apple", "banana", "cherry", "date", "elderberry"] - result = await session.call_tool("batch_operation_tool", arguments={"items": items}) + + # Define progress callback + async def progress_callback(progress: float, total: float | None, message: str | None) -> None: + progress_pct = int(progress * 100) if progress else 0 + total_str = f"/{int(total * 100)}%" if total else "" + message_str = f" - {message}" if message else "" + print(f"Progress: {progress_pct}{total_str}{message_str}") + + result = await session.call_tool( + "batch_operation_tool", arguments={"items": items}, progress_callback=progress_callback + ) if result.operation: token = result.operation.token print(f"Batch operation started with token: {token}") - # Poll for status with progress tracking + # Poll for status while True: status = await 
session.get_operation_status(token) print(f"Status: {status.status}") diff --git a/src/mcp/shared/session.py b/src/mcp/shared/session.py index b2f49fc8b..ed880fd8f 100644 --- a/src/mcp/shared/session.py +++ b/src/mcp/shared/session.py @@ -16,11 +16,14 @@ from mcp.types import ( CONNECTION_CLOSED, INVALID_PARAMS, + CallToolResult, CancelledNotification, ClientNotification, ClientRequest, ClientResult, ErrorData, + GetOperationPayloadRequest, + GetOperationPayloadResult, JSONRPCError, JSONRPCMessage, JSONRPCNotification, @@ -177,6 +180,7 @@ class BaseSession( _request_id: int _in_flight: dict[RequestId, RequestResponder[ReceiveRequestT, SendResultT]] _progress_callbacks: dict[RequestId, ProgressFnT] + _operation_requests: dict[str, RequestId] def __init__( self, @@ -196,6 +200,7 @@ def __init__( self._session_read_timeout_seconds = read_timeout_seconds self._in_flight = {} self._progress_callbacks = {} + self._operation_requests = {} self._exit_stack = AsyncExitStack() async def __aenter__(self) -> Self: @@ -251,6 +256,7 @@ async def send_request( # Store the callback for this request self._progress_callbacks[request_id] = progress_callback + pop_progress: RequestId | None = request_id try: jsonrpc_request = JSONRPCRequest( jsonrpc="2.0", @@ -285,11 +291,28 @@ async def send_request( if isinstance(response_or_error, JSONRPCError): raise McpError(response_or_error.error) else: - return result_type.model_validate(response_or_error.result) + result = result_type.model_validate(response_or_error.result) + if isinstance(result, CallToolResult) and result.operation is not None: + # Store mapping of operation token to request ID for async operations + self._operation_requests[result.operation.token] = request_id + + # Don't pop the progress function if we were given one + pop_progress = None + elif isinstance(request, GetOperationPayloadRequest) and isinstance(result, GetOperationPayloadResult): + # Checked request and result to ensure no error + operation_token = 
request.params.token + + # Pop the progress function for the original request + pop_progress = self._operation_requests[operation_token] + + # Pop the token mapping since we know we won't need it anymore + self._operation_requests.pop(operation_token, None) + return result finally: self._response_streams.pop(request_id, None) - self._progress_callbacks.pop(request_id, None) + if pop_progress: + self._progress_callbacks.pop(pop_progress, None) await response_stream.aclose() await response_stream_reader.aclose() diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index 752e553c2..9881614d3 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -760,6 +760,42 @@ async def test_async_tools(server_transport: str, server_url: str) -> None: with pytest.raises(Exception): # Should raise error when trying to access expired operation await session.get_operation_result(quick_token) + # Test batch operation with progress notifications + progress_received = False + + async def progress_callback(progress: float, total: float | None, message: str | None) -> None: + nonlocal progress_received + progress_received = True + assert 0.0 <= progress <= 1.0 # Progress should be between 0 and 1 + + batch_result = await session.call_tool( + "batch_operation_tool", + {"items": ["apple", "banana", "cherry"]}, + progress_callback=progress_callback, + ) + assert batch_result.operation is not None + batch_token = batch_result.operation.token + + while True: + status = await session.get_operation_status(batch_token) + + if status.status == "completed": + final_result = await session.get_operation_result(batch_token) + assert not final_result.result.isError + # Should have structured content with processed items + if final_result.result.structuredContent: + # Structured content is wrapped in {"result": [...]} for list return types + assert isinstance(final_result.result.structuredContent, dict) + assert 
"result" in final_result.result.structuredContent + assert isinstance(final_result.result.structuredContent["result"], list) + assert len(final_result.result.structuredContent["result"]) == 3 + break + elif status.status == "failed": + pytest.fail(f"Batch operation failed: {status.error}") + + # Assert that we received at least one progress notification + assert progress_received, "Should have received progress notifications during batch operation" + # Test async tools example with legacy protocol @pytest.mark.anyio From b802dc423bc74458830bfdcb3626ac55ca77bcaf Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Thu, 25 Sep 2025 20:05:38 -0700 Subject: [PATCH 17/41] Operation token plumbing to support async elicitation/sampling --- .../snippets/clients/async_tools_client.py | 57 ++++++- examples/snippets/servers/async_tools.py | 49 ++++++ src/mcp/client/session.py | 25 +++ src/mcp/server/elicitation.py | 2 + src/mcp/server/fastmcp/server.py | 8 +- src/mcp/server/lowlevel/server.py | 36 ++-- src/mcp/server/session.py | 26 +++ src/mcp/server/streamable_http.py | 51 +++++- src/mcp/shared/context.py | 1 + src/mcp/shared/session.py | 6 + src/mcp/types.py | 30 ++-- tests/issues/test_176_progress_token.py | 28 ++- tests/server/fastmcp/test_integration.py | 159 ++++++++++++++++++ tests/shared/test_progress_notifications.py | 1 + 14 files changed, 433 insertions(+), 46 deletions(-) diff --git a/examples/snippets/clients/async_tools_client.py b/examples/snippets/clients/async_tools_client.py index 26ad7a701..80b2fde36 100644 --- a/examples/snippets/clients/async_tools_client.py +++ b/examples/snippets/clients/async_tools_client.py @@ -13,6 +13,7 @@ from mcp import ClientSession, StdioServerParameters, types from mcp.client.stdio import stdio_client +from mcp.shared.context import RequestContext # Create server parameters for stdio connection server_params = StdioServerParameters( @@ -22,6 +23,22 @@ ) +async def elicitation_callback(context: RequestContext[ClientSession, None], 
params: types.ElicitRequestParams): + """Handle elicitation requests from the server.""" + if "data_migration" in params.message: + return types.ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "normal"}, + ) + else: + return types.ElicitResult(action="decline") + + +async def logging_callback(params: types.LoggingMessageNotificationParams): + """Handle logging messages from the server.""" + print(f"Server log: {params.data}", file=sys.stderr) + + async def demonstrate_sync_tool(session: ClientSession): """Demonstrate calling a synchronous tool.""" print("\n=== Synchronous Tool Demo ===") @@ -174,6 +191,37 @@ async def demonstrate_data_processing(session: ClientSession): await asyncio.sleep(0.8) +async def demonstrate_elicitation(session: ClientSession): + """Demonstrate async elicitation tool.""" + print("\n=== Async Elicitation Demo ===") + + result = await session.call_tool("async_elicitation_tool", arguments={"operation": "data_migration"}) + + if result.operation: + token = result.operation.token + print(f"Elicitation operation started with token: {token}") + + # Poll for completion + while True: + status = await session.get_operation_status(token) + print(f"Status: {status.status}") + + if status.status == "completed": + final_result = await session.get_operation_result(token) + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Elicitation result: {content.text}") + break + elif status.status == "failed": + print(f"Elicitation failed: {status.error}") + break + elif status.status in ("canceled", "unknown"): + print(f"Elicitation ended with status: {status.status}") + break + + await asyncio.sleep(0.5) + + async def run(): """Run all async tool demonstrations.""" # Determine protocol version from command line @@ -189,7 +237,13 @@ async def run(): async with stdio_client(server_params) as (read, write): # Use configured protocol version - async with ClientSession(read, 
write, protocol_version=protocol_version) as session: + async with ClientSession( + read, + write, + protocol_version=protocol_version, + elicitation_callback=elicitation_callback, + logging_callback=logging_callback, + ) as session: # Initialize the connection await session.initialize() @@ -206,6 +260,7 @@ async def run(): await demonstrate_async_tool(session) await demonstrate_batch_processing(session) await demonstrate_data_processing(session) + await demonstrate_elicitation(session) print("\n=== All demonstrations complete! ===") diff --git a/examples/snippets/servers/async_tools.py b/examples/snippets/servers/async_tools.py index 3b96b98cb..431efb777 100644 --- a/examples/snippets/servers/async_tools.py +++ b/examples/snippets/servers/async_tools.py @@ -7,12 +7,61 @@ import asyncio +from pydantic import BaseModel, Field + from mcp.server.fastmcp import Context, FastMCP # Create an MCP server with async operations support mcp = FastMCP("Async Tools Demo") +class UserPreferences(BaseModel): + """Schema for collecting user preferences.""" + + continue_processing: bool = Field(description="Should we continue with the operation?") + priority_level: str = Field( + default="normal", + description="Priority level: low, normal, high", + ) + + +@mcp.tool(invocation_modes=["async"]) +async def async_elicitation_tool(operation: str, ctx: Context) -> str: # type: ignore[type-arg] + """An async tool that uses elicitation to get user input.""" + await ctx.info(f"Starting operation: {operation}") + + # Simulate some initial processing + await asyncio.sleep(0.5) + await ctx.report_progress(0.3, 1.0, "Initial processing complete") + + await ctx.debug("About to call elicit") + try: + # Ask user for preferences + result = await ctx.elicit( + message=f"Operation '{operation}' requires user input. 
How should we proceed?", + schema=UserPreferences, + ) + await ctx.debug(f"Elicit result: {result}") + except Exception as e: + await ctx.error(f"Elicitation failed: {e}") + raise + + if result.action == "accept" and result.data: + if result.data.continue_processing: + await ctx.info(f"Continuing with {result.data.priority_level} priority") + # Simulate processing based on user choice + processing_time = {"low": 0.5, "normal": 1.0, "high": 1.5}.get(result.data.priority_level, 1.0) + await asyncio.sleep(processing_time) + await ctx.report_progress(1.0, 1.0, "Operation complete") + return f"Operation '{operation}' completed successfully with {result.data.priority_level} priority" + else: + await ctx.warning("User chose not to continue") + return f"Operation '{operation}' cancelled by user" + else: + await ctx.error("User declined or cancelled the operation") + return f"Operation '{operation}' aborted" + + @mcp.tool() def sync_tool(x: int) -> str: """An implicitly-synchronous tool.""" diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index 8f571f729..1cbd08f31 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -466,6 +466,7 @@ async def send_roots_list_changed(self) -> None: async def _received_request(self, responder: RequestResponder[types.ServerRequest, types.ClientResult]) -> None: ctx = RequestContext[ClientSession, Any]( request_id=responder.request_id, + operation_token=responder.operation.token if responder.operation is not None else None, meta=responder.request_meta, session=self, lifespan_context=None, @@ -475,12 +476,36 @@ async def _received_request(self, responder: RequestResponder[types.ServerReques case types.CreateMessageRequest(params=params): with responder: response = await self._sampling_callback(ctx, params) + if isinstance(response, types.CreateMessageResult): + response.operation_props = ( + types.Operation(token=responder.operation.token) + if responder.operation is not None + else None + ) + else: + 
response.operation = ( + types.Operation(token=responder.operation.token) + if responder.operation is not None + else None + ) client_response = ClientResponse.validate_python(response) await responder.respond(client_response) case types.ElicitRequest(params=params): with responder: response = await self._elicitation_callback(ctx, params) + if isinstance(response, types.ElicitResult): + response.operation_props = ( + types.Operation(token=responder.operation.token) + if responder.operation is not None + else None + ) + else: + response.operation = ( + types.Operation(token=responder.operation.token) + if responder.operation is not None + else None + ) client_response = ClientResponse.validate_python(response) await responder.respond(client_response) diff --git a/src/mcp/server/elicitation.py b/src/mcp/server/elicitation.py index 39e3212e9..5dec1767d 100644 --- a/src/mcp/server/elicitation.py +++ b/src/mcp/server/elicitation.py @@ -78,6 +78,7 @@ async def elicit_with_validation( message: str, schema: type[ElicitSchemaModelT], related_request_id: RequestId | None = None, + related_operation_token: str | None = None, ) -> ElicitationResult[ElicitSchemaModelT]: """Elicit information from the client/user with schema validation. 
@@ -96,6 +97,7 @@ async def elicit_with_validation( message=message, requestedSchema=json_schema, related_request_id=related_request_id, + related_operation_token=related_operation_token, ) if result.action == "accept" and result.content is not None: diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index cd66ea118..bc92c121d 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -1213,6 +1213,8 @@ async def report_progress(self, progress: float, total: float | None = None, mes progress=progress, total=total, message=message, + related_request_id=self.request_id, + related_operation_token=self.request_context.operation_token, ) async def read_resource(self, uri: str | AnyUrl) -> Iterable[ReadResourceContents]: @@ -1255,7 +1257,11 @@ async def elicit( """ return await elicit_with_validation( - session=self.request_context.session, message=message, schema=schema, related_request_id=self.request_id + session=self.request_context.session, + message=message, + schema=schema, + related_request_id=self.request_id, + related_operation_token=self.request_context.operation_token, ) async def log( diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 8b85eaab3..bc0336010 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -91,7 +91,7 @@ async def main(): from mcp.shared.exceptions import McpError from mcp.shared.message import ServerMessageMetadata, SessionMessage from mcp.shared.session import RequestResponder -from mcp.types import RequestId +from mcp.types import Operation, RequestId logger = logging.getLogger(__name__) @@ -478,6 +478,10 @@ async def handler(req: types.CallToolRequest): ) logger.debug(f"Created async operation with token: {operation.token}") + ctx = self.request_context + ctx.operation_token = operation.token + request_ctx.set(ctx) + # Start async execution in background async def execute_async(): try: @@ -560,6 +564,9 @@ 
def _process_tool_result( content=list(unstructured_content), structuredContent=maybe_structured_content, isError=False, + _operation=Operation(token=self.request_context.operation_token) + if self.request_context and self.request_context.operation_token + else None, ) def _should_execute_async(self, tool: types.Tool) -> bool: @@ -720,9 +727,7 @@ def send_request_for_operation(self, token: str, request: types.ServerRequest) - # Add operation token to request if hasattr(request.root, "params") and request.root.params is not None: if not hasattr(request.root.params, "operation") or request.root.params.operation is None: - # Create operation field if it doesn't exist - operation_data = types.RequestParams.Operation(token=token) - request.root.params.operation = operation_data + request.root.params.operation = Operation(token=token) logger.debug(f"Marked operation {token} as input_required and added to request") def send_notification_for_operation(self, token: str, notification: types.ServerNotification) -> None: @@ -732,9 +737,7 @@ def send_notification_for_operation(self, token: str, notification: types.Server # Add operation token to notification if hasattr(notification.root, "params") and notification.root.params is not None: if not hasattr(notification.root.params, "operation") or notification.root.params.operation is None: - # Create operation field if it doesn't exist - operation_data = types.NotificationParams.Operation(token=token) - notification.root.params.operation = operation_data + notification.root.params.operation = Operation(token=token) logger.debug(f"Marked operation {token} as input_required and added to notification") def complete_request_for_operation(self, token: str) -> None: @@ -833,25 +836,16 @@ async def _handle_request( # app.get_request_context() context_token = request_ctx.set( RequestContext( - message.request_id, - message.request_meta, - session, - lifespan_context, + request_id=message.request_id, + 
operation_token=message.operation.token if message.operation else None, + meta=message.request_meta, + session=session, + lifespan_context=lifespan_context, request=request_data, ) ) response = await handler(req) - # Handle operation token in response (for input_required operations) - if ( - hasattr(req, "params") - and req.params is not None - and hasattr(req.params, "operation") - and req.params.operation is not None - ): - operation_token = req.params.operation.token - self.complete_request_for_operation(operation_token) - # Track async operations for cancellation if isinstance(req, types.CallToolRequest): result = response.root diff --git a/src/mcp/server/session.py b/src/mcp/server/session.py index 7b3680f7c..6d2a503b5 100644 --- a/src/mcp/server/session.py +++ b/src/mcp/server/session.py @@ -184,8 +184,13 @@ async def send_log_message( data: Any, logger: str | None = None, related_request_id: types.RequestId | None = None, + related_operation_token: str | None = None, ) -> None: """Send a log message notification.""" + operation = None + if related_operation_token: + operation = types.Operation(token=related_operation_token) + await self.send_notification( types.ServerNotification( types.LoggingMessageNotification( @@ -193,6 +198,7 @@ async def send_log_message( level=level, data=data, logger=logger, + _operation=operation, ), ) ), @@ -221,8 +227,13 @@ async def create_message( metadata: dict[str, Any] | None = None, model_preferences: types.ModelPreferences | None = None, related_request_id: types.RequestId | None = None, + related_operation_token: str | None = None, ) -> types.CreateMessageResult: """Send a sampling/create_message request.""" + operation = None + if related_operation_token: + operation = types.Operation(token=related_operation_token) + return await self.send_request( request=types.ServerRequest( types.CreateMessageRequest( @@ -235,6 +246,7 @@ async def create_message( stopSequences=stop_sequences, metadata=metadata, 
modelPreferences=model_preferences, + _operation=operation, ), ) ), @@ -256,22 +268,30 @@ async def elicit( message: str, requestedSchema: types.ElicitRequestedSchema, related_request_id: types.RequestId | None = None, + related_operation_token: str | None = None, ) -> types.ElicitResult: """Send an elicitation/create request. Args: message: The message to present to the user requestedSchema: Schema defining the expected response structure + related_request_id: Optional request ID this elicitation is related to + related_operation_token: Optional operation token this elicitation is related to Returns: The client's response """ + operation = None + if related_operation_token: + operation = types.Operation(token=related_operation_token) + return await self.send_request( types.ServerRequest( types.ElicitRequest( params=types.ElicitRequestParams( message=message, requestedSchema=requestedSchema, + _operation=operation, ), ) ), @@ -293,8 +313,13 @@ async def send_progress_notification( total: float | None = None, message: str | None = None, related_request_id: str | None = None, + related_operation_token: str | None = None, ) -> None: """Send a progress notification.""" + operation = None + if related_operation_token: + operation = types.Operation(token=related_operation_token) + await self.send_notification( types.ServerNotification( types.ProgressNotification( @@ -303,6 +328,7 @@ async def send_progress_notification( progress=progress, total=total, message=message, + _operation=operation, ), ) ), diff --git a/src/mcp/server/streamable_http.py b/src/mcp/server/streamable_http.py index b45d742b0..77d4aa63e 100644 --- a/src/mcp/server/streamable_http.py +++ b/src/mcp/server/streamable_http.py @@ -172,6 +172,8 @@ def __init__( ], ] = {} self._terminated = False + # Track operation tokens to original request IDs for stream resumption + self._operation_to_request_id: dict[str, str] = {} @property def is_terminated(self) -> bool: @@ -399,6 +401,7 @@ async def 
_handle_post_request(self, scope: Scope, request: Request, receive: Re metadata = ServerMessageMetadata(request_context=request) session_message = SessionMessage(message, metadata=metadata) await writer.send(session_message) + should_pop_stream = True # Default to cleaning up stream try: # Process messages from the request-specific stream # We need to collect all messages until we get a response @@ -416,6 +419,19 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re # At this point we should have a response if response_message: + # Check if this is an async operation response - keep stream open + if ( + isinstance(response_message.root, JSONRPCResponse) + and response_message.root.result + and "_operation" in response_message.root.result + and ( + ("token" in response_message.root.result["_operation"]) + and response_message.root.result["_operation"]["token"] + ) + ): + # This is an async operation - keep the stream open for elicitation/sampling + should_pop_stream = False + # Create JSON response response = self._create_json_response(response_message) await response(scope, receive, send) @@ -436,7 +452,8 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re ) await response(scope, receive, send) finally: - await self._clean_up_memory_streams(request_id) + if should_pop_stream: + await self._clean_up_memory_streams(request_id) else: # Create SSE stream sse_stream_writer, sse_stream_reader = anyio.create_memory_object_stream[dict[str, str]](0) @@ -838,6 +855,38 @@ async def message_router(): # If this response is for an existing request stream, # send it there target_request_id = response_id + + # Track operation tokens for stream resumption + if ( + isinstance(message.root, JSONRPCResponse) + and message.root.result + and "_operation" in message.root.result + and ( + ("token" in message.root.result["_operation"]) + and message.root.result["_operation"]["token"] + ) + ): + operation_token = 
message.root.result["_operation"]["token"] + self._operation_to_request_id[operation_token] = response_id + logger.info(f"Tracking operation token {operation_token} -> request {response_id}") + elif ( + message.root.params + and "_operation" in message.root.params + and ( + ("token" in message.root.params["_operation"]) + and message.root.params["_operation"]["token"] + ) + ): + # Route operation-related messages back to the original request stream + operation_token = message.root.params["_operation"]["token"] + if operation_token in self._operation_to_request_id: + target_request_id = self._operation_to_request_id[operation_token] + logging.info(operation_token) + else: + logger.warning( + f"Operation token {operation_token} not found in mapping, using GET_STREAM_KEY" + ) + target_request_id = GET_STREAM_KEY # Extract related_request_id from meta if it exists elif ( session_message.metadata is not None diff --git a/src/mcp/shared/context.py b/src/mcp/shared/context.py index f3006e7d5..f4e394990 100644 --- a/src/mcp/shared/context.py +++ b/src/mcp/shared/context.py @@ -14,6 +14,7 @@ @dataclass class RequestContext(Generic[SessionT, LifespanContextT, RequestT]): request_id: RequestId + operation_token: str | None meta: RequestParams.Meta | None session: SessionT lifespan_context: LifespanContextT diff --git a/src/mcp/shared/session.py b/src/mcp/shared/session.py index ed880fd8f..d9388f5d5 100644 --- a/src/mcp/shared/session.py +++ b/src/mcp/shared/session.py @@ -29,6 +29,7 @@ JSONRPCNotification, JSONRPCRequest, JSONRPCResponse, + Operation, ProgressNotification, RequestParams, ServerNotification, @@ -73,6 +74,7 @@ def __init__( request_id: RequestId, request_meta: RequestParams.Meta | None, request: ReceiveRequestT, + operation: Operation | None, session: """BaseSession[ SendRequestT, SendNotificationT, @@ -86,6 +88,7 @@ def __init__( self.request_id = request_id self.request_meta = request_meta self.request = request + self.operation = operation 
self.message_metadata = message_metadata self._session = session self._completed = False @@ -371,6 +374,9 @@ async def _receive_loop(self) -> None: if validated_request.root.params else None, request=validated_request, + operation=validated_request.root.params.operation + if validated_request.root.params + else None, session=self, on_complete=lambda r: self._in_flight.pop(r.request_id, None), message_metadata=message.metadata, diff --git a/src/mcp/types.py b/src/mcp/types.py index c2b341794..25c96e9d1 100644 --- a/src/mcp/types.py +++ b/src/mcp/types.py @@ -41,6 +41,12 @@ AnyFunction: TypeAlias = Callable[..., Any] +class Operation(BaseModel): + token: str + """The token associated with the originating asynchronous tool call.""" + model_config = ConfigDict(extra="allow") + + class RequestParams(BaseModel): class Meta(BaseModel): progressToken: ProgressToken | None = None @@ -53,11 +59,6 @@ class Meta(BaseModel): model_config = ConfigDict(extra="allow") - class Operation(BaseModel): - token: str - """The token associated with the originating asynchronous tool call.""" - model_config = ConfigDict(extra="allow") - meta: Meta | None = Field(alias="_meta", default=None) operation: Operation | None = Field(alias="_operation", default=None) """Async operation parameters, only used when a request is sent during an asynchronous tool call.""" @@ -75,11 +76,6 @@ class NotificationParams(BaseModel): class Meta(BaseModel): model_config = ConfigDict(extra="allow") - class Operation(BaseModel): - token: str - """The token associated with the originating asynchronous tool call.""" - model_config = ConfigDict(extra="allow") - meta: Meta | None = Field(alias="_meta", default=None) """ See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) @@ -120,17 +116,12 @@ class Notification(BaseModel, Generic[NotificationParamsT, MethodT]): class 
Result(BaseModel): """Base class for JSON-RPC results.""" - class Operation(BaseModel): - token: str - """The token associated with the originating asynchronous tool call.""" - model_config = ConfigDict(extra="allow") - meta: dict[str, Any] | None = Field(alias="_meta", default=None) """ See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ - _operation: Operation | None = None + operation_props: Operation | None = Field(alias="_operation", default=None) """ Async operation parameters, only used when a result is sent in response to a request with operation parameters. """ @@ -200,6 +191,9 @@ class ErrorData(BaseModel): sender (e.g. detailed error information, nested errors etc.). """ + operation: Operation | None = Field(alias="_operation", default=None) + """Async operation parameters, only used when an error is sent during an asynchronous tool call.""" + model_config = ConfigDict(extra="allow") @@ -913,8 +907,6 @@ class AsyncResultProperties(BaseModel): """Server-generated token to use for checking status and retrieving results.""" keepAlive: int """Number of seconds the result will be kept available upon completion.""" - message: str | None = None - """Optional message to immediately provide to the client.""" model_config = ConfigDict(extra="allow") @@ -992,7 +984,7 @@ class CallToolResult(Result): structuredContent: dict[str, Any] | None = None """An optional JSON object that represents the structured result of the tool call.""" isError: bool = False - operation: AsyncResultProperties | None = Field(default=None) + operation: AsyncResultProperties | None = None """Optional async execution information. 
Present when tool is executed asynchronously.""" diff --git a/tests/issues/test_176_progress_token.py b/tests/issues/test_176_progress_token.py index eb5f19d64..230be8241 100644 --- a/tests/issues/test_176_progress_token.py +++ b/tests/issues/test_176_progress_token.py @@ -21,6 +21,7 @@ async def test_progress_token_zero_first_call(): request_context = RequestContext( request_id="test-request", + operation_token=None, session=mock_session, meta=mock_meta, lifespan_context=None, @@ -36,6 +37,27 @@ async def test_progress_token_zero_first_call(): # Verify progress notifications assert mock_session.send_progress_notification.call_count == 3, "All progress notifications should be sent" - mock_session.send_progress_notification.assert_any_call(progress_token=0, progress=0.0, total=10.0, message=None) - mock_session.send_progress_notification.assert_any_call(progress_token=0, progress=5.0, total=10.0, message=None) - mock_session.send_progress_notification.assert_any_call(progress_token=0, progress=10.0, total=10.0, message=None) + mock_session.send_progress_notification.assert_any_call( + progress_token=0, + progress=0.0, + total=10.0, + message=None, + related_request_id="test-request", + related_operation_token=None, + ) + mock_session.send_progress_notification.assert_any_call( + progress_token=0, + progress=5.0, + total=10.0, + message=None, + related_request_id="test-request", + related_operation_token=None, + ) + mock_session.send_progress_notification.assert_any_call( + progress_token=0, + progress=10.0, + total=10.0, + message=None, + related_request_id="test-request", + related_operation_token=None, + ) diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index 9881614d3..d7ee425af 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -12,6 +12,7 @@ import asyncio import json +import logging import multiprocessing import socket import time @@ -63,6 +64,8 @@ 
ToolListChangedNotification, ) +logger = logging.getLogger(__name__) + class NotificationCollector: """Collects notifications from the server for testing.""" @@ -248,6 +251,12 @@ async def elicitation_callback(context: RequestContext[ClientSession, None], par action="accept", content={"checkAlternative": True, "alternativeDate": "2024-12-26"}, ) + # For async elicitation tool test + elif "data_migration" in params.message: + return ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "high"}, + ) else: return ElicitResult(action="decline") @@ -797,6 +806,156 @@ async def progress_callback(progress: float, total: float | None, message: str | assert progress_received, "Should have received progress notifications during batch operation" +# Test async elicitation tool (demonstrates bug in streamable-http transport) +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tools", "streamable-http"), + ], + indirect=True, +) +async def test_async_elicitation_tool(server_transport: str, server_url: str) -> None: + """Test async elicitation tool functionality. + + This test demonstrates a bug in streamable-http transport where elicitation + requests during async operations don't reach the client callback. 
+ """ + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + # Use the same elicitation callback as the client + async def test_elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): + """Handle elicitation requests from the server.""" + logger.debug(f"Client elicitation callback called with message: {params.message}") + if "data_migration" in params.message: + logger.debug("Client accepting elicitation request") + return ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "normal"}, + ) + else: + logger.debug("Client declining elicitation request") + return ElicitResult(action="decline") + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession( + read_stream, + write_stream, + protocol_version="next", + elicitation_callback=test_elicitation_callback, + ) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tools Demo" + + # Test async elicitation tool - same as client + elicit_result = await session.call_tool("async_elicitation_tool", {"operation": "data_migration"}) + assert elicit_result.operation is not None + token = elicit_result.operation.token + + # Poll exactly like the client does + max_polls = 20 + poll_count = 0 + while poll_count < max_polls: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Operation 'data_migration'" in content.text + assert "completed successfully" in content.text + return + elif status.status == "failed": + pytest.fail(f"Async elicitation failed: 
{status.error}") + elif status.status in ("canceled", "unknown"): + pytest.fail(f"Operation ended with status: {status.status}") + + poll_count += 1 + await asyncio.sleep(0.5) + + pytest.fail(f"Test timed out after {max_polls} polls") + + +# Test async elicitation tool with stdio transport (works as expected) +@pytest.mark.anyio +async def test_async_elicitation_tool_stdio() -> None: + """Test async elicitation tool functionality using stdio transport. + + This test works because stdio transport properly handles elicitation during async operations. + """ + import os + + from mcp import StdioServerParameters + from mcp.client.stdio import stdio_client + + # Use the same server parameters as the client + server_params = StdioServerParameters( + command="uv", + args=["run", "server", "async_tools", "stdio"], + env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, + ) + + # Use the same elicitation callback as the client + async def test_elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): + """Handle elicitation requests from the server.""" + logger.debug(f"Client elicitation callback called with message: {params.message}") + if "data_migration" in params.message: + logger.debug("Client accepting elicitation request") + return ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "normal"}, + ) + else: + logger.debug("Client declining elicitation request") + return ElicitResult(action="decline") + + async with stdio_client(server_params) as (read, write): + async with ClientSession( + read, + write, + protocol_version="next", + elicitation_callback=test_elicitation_callback, + ) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tools Demo" + + # Test async elicitation tool + elicit_result = await session.call_tool("async_elicitation_tool", {"operation": "data_migration"}) + assert 
elicit_result.operation is not None + token = elicit_result.operation.token + + # Poll for completion + max_polls = 20 + poll_count = 0 + while poll_count < max_polls: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Operation 'data_migration'" in content.text + assert "completed successfully" in content.text + return + elif status.status == "failed": + pytest.fail(f"Async elicitation failed: {status.error}") + elif status.status in ("canceled", "unknown"): + pytest.fail(f"Operation ended with status: {status.status}") + + poll_count += 1 + await asyncio.sleep(0.5) + + pytest.fail(f"Test timed out after {max_polls} polls") + + # Test async tools example with legacy protocol @pytest.mark.anyio @pytest.mark.parametrize( diff --git a/tests/shared/test_progress_notifications.py b/tests/shared/test_progress_notifications.py index d3aabba20..3cf18509b 100644 --- a/tests/shared/test_progress_notifications.py +++ b/tests/shared/test_progress_notifications.py @@ -275,6 +275,7 @@ async def handle_client_message( meta = types.RequestParams.Meta(progressToken=progress_token) request_context = RequestContext( request_id="test-request", + operation_token=None, session=client_session, meta=meta, lifespan_context=None, From 047664f63f417eff93ec9ac91881f4c97537a51a Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 26 Sep 2025 13:33:17 -0700 Subject: [PATCH 18/41] Add decorator parameter for immediate return value in LRO --- src/mcp/server/fastmcp/tools/base.py | 41 +++++++++++++++++++- src/mcp/server/fastmcp/tools/tool_manager.py | 29 +------------- 2 files changed, 41 insertions(+), 29 deletions(-) diff --git a/src/mcp/server/fastmcp/tools/base.py b/src/mcp/server/fastmcp/tools/base.py index 
84936439b..524eefd84 100644 --- a/src/mcp/server/fastmcp/tools/base.py +++ b/src/mcp/server/fastmcp/tools/base.py @@ -2,7 +2,7 @@ import functools import inspect -from collections.abc import Callable +from collections.abc import Awaitable, Callable from functools import cached_property from typing import TYPE_CHECKING, Any, Literal @@ -39,6 +39,9 @@ class Tool(BaseModel): default=["sync"], description="Supported invocation modes (sync/async)" ) meta: dict[str, Any] | None = Field(description="Optional additional tool information.", default=None) + immediate_result: Callable[..., Awaitable[list[Any]]] | None = Field( + None, exclude=True, description="Optional immediate result function for async tools" + ) @cached_property def output_schema(self) -> dict[str, Any] | None: @@ -55,7 +58,9 @@ def from_function( annotations: ToolAnnotations | None = None, structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, + keep_alive: int | None = None, meta: dict[str, Any] | None = None, + immediate_result: Callable[..., Awaitable[list[Any]]] | None = None, ) -> Tool: """Create a Tool from a function.""" func_name = name or fn.__name__ @@ -80,6 +85,39 @@ def from_function( if invocation_modes is None: invocation_modes = ["sync"] + # Set appropriate default keep_alive based on async compatibility + # if user didn't specify custom keep_alive + if keep_alive is None and "async" in invocation_modes: + keep_alive = 3600 # Default for async-compatible tools + + # Validate keep_alive is only used with async-compatible tools + if keep_alive is not None and "async" not in invocation_modes: + raise ValueError( + f"keep_alive parameter can only be used with async-compatible tools. " + f"Tool '{func_name}' has invocation_modes={invocation_modes} " + f"but specifies keep_alive={keep_alive}. " + f"Add 'async' to invocation_modes to use keep_alive." 
+ ) + + # Process meta dictionary and add keep_alive if specified + meta = meta or {} + if keep_alive is not None: + meta = meta.copy() # Don't modify the original dict + meta["_keep_alive"] = keep_alive + + # Validate immediate_result usage + if immediate_result is not None: + # Check if tool supports async invocation + if "async" not in invocation_modes: + raise ValueError( + "immediate_result can only be used with async-compatible tools. " + "Add 'async' to invocation_modes to use immediate_result." + ) + + # Validate that immediate_result is an async callable + if not _is_async_callable(immediate_result): + raise ValueError("immediate_result must be an async callable that returns list[ContentBlock]") + return cls( fn=fn, name=func_name, @@ -92,6 +130,7 @@ def from_function( annotations=annotations, invocation_modes=invocation_modes, meta=meta, + immediate_result=immediate_result, ) async def run( diff --git a/src/mcp/server/fastmcp/tools/tool_manager.py b/src/mcp/server/fastmcp/tools/tool_manager.py index e3a61ba75..933881ef4 100644 --- a/src/mcp/server/fastmcp/tools/tool_manager.py +++ b/src/mcp/server/fastmcp/tools/tool_manager.py @@ -55,34 +55,6 @@ def add_tool( meta: dict[str, Any] | None = None, ) -> Tool: """Add a tool to the server.""" - # Default to sync mode if no invocation modes specified - if invocation_modes is None: - invocation_modes = ["sync"] - - # Set appropriate default keep_alive based on async compatibility - # if user didn't specify custom keep_alive - if keep_alive is None and "async" in invocation_modes: - keep_alive = 3600 # Default for async-compatible tools - - # Validate keep_alive is only used with async-compatible tools - if keep_alive is not None and "async" not in invocation_modes: - raise ValueError( - f"keep_alive parameter can only be used with async-compatible tools. " - f"Tool '{name or fn.__name__}' has invocation_modes={invocation_modes} " - f"but specifies keep_alive={keep_alive}. 
" - f"Add 'async' to invocation_modes to use keep_alive." - ) - - meta = meta or {} - if keep_alive is not None: - meta.update( - { - # default keepalive value is stashed in _meta to pass it to the lowlevel Server - # without adding it to the actual protocol-level tool definition - "_keep_alive": keep_alive - } - ) - tool = Tool.from_function( fn, name=name, @@ -91,6 +63,7 @@ def add_tool( annotations=annotations, structured_output=structured_output, invocation_modes=invocation_modes, + keep_alive=keep_alive, meta=meta, ) existing = self._tools.get(tool.name) From 07a2821d0689bb505f65501b7a0a3fde3e2684a6 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 26 Sep 2025 13:54:40 -0700 Subject: [PATCH 19/41] Support configuring immediate LRO result --- .../snippets/clients/async_tools_client.py | 67 +- examples/snippets/servers/async_tools.py | 21 + src/mcp/server/fastmcp/server.py | 22 + src/mcp/server/fastmcp/tools/base.py | 10 +- src/mcp/server/fastmcp/tools/tool_manager.py | 6 +- src/mcp/server/lowlevel/server.py | 52 +- src/mcp/types.py | 16 + tests/server/fastmcp/test_immediate_result.py | 745 ++++++++++++++++++ tests/server/fastmcp/test_integration.py | 99 +++ tests/server/fastmcp/test_tool_manager.py | 1 + 10 files changed, 1026 insertions(+), 13 deletions(-) create mode 100644 tests/server/fastmcp/test_immediate_result.py diff --git a/examples/snippets/clients/async_tools_client.py b/examples/snippets/clients/async_tools_client.py index 80b2fde36..35db25b38 100644 --- a/examples/snippets/clients/async_tools_client.py +++ b/examples/snippets/clients/async_tools_client.py @@ -1,5 +1,14 @@ """ -Client example showing how to use async tools. +Client example showing how to use async tools, including immediate result functionality. 
+ +This example demonstrates: +- Synchronous tools (immediate response) +- Hybrid tools (sync/async modes) +- Async-only tools (background execution with polling) +- Batch processing with progress updates +- Data processing pipelines +- Elicitation (user input during async execution) +- Immediate result tools (instant feedback + async execution) cd to the `examples/snippets` directory and run: uv run async-tools-client @@ -222,6 +231,61 @@ async def demonstrate_elicitation(session: ClientSession): await asyncio.sleep(0.5) +async def test_immediate_result_tool(session: ClientSession): + """Test calling async tool with immediate result functionality. + + This demonstrates the immediate_result feature where async tools can provide + instant feedback while continuing to execute in the background. + """ + print("\n=== Immediate Result Tool Demo ===") + + # Call the async tool with immediate_result functionality + result = await session.call_tool("long_running_analysis", arguments={"operation": "data_processing"}) + + # Display immediate feedback (should be available immediately) + print("Immediate response received:") + if result.content: + for content in result.content: + if isinstance(content, types.TextContent): + print(f" 📋 {content.text}") + else: + print(" (No immediate content received)") + + # Check if there's an async operation to poll + if result.operation: + token = result.operation.token + print(f"\nAsync operation started with token: {token}") + print("Polling for final results...") + + # Poll for status updates and final result + while True: + status = await session.get_operation_status(token) + print(f" Status: {status.status}") + + if status.status == "completed": + # Get the final result + final_result = await session.get_operation_result(token) + print("\nFinal result received:") + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f" ✅ {content.text}") + break + elif status.status == "failed": + print(f" ❌ 
Operation failed: {status.error}") + break + elif status.status in ("canceled", "unknown"): + print(f" ⚠️ Operation ended with status: {status.status}") + break + + # Wait before polling again + await asyncio.sleep(1) + else: + # This shouldn't happen for async tools, but handle gracefully + print("⚠️ Unexpected: tool returned synchronous result instead of async operation") + + print("Immediate result demonstration complete!") + + async def run(): """Run all async tool demonstrations.""" # Determine protocol version from command line @@ -261,6 +325,7 @@ async def run(): await demonstrate_batch_processing(session) await demonstrate_data_processing(session) await demonstrate_elicitation(session) + await test_immediate_result_tool(session) print("\n=== All demonstrations complete! ===") diff --git a/examples/snippets/servers/async_tools.py b/examples/snippets/servers/async_tools.py index 431efb777..575f1a7ab 100644 --- a/examples/snippets/servers/async_tools.py +++ b/examples/snippets/servers/async_tools.py @@ -9,6 +9,7 @@ from pydantic import BaseModel, Field +from mcp import types from mcp.server.fastmcp import Context, FastMCP # Create an MCP server with async operations support @@ -206,5 +207,25 @@ async def quick_expiry_task(message: str, ctx: Context) -> str: # type: ignore[ return f"Quick task completed: {message} (expires in 2 seconds)" +async def immediate_feedback(operation: str) -> list[types.ContentBlock]: + """Provide immediate feedback for long-running operations.""" + return [types.TextContent(type="text", text=f"🚀 Starting {operation}... 
This may take a moment.")] + + +@mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) +async def long_running_analysis(operation: str, ctx: Context) -> str: # type: ignore[type-arg] + """Perform analysis with immediate user feedback.""" + await ctx.info(f"Beginning {operation} analysis") + + # Simulate long-running work with progress updates + for i in range(5): + await asyncio.sleep(1) + progress = (i + 1) / 5 + await ctx.report_progress(progress, 1.0, f"Step {i + 1}/5 complete") + + await ctx.info(f"Analysis '{operation}' completed successfully!") + return f"Analysis '{operation}' completed successfully with detailed results!" + + if __name__ == "__main__": mcp.run() diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index bc92c121d..1fd43060a 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -21,6 +21,7 @@ from starlette.routing import Mount, Route from starlette.types import Receive, Scope, Send +import mcp.types as types from mcp.server.auth.middleware.auth_context import AuthContextMiddleware from mcp.server.auth.middleware.bearer_auth import BearerAuthBackend, RequireAuthMiddleware from mcp.server.auth.provider import OAuthAuthorizationServerProvider, ProviderTokenVerifier, TokenVerifier @@ -364,6 +365,10 @@ async def list_tools(self) -> list[MCPTool]: annotations=info.annotations, invocationMode=self._get_invocation_mode(info, client_supports_async), _meta=info.meta, + internal=types.InternalToolProperties( + immediate_result=info.immediate_result, + keepalive=info.meta.get("_keep_alive") if info.meta else None, + ), ) for info in tools if client_supports_async or info.invocation_modes != ["async"] @@ -438,6 +443,7 @@ def add_tool( structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, keep_alive: int | None = None, + immediate_result: Callable[..., Awaitable[list[ContentBlock]]] | None = None, ) -> None: """Add a tool to the 
server. @@ -458,6 +464,8 @@ def add_tool( - If None, defaults to ["sync"] for backwards compatibility keep_alive: How long (in seconds) async operation results should be kept available. Only applies to async tools. + immediate_result: Optional async function that returns immediate feedback content + for async tools. Must return list[ContentBlock]. Only valid for async-compatible tools. """ self._tool_manager.add_tool( fn, @@ -468,6 +476,7 @@ def add_tool( structured_output=structured_output, invocation_modes=invocation_modes, keep_alive=keep_alive, + immediate_result=immediate_result, ) def tool( @@ -479,6 +488,7 @@ def tool( structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, keep_alive: int | None = None, + immediate_result: Callable[..., Awaitable[list[ContentBlock]]] | None = None, ) -> Callable[[AnyFunction], AnyFunction]: """Decorator to register a tool. @@ -501,6 +511,8 @@ def tool( - Tools with "async" mode will be hidden from clients that don't support async execution keep_alive: How long (in seconds) async operation results should be kept available. Only applies to async tools. + immediate_result: Optional async function that returns immediate feedback content + for async tools. Must return list[ContentBlock]. Only valid for async-compatible tools. 
Example: @server.tool() @@ -527,6 +539,15 @@ async def async_only_tool(data: str, ctx: Context) -> str: def hybrid_tool(x: int) -> str: # This tool supports both sync and async execution return str(x) + + async def immediate_feedback(operation: str) -> list[ContentBlock]: + return [TextContent(type="text", text=f"Starting {operation}...")] + + @server.tool(invocation_modes=["async"], immediate_result=immediate_feedback) + async def long_running_tool(operation: str, ctx: Context) -> str: + # This tool provides immediate feedback while running asynchronously + await ctx.info(f"Processing {operation}") + return f"Completed {operation}" """ # Check if user passed function directly instead of calling decorator if callable(name): @@ -544,6 +565,7 @@ def decorator(fn: AnyFunction) -> AnyFunction: structured_output=structured_output, invocation_modes=invocation_modes, keep_alive=keep_alive, + immediate_result=immediate_result, ) return fn diff --git a/src/mcp/server/fastmcp/tools/base.py b/src/mcp/server/fastmcp/tools/base.py index 524eefd84..c7be305f2 100644 --- a/src/mcp/server/fastmcp/tools/base.py +++ b/src/mcp/server/fastmcp/tools/base.py @@ -11,7 +11,7 @@ from mcp.server.fastmcp.exceptions import ToolError from mcp.server.fastmcp.utilities.context_injection import find_context_parameter from mcp.server.fastmcp.utilities.func_metadata import FuncMetadata, func_metadata -from mcp.types import ToolAnnotations +from mcp.types import ContentBlock, ToolAnnotations if TYPE_CHECKING: from mcp.server.fastmcp.server import Context @@ -38,10 +38,10 @@ class Tool(BaseModel): invocation_modes: list[InvocationMode] = Field( default=["sync"], description="Supported invocation modes (sync/async)" ) - meta: dict[str, Any] | None = Field(description="Optional additional tool information.", default=None) - immediate_result: Callable[..., Awaitable[list[Any]]] | None = Field( + immediate_result: Callable[..., Awaitable[list[ContentBlock]]] | None = Field( None, exclude=True, 
description="Optional immediate result function for async tools" ) + meta: dict[str, Any] | None = Field(description="Optional additional tool information.", default=None) @cached_property def output_schema(self) -> dict[str, Any] | None: @@ -59,8 +59,8 @@ def from_function( structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, keep_alive: int | None = None, - meta: dict[str, Any] | None = None, immediate_result: Callable[..., Awaitable[list[Any]]] | None = None, + meta: dict[str, Any] | None = None, ) -> Tool: """Create a Tool from a function.""" func_name = name or fn.__name__ @@ -129,8 +129,8 @@ def from_function( context_kwarg=context_kwarg, annotations=annotations, invocation_modes=invocation_modes, - meta=meta, immediate_result=immediate_result, + meta=meta, ) async def run( diff --git a/src/mcp/server/fastmcp/tools/tool_manager.py b/src/mcp/server/fastmcp/tools/tool_manager.py index 933881ef4..540dd7f01 100644 --- a/src/mcp/server/fastmcp/tools/tool_manager.py +++ b/src/mcp/server/fastmcp/tools/tool_manager.py @@ -1,13 +1,13 @@ from __future__ import annotations as _annotations -from collections.abc import Callable +from collections.abc import Awaitable, Callable from typing import TYPE_CHECKING, Any from mcp.server.fastmcp.exceptions import ToolError from mcp.server.fastmcp.tools.base import InvocationMode, Tool from mcp.server.fastmcp.utilities.logging import get_logger from mcp.shared.context import LifespanContextT, RequestT -from mcp.types import ToolAnnotations +from mcp.types import ContentBlock, ToolAnnotations if TYPE_CHECKING: from mcp.server.fastmcp.server import Context @@ -52,6 +52,7 @@ def add_tool( structured_output: bool | None = None, invocation_modes: list[InvocationMode] | None = None, keep_alive: int | None = None, + immediate_result: Callable[..., Awaitable[list[ContentBlock]]] | None = None, meta: dict[str, Any] | None = None, ) -> Tool: """Add a tool to the server.""" @@ -64,6 +65,7 @@ def 
add_tool( structured_output=structured_output, invocation_modes=invocation_modes, keep_alive=keep_alive, + immediate_result=immediate_result, meta=meta, ) existing = self._tools.get(tool.name) diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index bc0336010..b8971462a 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -469,6 +469,23 @@ async def handler(req: types.CallToolRequest): # Check for async execution if tool and self.async_operations and self._should_execute_async(tool): keep_alive = self._get_tool_keep_alive(tool) + immediate_content: list[types.ContentBlock] = [] + + # Execute immediate result if available + if self._has_immediate_result(tool): + try: + immediate_content = await self._execute_immediate_result(tool, arguments) + logger.debug(f"Executed immediate result for {tool_name}") + except McpError: + # Re-raise McpError as-is + raise + except Exception as e: + raise McpError( + types.ErrorData( + code=types.INTERNAL_ERROR, + message=f"Immediate result execution failed: {str(e)}", + ) + ) # Create async operation operation = self.async_operations.create_operation( @@ -499,11 +516,11 @@ async def execute_async(): asyncio.create_task(execute_async()) - # Return operation result immediately + # Return operation result with immediate content logger.info(f"Returning async operation result for {tool_name}") return types.ServerResult( types.CallToolResult( - content=[], + content=immediate_content, operation=types.AsyncResultProperties( token=operation.token, keepAlive=operation.keep_alive, @@ -588,9 +605,34 @@ def _should_execute_async(self, tool: types.Tool) -> bool: def _get_tool_keep_alive(self, tool: types.Tool) -> int: """Get the keepalive value for an async tool.""" - if not tool.meta or "_keep_alive" not in tool.meta: - raise ValueError(f"_keep_alive not defined for tool {tool.name}") - return cast(int, tool.meta["_keep_alive"]) + if tool.internal.keepalive is None: + raise 
ValueError(f"keepalive not defined for tool {tool.name}") + return tool.internal.keepalive + + def _has_immediate_result(self, tool: types.Tool) -> bool: + """Check if tool has immediate_result function.""" + return tool.internal.immediate_result is not None and callable(tool.internal.immediate_result) + + async def _execute_immediate_result(self, tool: types.Tool, arguments: dict[str, Any]) -> list[types.ContentBlock]: + """Execute immediate result function and return content blocks.""" + immediate_fn = tool.internal.immediate_result + + if immediate_fn is None: + raise ValueError(f"No immediate_result function found for tool {tool.name}") + + # Validate function signature and execute + try: + result = await immediate_fn(**arguments) + if not isinstance(result, list): + raise ValueError("immediate_result must return list[ContentBlock]") + return cast(list[types.ContentBlock], result) + except McpError: + # Re-raise McpError as-is + raise + except Exception as e: + raise McpError( + types.ErrorData(code=types.INTERNAL_ERROR, message=f"Immediate result execution error: {str(e)}") + ) def progress_notification(self): def decorator( diff --git a/src/mcp/types.py b/src/mcp/types.py index 25c96e9d1..6038f5808 100644 --- a/src/mcp/types.py +++ b/src/mcp/types.py @@ -858,6 +858,18 @@ class ToolAnnotations(BaseModel): model_config = ConfigDict(extra="allow") +class InternalToolProperties(BaseModel): + """ + Internal properties for tools that are not serialized in the MCP protocol. 
+ """ + + immediate_result: Any = Field(default=None) + """Function to execute for immediate results in async operations.""" + + keepalive: int | None = Field(default=None) + """Keepalive duration in seconds for async operations.""" + + class Tool(BaseMetadata): """Definition for a tool the client can call.""" @@ -883,6 +895,10 @@ class Tool(BaseMetadata): See [MCP specification](https://github.com/modelcontextprotocol/modelcontextprotocol/blob/47339c03c143bb4ec01a26e721a1b8fe66634ebe/docs/specification/draft/basic/index.mdx#general-fields) for notes on _meta usage. """ + internal: InternalToolProperties = Field(default_factory=InternalToolProperties, exclude=True) + """ + Internal properties not serialized in MCP protocol. + """ model_config = ConfigDict(extra="allow") diff --git a/tests/server/fastmcp/test_immediate_result.py b/tests/server/fastmcp/test_immediate_result.py new file mode 100644 index 000000000..bfe9be797 --- /dev/null +++ b/tests/server/fastmcp/test_immediate_result.py @@ -0,0 +1,745 @@ +"""Test immediate_result functionality in FastMCP.""" + +import asyncio + +import pytest + +from mcp.server.fastmcp import FastMCP +from mcp.server.fastmcp.tools import Tool, ToolManager +from mcp.shared.exceptions import McpError +from mcp.shared.memory import create_connected_server_and_client_session +from mcp.types import INVALID_PARAMS, ContentBlock, ErrorData, TextContent + + +class TestImmediateResultValidation: + """Test validation of immediate_result parameter during tool registration.""" + + def test_immediate_result_with_sync_only_tool_fails(self): + """Test that immediate_result fails with sync-only tools.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + def sync_tool() -> str: + """A sync tool.""" + return "sync" + + manager = ToolManager() + + # Should raise ValueError when immediate_result is used with sync-only tool + with pytest.raises(ValueError, match="immediate_result can only be 
used with async-compatible tools"): + manager.add_tool(sync_tool, invocation_modes=["sync"], immediate_result=immediate_fn) + + def test_immediate_result_with_async_tool_succeeds(self): + """Test that immediate_result succeeds with async-compatible tools.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + """An async tool.""" + return "async" + + manager = ToolManager() + + # Should succeed with async-compatible tool + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + assert tool.immediate_result == immediate_fn + assert tool.invocation_modes == ["async"] + + def test_immediate_result_with_hybrid_tool_succeeds(self): + """Test that immediate_result succeeds with hybrid sync/async tools.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + def hybrid_tool() -> str: + """A hybrid tool.""" + return "hybrid" + + manager = ToolManager() + + # Should succeed with hybrid tool + tool = manager.add_tool(hybrid_tool, invocation_modes=["sync", "async"], immediate_result=immediate_fn) + assert tool.immediate_result == immediate_fn + assert tool.invocation_modes == ["sync", "async"] + + def test_immediate_result_non_async_callable_fails(self): + """Test that non-async immediate_result functions fail validation.""" + + def sync_immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Should raise ValueError for non-async immediate_result function + with pytest.raises(ValueError, match="immediate_result must be an async callable"): + manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=sync_immediate_fn) # type: ignore + + def test_immediate_result_non_callable_fails(self): + """Test that non-callable immediate_result fails validation.""" + + 
async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Should raise ValueError for non-callable immediate_result + with pytest.raises(ValueError, match="immediate_result must be an async callable"): + manager.add_tool(async_tool, invocation_modes=["async"], immediate_result="not_callable") # type: ignore + + def test_tool_from_function_immediate_result_validation(self): + """Test Tool.from_function validates immediate_result correctly.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + def sync_tool() -> str: + return "sync" + + # Should fail with sync-only tool + with pytest.raises(ValueError, match="immediate_result can only be used with async-compatible tools"): + Tool.from_function(sync_tool, invocation_modes=["sync"], immediate_result=immediate_fn) + + # Should succeed with async tool + async def async_tool() -> str: + return "async" + + tool = Tool.from_function(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + assert tool.immediate_result == immediate_fn + + +class TestImmediateResultIntegration: + """Test integration of immediate_result with async operations and polling.""" + + @pytest.mark.anyio + async def test_fastmcp_tool_decorator_with_immediate_result(self): + """Test FastMCP tool decorator with immediate_result parameter.""" + + mcp = FastMCP() + + async def immediate_feedback(operation: str) -> list[ContentBlock]: + return [TextContent(type="text", text=f"🚀 Starting {operation}...")] + + @mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) + async def long_running_task(operation: str) -> str: + """Perform a long-running task with immediate feedback.""" + await asyncio.sleep(0.1) # Simulate work + return f"Task '{operation}' completed!" 
+ + # Test with "next" protocol version to see async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 + assert tools.tools[0].name == "long_running_task" + assert tools.tools[0].invocationMode == "async" + + # Test that the tool has immediate_result in the internal representation + internal_tool = mcp._tool_manager.get_tool("long_running_task") + assert internal_tool is not None + assert internal_tool.immediate_result == immediate_feedback + + @pytest.mark.anyio + async def test_tool_without_immediate_result_backward_compatibility(self): + """Test that async tools without immediate_result work unchanged.""" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"]) + async def simple_async_tool(message: str) -> str: + """A simple async tool without immediate result.""" + await asyncio.sleep(0.1) + return f"Processed: {message}" + + # Test with "next" protocol version to see async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 + assert tools.tools[0].name == "simple_async_tool" + assert tools.tools[0].invocationMode == "async" + + # Test that the tool has no immediate_result + internal_tool = mcp._tool_manager.get_tool("simple_async_tool") + assert internal_tool is not None + assert internal_tool.immediate_result is None + + @pytest.mark.anyio + async def test_sync_tool_unchanged_behavior(self): + """Test that sync tools continue to work without modification.""" + + mcp = FastMCP() + + @mcp.tool() + def sync_tool(message: str) -> str: + """A simple sync tool.""" + return f"Processed: {message}" + + # Test with old client (sync tools should be visible) + async with create_connected_server_and_client_session(mcp._mcp_server) as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 
+ assert tools.tools[0].name == "sync_tool" + assert tools.tools[0].invocationMode is None # Old clients don't see invocationMode + + # Test with "next" protocol version + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 + assert tools.tools[0].name == "sync_tool" + assert tools.tools[0].invocationMode == "sync" # New clients see invocationMode + + # Test that the tool has no immediate_result + internal_tool = mcp._tool_manager.get_tool("sync_tool") + assert internal_tool is not None + assert internal_tool.immediate_result is None + assert internal_tool.invocation_modes == ["sync"] + + @pytest.mark.anyio + async def test_multiple_tools_with_mixed_immediate_result(self): + """Test multiple tools with mixed immediate_result configurations.""" + + mcp = FastMCP() + + async def immediate_feedback(message: str) -> list[ContentBlock]: + return [TextContent(type="text", text=f"Processing: {message}")] + + @mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) + async def tool_with_immediate(message: str) -> str: + return f"Done: {message}" + + @mcp.tool(invocation_modes=["async"]) + async def tool_without_immediate(message: str) -> str: + return f"Done: {message}" + + @mcp.tool() + def sync_tool(message: str) -> str: + return f"Done: {message}" + + # Test with old client (only sync tools visible) + async with create_connected_server_and_client_session(mcp._mcp_server) as client: + tools = await client.list_tools() + assert len(tools.tools) == 1 # Only sync tool visible + assert tools.tools[0].name == "sync_tool" + + # Test with "next" protocol version (all tools visible) + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + tools = await client.list_tools() + assert len(tools.tools) == 3 + + tool_names = {tool.name for tool in tools.tools} + assert tool_names == 
{"tool_with_immediate", "tool_without_immediate", "sync_tool"} + + # Test internal representations + tool_with = mcp._tool_manager.get_tool("tool_with_immediate") + tool_without = mcp._tool_manager.get_tool("tool_without_immediate") + sync_tool_obj = mcp._tool_manager.get_tool("sync_tool") + + assert tool_with is not None and tool_with.immediate_result == immediate_feedback + assert tool_without is not None and tool_without.immediate_result is None + assert sync_tool_obj is not None and sync_tool_obj.immediate_result is None + + +class TestImmediateResultErrorHandling: + """Test error handling for immediate_result functionality.""" + + def test_registration_error_messages(self): + """Test that registration errors have clear messages.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + def sync_tool() -> str: + return "sync" + + manager = ToolManager() + + # Test error message for sync-only tool + with pytest.raises(ValueError) as exc_info: + manager.add_tool(sync_tool, invocation_modes=["sync"], immediate_result=immediate_fn) + + error_msg = str(exc_info.value) + assert "immediate_result can only be used with async-compatible tools" in error_msg + assert "Add 'async' to invocation_modes" in error_msg + + def test_fastmcp_decorator_sync_tool_validation(self): + """Test that FastMCP decorator prevents sync tools from using immediate_result.""" + + mcp = FastMCP() + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + # Should raise ValueError when decorating sync tool with immediate_result + with pytest.raises(ValueError, match="immediate_result can only be used with async-compatible tools"): + + @mcp.tool(invocation_modes=["sync"], immediate_result=immediate_fn) + def sync_tool_with_immediate() -> str: + return "sync" + + def test_default_sync_tool_validation(self): + """Test that default sync tools (no invocation_modes specified) cannot use 
immediate_result.""" + + mcp = FastMCP() + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + # Should raise ValueError when decorating default sync tool with immediate_result + with pytest.raises(ValueError, match="immediate_result can only be used with async-compatible tools"): + + @mcp.tool(immediate_result=immediate_fn) + def default_sync_tool() -> str: + return "sync" + + def test_non_async_callable_error_message(self): + """Test error message for non-async immediate_result function.""" + + def sync_immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + with pytest.raises(ValueError) as exc_info: + manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=sync_immediate_fn) # type: ignore + + error_msg = str(exc_info.value) + assert "immediate_result must be an async callable" in error_msg + + def test_tool_manager_duplicate_tool_handling_with_immediate_result(self): + """Test duplicate tool handling when immediate_result is involved.""" + + async def immediate_fn1() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate1")] + + async def immediate_fn2() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate2")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Add first tool with immediate_result + tool1 = manager.add_tool( + async_tool, name="test_tool", invocation_modes=["async"], immediate_result=immediate_fn1 + ) + + # Add duplicate tool with different immediate_result (should return existing) + tool2 = manager.add_tool( + async_tool, name="test_tool", invocation_modes=["async"], immediate_result=immediate_fn2 + ) + + # Should return the same tool (first one registered) + assert tool1 is tool2 + assert tool1.immediate_result == immediate_fn1 + + +class TestImmediateResultPerformance: + 
"""Test performance aspects of immediate_result functionality.""" + + def test_no_performance_impact_without_immediate_result(self): + """Test that tools without immediate_result have no performance impact.""" + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Add tool without immediate_result + tool = manager.add_tool(async_tool, invocation_modes=["async"]) + + # Verify no immediate_result overhead + assert tool.immediate_result is None + assert "async" in tool.invocation_modes + + @pytest.mark.anyio + async def test_immediate_result_function_isolation(self): + """Test that immediate_result functions are isolated from main tool execution.""" + + execution_order: list[str] = [] + + async def immediate_fn(message: str) -> list[ContentBlock]: + execution_order.append("immediate") + return [TextContent(type="text", text=f"Processing: {message}")] + + async def async_tool(message: str) -> str: + execution_order.append("main") + await asyncio.sleep(0.1) + return f"Completed: {message}" + + manager = ToolManager() + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + + # Test that immediate function can be called independently + await immediate_fn("test") + assert execution_order == ["immediate"] + + # Reset and test main function + execution_order.clear() + await tool.run({"message": "test"}) + assert execution_order == ["main"] + + +class TestImmediateResultRuntimeErrors: + """Test runtime error handling when immediate_result functions raise exceptions.""" + + @pytest.mark.anyio + async def test_immediate_result_registration_and_storage(self): + """Test that immediate_result functions are properly registered, stored, and executed.""" + + async def working_immediate_fn(message: str) -> list[ContentBlock]: + return [TextContent(type="text", text=f"Processing: {message}")] + + async def async_tool(message: str) -> str: + await asyncio.sleep(0.1) + return f"Completed: {message}" + + mcp = 
FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=working_immediate_fn) + async def tool_with_working_immediate(message: str) -> str: + """Tool with working immediate result.""" + return await async_tool(message) + + # Verify the tool was registered with immediate_result + internal_tool = mcp._tool_manager.get_tool("tool_with_working_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == working_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # Call the tool - should return operation token + result = await client.call_tool("tool_with_working_immediate", {"message": "test"}) + + # Should get operation token for async call + assert result.operation is not None + token = result.operation.token + + # The immediate result should be in the initial response content + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert content.text == "Processing: test" + + # Poll for completion to verify main tool execution + while True: + status = await client.get_operation_status(token) + if status.status == "completed": + final_result = await client.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + final_content = final_result.result.content[0] + assert final_content.type == "text" + assert final_content.text == "Completed: test" + break + elif status.status == "failed": + pytest.fail(f"Tool execution failed: {status}") + await asyncio.sleep(0.01) + + @pytest.mark.anyio + async def test_immediate_result_exception_handling(self): + """Test that exceptions in immediate_result are properly handled during tool execution.""" + + async def failing_immediate_fn(message: str) -> list[ContentBlock]: + raise ValueError(f"Immediate result failed for: {message}") + + async def 
async_tool(message: str) -> str: + await asyncio.sleep(0.1) + return f"Completed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=failing_immediate_fn) + async def tool_with_failing_immediate(message: str) -> str: + """Tool with failing immediate result.""" + return await async_tool(message) + + # Verify the tool was registered with the failing immediate_result + internal_tool = mcp._tool_manager.get_tool("tool_with_failing_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == failing_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result due to immediate_result exception + result = await client.call_tool("tool_with_failing_immediate", {"message": "test"}) + + # Verify error result + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "Immediate result failed for: test" in content.text + + @pytest.mark.anyio + async def test_immediate_result_invalid_return_type_error(self): + """Test that immediate_result returning invalid type is handled properly.""" + + async def invalid_return_immediate_fn(message: str) -> str: # Wrong return type + return f"Invalid return: {message}" # Should return list[ContentBlock] + + async def async_tool(message: str) -> str: + return f"Completed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=invalid_return_immediate_fn) # type: ignore + async def tool_with_invalid_immediate(message: str) -> str: + """Tool with invalid immediate result return type.""" + return await async_tool(message) + + # Verify the tool was registered 
(type checking is not enforced at runtime) + internal_tool = mcp._tool_manager.get_tool("tool_with_invalid_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == invalid_return_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result due to invalid return type + result = await client.call_tool("tool_with_invalid_immediate", {"message": "test"}) + + # Verify error result + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "immediate_result must return list[ContentBlock]" in content.text + + @pytest.mark.anyio + async def test_immediate_result_async_exception_handling(self): + """Test that async exceptions in immediate_result are properly handled.""" + + async def async_failing_immediate_fn(operation: str) -> list[ContentBlock]: + await asyncio.sleep(0.01) # Make it truly async + raise RuntimeError(f"Async immediate failure: {operation}") + + async def async_tool(operation: str) -> str: + await asyncio.sleep(0.1) + return f"Operation {operation} completed" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=async_failing_immediate_fn) + async def tool_with_async_failing_immediate(operation: str) -> str: + """Tool with async failing immediate result.""" + return await async_tool(operation) + + # Verify the tool was registered + internal_tool = mcp._tool_manager.get_tool("tool_with_async_failing_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == async_failing_immediate_fn + + # Test with "next" protocol version to enable async tools + async with 
create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result due to immediate_result exception + result = await client.call_tool("tool_with_async_failing_immediate", {"operation": "test_op"}) + + # Verify error result + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "Async immediate failure: test_op" in content.text + + @pytest.mark.anyio + async def test_immediate_result_error_prevents_main_tool_execution(self): + """Test that immediate_result errors prevent the main tool from executing. + + When immediate_result fails, no async operation should be created and the main + tool function should not be executed. + """ + + call_count = 0 + + async def failing_immediate_fn(message: str) -> list[ContentBlock]: + raise ValueError("Immediate failed") + + async def async_tool(message: str) -> str: + nonlocal call_count + call_count += 1 + return f"Tool executed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=failing_immediate_fn) + async def tool_with_failing_immediate(message: str) -> str: + """Tool with failing immediate result.""" + return await async_tool(message) + + # Verify the tool was registered + internal_tool = mcp._tool_manager.get_tool("tool_with_failing_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == failing_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result due to immediate_result exception + result = await client.call_tool("tool_with_failing_immediate", {"message": "test"}) + + # Verify error 
result + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "Immediate failed" in content.text + + # Verify main tool was NOT executed due to immediate_result failure + assert call_count == 0 + + @pytest.mark.anyio + async def test_immediate_result_mcp_error_passthrough(self): + """Test that McpError from immediate_result is passed through with original error details.""" + + async def mcp_error_immediate_fn(message: str) -> list[ContentBlock]: + raise McpError(ErrorData(code=INVALID_PARAMS, message=f"Custom MCP error: {message}")) + + async def async_tool(message: str) -> str: + return f"Completed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=mcp_error_immediate_fn) + async def tool_with_mcp_error_immediate(message: str) -> str: + """Tool with immediate result that raises McpError.""" + return await async_tool(message) + + # Verify the tool was registered + internal_tool = mcp._tool_manager.get_tool("tool_with_mcp_error_immediate") + assert internal_tool is not None + assert internal_tool.immediate_result == mcp_error_immediate_fn + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result with the original McpError details + result = await client.call_tool("tool_with_mcp_error_immediate", {"message": "test"}) + + # Verify error result preserves the original McpError + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + # The original McpError should be preserved, not wrapped in 
"Immediate result execution failed" + assert "Custom MCP error: test" in content.text + + @pytest.mark.anyio + async def test_generic_exception_wrapped_in_mcp_error(self): + """Test that generic exceptions from immediate_result are wrapped in McpError with INTERNAL_ERROR code.""" + + async def failing_immediate_fn(message: str) -> list[ContentBlock]: + raise ValueError(f"Generic error: {message}") + + async def async_tool(message: str) -> str: + return f"Completed: {message}" + + mcp = FastMCP() + + @mcp.tool(invocation_modes=["async"], immediate_result=failing_immediate_fn) + async def tool_with_failing_immediate(message: str) -> str: + """Tool with failing immediate result.""" + return await async_tool(message) + + # Test with "next" protocol version to enable async tools + async with create_connected_server_and_client_session(mcp._mcp_server, protocol_version="next") as client: + # The call should return an error result with wrapped exception + result = await client.call_tool("tool_with_failing_immediate", {"message": "test"}) + + # Verify error result wraps the exception + assert result.isError is True + assert result.operation is None # No operation created due to immediate_result failure + assert len(result.content) == 1 + content = result.content[0] + assert content.type == "text" + assert "Immediate result execution error" in content.text + assert "Generic error: test" in content.text + + +class TestImmediateResultMetadata: + """Test metadata handling for immediate_result functionality.""" + + def test_immediate_result_stored_in_tool_object(self): + """Test that immediate_result function is stored in Tool object.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + + # Verify immediate_result is stored in the Tool object + 
assert tool.immediate_result == immediate_fn + assert callable(tool.immediate_result) + + def test_tool_meta_field_preservation(self): + """Test that existing meta field is preserved when immediate_result is added.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Add tool with both meta and immediate_result + custom_meta = {"custom_key": "custom_value"} + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn, meta=custom_meta) + + # Verify both meta and immediate_result are preserved + assert tool.immediate_result == immediate_fn + assert tool.meta is not None + assert tool.meta["custom_key"] == "custom_value" + + def test_keep_alive_and_immediate_result_compatibility(self): + """Test that keep_alive and immediate_result work together.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + + # Add tool with both keep_alive and immediate_result + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn, keep_alive=1800) + + # Verify both are set correctly + assert tool.immediate_result == immediate_fn + assert tool.meta is not None + assert tool.meta["_keep_alive"] == 1800 + # immediate_result is no longer stored in meta, it's a direct field on the Tool object + + def test_immediate_result_stored_as_direct_field(self): + """Test that immediate_result function is stored as a direct field on the Tool object.""" + + async def immediate_fn() -> list[ContentBlock]: + return [TextContent(type="text", text="immediate")] + + async def async_tool() -> str: + return "async" + + manager = ToolManager() + tool = manager.add_tool(async_tool, invocation_modes=["async"], immediate_result=immediate_fn) + + # Verify 
immediate_result is stored as a direct field on the Tool object + assert tool.immediate_result == immediate_fn + assert callable(tool.immediate_result) + # immediate_result is no longer stored in meta field diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index d7ee425af..fb53c4145 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -1033,3 +1033,102 @@ async def test_structured_output(server_transport: str, server_url: str) -> None assert "sunny" in result_text # condition assert "45" in result_text # humidity assert "5.2" in result_text # wind_speed + + +# Test immediate_result functionality integration +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tools", "sse"), + ("async_tools", "streamable-http"), + ], + indirect=True, +) +async def test_immediate_result_integration(server_transport: str, server_url: str) -> None: + """Test complete flow from tool registration to immediate result execution.""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession(read_stream, write_stream, protocol_version="next") as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tools Demo" + + # Test tool with immediate_result + immediate_result = await session.call_tool("long_running_analysis", {"operation": "data_processing"}) + + # Verify immediate result is returned in content + assert len(immediate_result.content) == 1 + assert isinstance(immediate_result.content[0], TextContent) + assert "🚀 Starting data_processing... This may take a moment." 
in immediate_result.content[0].text + + # Verify async operation is created + assert immediate_result.operation is not None + token = immediate_result.operation.token + + # Poll for final result + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Analysis 'data_processing' completed successfully with detailed results!" in content.text + break + elif status.status == "failed": + pytest.fail(f"Async operation failed: {status.error}") + await asyncio.sleep(0.01) + + +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tools", "sse"), + ("async_tools", "streamable-http"), + ], + indirect=True, +) +async def test_immediate_result_backward_compatibility(server_transport: str, server_url: str) -> None: + """Test that existing async tools without immediate_result work unchanged.""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession(read_stream, write_stream, protocol_version="next") as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tools Demo" + + # Test async tool without immediate_result (should have empty content initially) + async_result = await session.call_tool("async_only_tool", {"data": "test_data"}) + + # Should have empty content array (no immediate result) + assert len(async_result.content) == 0 + + # Should still have async operation + assert async_result.operation is not None + token = async_result.operation.token + + # Poll for final result + while True: + 
status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Async analysis result for: test_data" in content.text + break + elif status.status == "failed": + pytest.fail(f"Async operation failed: {status.error}") + await asyncio.sleep(0.01) diff --git a/tests/server/fastmcp/test_tool_manager.py b/tests/server/fastmcp/test_tool_manager.py index 9db6ff3f2..83580fe77 100644 --- a/tests/server/fastmcp/test_tool_manager.py +++ b/tests/server/fastmcp/test_tool_manager.py @@ -54,6 +54,7 @@ class AddArguments(ArgModelBase): parameters=AddArguments.model_json_schema(), context_kwarg=None, annotations=None, + immediate_result=None, ) manager = ToolManager(tools=[original_tool]) saved_tool = manager.get_tool("sum") From 2943631ece87d0c8c8093e2f323dae35cbf71890 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 26 Sep 2025 22:27:54 -0700 Subject: [PATCH 20/41] Fix code complexity issue in sHTTP --- src/mcp/server/streamable_http.py | 152 ++++++++++++++++++------------ 1 file changed, 90 insertions(+), 62 deletions(-) diff --git a/src/mcp/server/streamable_http.py b/src/mcp/server/streamable_http.py index 77d4aa63e..523cc6941 100644 --- a/src/mcp/server/streamable_http.py +++ b/src/mcp/server/streamable_http.py @@ -308,6 +308,93 @@ def _check_content_type(self, request: Request) -> bool: return any(part == CONTENT_TYPE_JSON for part in content_type_parts) + def _is_async_operation_response(self, response_message: JSONRPCMessage) -> bool: + """Check if response is for an async operation that should keep stream open.""" + try: + if not isinstance(response_message.root, JSONRPCResponse): + return False + + result = response_message.root.result + if not result: + return False + + # Check if result has _operation with 
token + if hasattr(result, "__getitem__") and "_operation" in result: + operation = result["_operation"] # type: ignore + if hasattr(operation, "__getitem__") and "token" in operation: + return bool(operation["token"]) # type: ignore + + return False + except (TypeError, KeyError, AttributeError): + return False + + async def _handle_sse_mode( + self, + message: JSONRPCMessage, + request: Request, + writer: MemoryObjectSendStream[SessionMessage | Exception], + request_id: str, + request_stream_reader: MemoryObjectReceiveStream[EventMessage], + scope: Scope, + receive: Receive, + send: Send, + ) -> None: + """Handle SSE response mode.""" + # Create SSE stream + sse_stream_writer, sse_stream_reader = anyio.create_memory_object_stream[dict[str, str]](0) + + async def sse_writer(): + # Get the request ID from the incoming request message + try: + async with sse_stream_writer, request_stream_reader: + # Process messages from the request-specific stream + async for event_message in request_stream_reader: + # Build the event data + event_data = self._create_event_data(event_message) + await sse_stream_writer.send(event_data) + + # If response, remove from pending streams and close + if isinstance( + event_message.message.root, + JSONRPCResponse | JSONRPCError, + ): + break + except Exception: + logger.exception("Error in SSE writer") + finally: + logger.debug("Closing SSE writer") + await self._clean_up_memory_streams(request_id) + + # Create and start EventSourceResponse + # SSE stream mode (original behavior) + # Set up headers + headers = { + "Cache-Control": "no-cache, no-transform", + "Connection": "keep-alive", + "Content-Type": CONTENT_TYPE_SSE, + **({MCP_SESSION_ID_HEADER: self.mcp_session_id} if self.mcp_session_id else {}), + } + response = EventSourceResponse( + content=sse_stream_reader, + data_sender_callable=sse_writer, + headers=headers, + ) + + # Start the SSE response (this will send headers immediately) + try: + # First send the response to establish the 
SSE connection + async with anyio.create_task_group() as tg: + tg.start_soon(response, scope, receive, send) + # Then send the message to be processed by the server + metadata = ServerMessageMetadata(request_context=request) + session_message = SessionMessage(message, metadata=metadata) + await writer.send(session_message) + except Exception: + logger.exception("SSE response error") + await sse_stream_writer.aclose() + await sse_stream_reader.aclose() + await self._clean_up_memory_streams(request_id) + async def _handle_post_request(self, scope: Scope, request: Request, receive: Receive, send: Send) -> None: """Handle POST requests containing JSON-RPC messages.""" writer = self._read_stream_writer @@ -420,15 +507,7 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re # At this point we should have a response if response_message: # Check if this is an async operation response - keep stream open - if ( - isinstance(response_message.root, JSONRPCResponse) - and response_message.root.result - and "_operation" in response_message.root.result - and ( - ("token" in response_message.root.result["_operation"]) - and response_message.root.result["_operation"]["token"] - ) - ): + if self._is_async_operation_response(response_message): # This is an async operation - keep the stream open for elicitation/sampling should_pop_stream = False @@ -455,61 +534,10 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re if should_pop_stream: await self._clean_up_memory_streams(request_id) else: - # Create SSE stream - sse_stream_writer, sse_stream_reader = anyio.create_memory_object_stream[dict[str, str]](0) - - async def sse_writer(): - # Get the request ID from the incoming request message - try: - async with sse_stream_writer, request_stream_reader: - # Process messages from the request-specific stream - async for event_message in request_stream_reader: - # Build the event data - event_data = 
self._create_event_data(event_message) - await sse_stream_writer.send(event_data) - - # If response, remove from pending streams and close - if isinstance( - event_message.message.root, - JSONRPCResponse | JSONRPCError, - ): - break - except Exception: - logger.exception("Error in SSE writer") - finally: - logger.debug("Closing SSE writer") - await self._clean_up_memory_streams(request_id) - - # Create and start EventSourceResponse - # SSE stream mode (original behavior) - # Set up headers - headers = { - "Cache-Control": "no-cache, no-transform", - "Connection": "keep-alive", - "Content-Type": CONTENT_TYPE_SSE, - **({MCP_SESSION_ID_HEADER: self.mcp_session_id} if self.mcp_session_id else {}), - } - response = EventSourceResponse( - content=sse_stream_reader, - data_sender_callable=sse_writer, - headers=headers, + await self._handle_sse_mode( + message, request, writer, request_id, request_stream_reader, scope, receive, send ) - # Start the SSE response (this will send headers immediately) - try: - # First send the response to establish the SSE connection - async with anyio.create_task_group() as tg: - tg.start_soon(response, scope, receive, send) - # Then send the message to be processed by the server - metadata = ServerMessageMetadata(request_context=request) - session_message = SessionMessage(message, metadata=metadata) - await writer.send(session_message) - except Exception: - logger.exception("SSE response error") - await sse_stream_writer.aclose() - await sse_stream_reader.aclose() - await self._clean_up_memory_streams(request_id) - except Exception as err: logger.exception("Error handling POST request") response = self._create_error_response( From 4539c597884acd093fe2ca12ab8ec372824971e2 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 29 Sep 2025 12:33:03 -0700 Subject: [PATCH 21/41] Add basic documentation for async tools --- README.md | 284 ++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 284 insertions(+) diff --git a/README.md 
b/README.md index c4bfaa953..edf659596 100644 --- a/README.md +++ b/README.md @@ -487,6 +487,290 @@ def get_temperature(city: str) -> float: _Full example: [examples/snippets/servers/structured_output.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/structured_output.py)_ +#### Async Tools + +Tools can be configured to run asynchronously, allowing for long-running operations that execute in the background while clients poll for status and results. Async tools currently require protocol version `next` and support operation tokens for tracking execution state. + +Tools can specify their invocation mode: `sync` (default), `async`, or `["sync", "async"]` for hybrid tools that support both patterns. Async tools can provide immediate feedback while continuing to execute, and support configurable keep-alive duration for result availability. + + +```python +""" +FastMCP async tools example showing different invocation modes. + +cd to the `examples/snippets/clients` directory and run: + uv run server async_tools stdio +""" + +import asyncio + +from pydantic import BaseModel, Field + +from mcp import types +from mcp.server.fastmcp import Context, FastMCP + +# Create an MCP server with async operations support +mcp = FastMCP("Async Tools Demo") + + +class UserPreferences(BaseModel): + """Schema for collecting user preferences.""" + + continue_processing: bool = Field(description="Should we continue with the operation?") + priority_level: str = Field( + default="normal", + description="Priority level: low, normal, high", + ) + + +@mcp.tool(invocation_modes=["async"]) +async def async_elicitation_tool(operation: str, ctx: Context) -> str: # type: ignore[type-arg] + """An async tool that uses elicitation to get user input.""" + await ctx.info(f"Starting operation: {operation}") + + # Simulate some initial processing + await asyncio.sleep(0.5) + await ctx.report_progress(0.3, 1.0, "Initial processing complete") + + # Ask user for 
preferences + result = await ctx.elicit( + message=f"Operation '{operation}' requires user input. How should we proceed?", + schema=UserPreferences, + ) + + if result.action == "accept" and result.data: + if result.data.continue_processing: + await ctx.info(f"Continuing with {result.data.priority_level} priority") + # Simulate processing based on user choice + processing_time = {"low": 0.5, "normal": 1.0, "high": 1.5}.get(result.data.priority_level, 1.0) + await asyncio.sleep(processing_time) + await ctx.report_progress(1.0, 1.0, "Operation complete") + return f"Operation '{operation}' completed successfully with {result.data.priority_level} priority" + else: + await ctx.warning("User chose not to continue") + return f"Operation '{operation}' cancelled by user" + else: + await ctx.error("User declined or cancelled the operation") + return f"Operation '{operation}' aborted" + + +@mcp.tool() +def sync_tool(x: int) -> str: + """An implicitly-synchronous tool.""" + return f"Sync result: {x * 2}" + + +@mcp.tool(invocation_modes=["async"]) +async def async_only_tool(data: str, ctx: Context) -> str: # type: ignore[type-arg] + """An async-only tool that takes time to complete.""" + await ctx.info("Starting long-running analysis...") + + # Simulate long-running work with progress updates + for i in range(5): + await asyncio.sleep(0.5) + progress = (i + 1) / 5 + await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5") + + await ctx.info("Analysis complete!") + return f"Async analysis result for: {data}" + + +@mcp.tool(invocation_modes=["sync", "async"]) +def hybrid_tool(message: str, ctx: Context | None = None) -> str: # type: ignore[type-arg] + """A hybrid tool that works both sync and async.""" + if ctx: + # Async mode - we have context for progress reporting + import asyncio + + async def async_work(): + await ctx.info(f"Processing '{message}' asynchronously...") + await asyncio.sleep(0.5) # Simulate some work + await ctx.debug("Async processing complete") + 
+ # Run the async work (this is a bit of a hack for demo purposes) + try: + loop = asyncio.get_event_loop() + loop.create_task(async_work()) + except RuntimeError: + pass # No event loop running + + # Both sync and async modes return the same result + return f"Hybrid result: {message.upper()}" + + +async def immediate_feedback(operation: str) -> list[types.ContentBlock]: + """Provide immediate feedback for long-running operations.""" + return [types.TextContent(type="text", text=f"🚀 Starting {operation}... This may take a moment.")] + + +@mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) +async def long_running_analysis(operation: str, ctx: Context) -> str: # type: ignore[type-arg] + """Perform analysis with immediate user feedback.""" + await ctx.info(f"Beginning {operation} analysis") + + # Simulate long-running work with progress updates + for i in range(5): + await asyncio.sleep(1) + progress = (i + 1) / 5 + await ctx.report_progress(progress, 1.0, f"Step {i + 1}/5 complete") + + await ctx.info(f"Analysis '{operation}' completed successfully!") + return f"Analysis '{operation}' completed successfully with detailed results!" 
+ + +@mcp.tool(invocation_modes=["async"], keep_alive=1800) +async def long_running_task(task_name: str, ctx: Context) -> str: # type: ignore[type-arg] + """A long-running task with custom keep_alive duration.""" + await ctx.info(f"Starting long-running task: {task_name}") + + # Simulate extended processing + await asyncio.sleep(2) + await ctx.report_progress(0.5, 1.0, "Halfway through processing") + await asyncio.sleep(2) + + await ctx.info(f"Task '{task_name}' completed successfully") + return f"Long-running task '{task_name}' finished with 30-minute keep_alive" + + +if __name__ == "__main__": + mcp.run() +``` + +_Full example: [examples/snippets/servers/async_tools.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/async_tools.py)_ + + +Clients using protocol version `next` can interact with async tools by polling operation status and retrieving results: + + +```python +""" +Client example showing how to use async tools, including immediate result functionality. 
+ +cd to the `examples/snippets` directory and run: + uv run async-tools-client + uv run async-tools-client --protocol=latest # backwards compatible mode + uv run async-tools-client --protocol=next # async tools mode +""" + +import asyncio +import os +import sys + +from mcp import ClientSession, StdioServerParameters, types +from mcp.client.stdio import stdio_client +from mcp.shared.context import RequestContext + +# Create server parameters for stdio connection +server_params = StdioServerParameters( + command="uv", # Using uv to run the server + args=["run", "server", "async_tools", "stdio"], + env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, +) + + +async def demonstrate_async_tool(session: ClientSession): + """Demonstrate calling an async-only tool.""" + print("\n=== Asynchronous Tool Demo ===") + + # Call the async tool + result = await session.call_tool("async_only_tool", arguments={"data": "sample dataset"}) + + if result.operation: + token = result.operation.token + print(f"Async operation started with token: {token}") + + # Poll for status updates + while True: + status = await session.get_operation_status(token) + print(f"Status: {status.status}") + + if status.status == "completed": + # Get the final result + final_result = await session.get_operation_result(token) + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Final result: {content.text}") + break + elif status.status == "failed": + print(f"Operation failed: {status.error}") + break + elif status.status in ("canceled", "unknown"): + print(f"Operation ended with status: {status.status}") + break + + # Wait before polling again + await asyncio.sleep(1) + + +async def test_immediate_result_tool(session: ClientSession): + """Test calling async tool with immediate result functionality.""" + print("\n=== Immediate Result Tool Demo ===") + + # Call the async tool with immediate_result functionality + result = await 
session.call_tool("long_running_analysis", arguments={"operation": "data_processing"}) + + # Display immediate feedback (should be available immediately) + print("Immediate response received:") + if result.content: + for content in result.content: + if isinstance(content, types.TextContent): + print(f" 📋 {content.text}") + + # Check if there's an async operation to poll + if result.operation: + token = result.operation.token + print(f"\nAsync operation started with token: {token}") + print("Polling for final results...") + + # Poll for status updates and final result + while True: + status = await session.get_operation_status(token) + print(f" Status: {status.status}") + + if status.status == "completed": + # Get the final result + final_result = await session.get_operation_result(token) + print("\nFinal result received:") + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f" ✅ {content.text}") + break + elif status.status == "failed": + print(f" ❌ Operation failed: {status.error}") + break + + # Wait before polling again + await asyncio.sleep(1) + + +async def run(): + """Run async tool demonstrations.""" + protocol_version = "next" # Required for async tools support + + async with stdio_client(server_params) as (read, write): + async with ClientSession(read, write, protocol_version=protocol_version) as session: + await session.initialize() + + # List available tools to see invocation modes + tools = await session.list_tools() + print("Available tools:") + for tool in tools.tools: + invocation_mode = getattr(tool, "invocationMode", "sync") + print(f" - {tool.name}: {tool.description} (mode: {invocation_mode})") + + await demonstrate_async_tool(session) + await test_immediate_result_tool(session) + + +if __name__ == "__main__": + asyncio.run(run()) +``` + +_Full example: 
[examples/snippets/clients/async_tools_client.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/clients/async_tools_client.py)_ + + +The `@mcp.tool()` decorator accepts `invocation_modes` to specify supported execution patterns, `immediate_result` to provide instant feedback for async tools, and `keep_alive` to set how long operation results remain available (default: 300 seconds). + ### Prompts Prompts are reusable templates that help LLMs interact with your server effectively: From 97be6dd26d26a2d2cb4f5ad087548eadd7baa78b Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 29 Sep 2025 12:35:12 -0700 Subject: [PATCH 22/41] Remove misplaced server test --- tests/server/fastmcp/test_integration.py | 75 ------------------------ 1 file changed, 75 deletions(-) diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index fb53c4145..c03a25055 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -881,81 +881,6 @@ async def test_elicitation_callback(context: RequestContext[ClientSession, None] pytest.fail(f"Test timed out after {max_polls} polls") -# Test async elicitation tool with stdio transport (works as expected) -@pytest.mark.anyio -async def test_async_elicitation_tool_stdio() -> None: - """Test async elicitation tool functionality using stdio transport. - - This test works because stdio transport properly handles elicitation during async operations. 
- """ - import os - - from mcp import StdioServerParameters - from mcp.client.stdio import stdio_client - - # Use the same server parameters as the client - server_params = StdioServerParameters( - command="uv", - args=["run", "server", "async_tools", "stdio"], - env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, - ) - - # Use the same elicitation callback as the client - async def test_elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): - """Handle elicitation requests from the server.""" - logger.debug(f"Client elicitation callback called with message: {params.message}") - if "data_migration" in params.message: - logger.debug("Client accepting elicitation request") - return ElicitResult( - action="accept", - content={"continue_processing": True, "priority_level": "normal"}, - ) - else: - logger.debug("Client declining elicitation request") - return ElicitResult(action="decline") - - async with stdio_client(server_params) as (read, write): - async with ClientSession( - read, - write, - protocol_version="next", - elicitation_callback=test_elicitation_callback, - ) as session: - # Test initialization - result = await session.initialize() - assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Async Tools Demo" - - # Test async elicitation tool - elicit_result = await session.call_tool("async_elicitation_tool", {"operation": "data_migration"}) - assert elicit_result.operation is not None - token = elicit_result.operation.token - - # Poll for completion - max_polls = 20 - poll_count = 0 - while poll_count < max_polls: - status = await session.get_operation_status(token) - if status.status == "completed": - final_result = await session.get_operation_result(token) - assert not final_result.result.isError - assert len(final_result.result.content) == 1 - content = final_result.result.content[0] - assert isinstance(content, TextContent) - assert "Operation 'data_migration'" in content.text - assert 
"completed successfully" in content.text - return - elif status.status == "failed": - pytest.fail(f"Async elicitation failed: {status.error}") - elif status.status in ("canceled", "unknown"): - pytest.fail(f"Operation ended with status: {status.status}") - - poll_count += 1 - await asyncio.sleep(0.5) - - pytest.fail(f"Test timed out after {max_polls} polls") - - # Test async tools example with legacy protocol @pytest.mark.anyio @pytest.mark.parametrize( From b0d3f305d1dce1a88cbe73d18909d327f22ae010 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 29 Sep 2025 14:21:43 -0700 Subject: [PATCH 23/41] Split up async tool snippets to improve README readability --- README.md | 251 +++------ .../clients/async_elicitation_client.py | 118 +++++ .../snippets/clients/async_progress_client.py | 110 ++++ .../snippets/clients/async_sampling_client.py | 126 +++++ .../snippets/clients/async_tool_client.py | 59 +++ .../snippets/clients/async_tools_client.py | 349 ------------- examples/snippets/servers/async_tool_basic.py | 51 ++ .../servers/async_tool_elicitation.py | 103 ++++ .../snippets/servers/async_tool_immediate.py | 36 ++ .../snippets/servers/async_tool_progress.py | 70 +++ .../snippets/servers/async_tool_sampling.py | 105 ++++ examples/snippets/servers/async_tools.py | 231 --------- tests/server/fastmcp/test_integration.py | 479 +++++++++++------- 13 files changed, 1141 insertions(+), 947 deletions(-) create mode 100644 examples/snippets/clients/async_elicitation_client.py create mode 100644 examples/snippets/clients/async_progress_client.py create mode 100644 examples/snippets/clients/async_sampling_client.py create mode 100644 examples/snippets/clients/async_tool_client.py delete mode 100644 examples/snippets/clients/async_tools_client.py create mode 100644 examples/snippets/servers/async_tool_basic.py create mode 100644 examples/snippets/servers/async_tool_elicitation.py create mode 100644 examples/snippets/servers/async_tool_immediate.py create mode 100644 
examples/snippets/servers/async_tool_progress.py create mode 100644 examples/snippets/servers/async_tool_sampling.py delete mode 100644 examples/snippets/servers/async_tools.py diff --git a/README.md b/README.md index edf659596..19844bee8 100644 --- a/README.md +++ b/README.md @@ -493,280 +493,175 @@ Tools can be configured to run asynchronously, allowing for long-running operati Tools can specify their invocation mode: `sync` (default), `async`, or `["sync", "async"]` for hybrid tools that support both patterns. Async tools can provide immediate feedback while continuing to execute, and support configurable keep-alive duration for result availability. - + ```python """ -FastMCP async tools example showing different invocation modes. +Basic async tool example. cd to the `examples/snippets/clients` directory and run: - uv run server async_tools stdio + uv run server async_tool_basic stdio """ import asyncio -from pydantic import BaseModel, Field - -from mcp import types from mcp.server.fastmcp import Context, FastMCP -# Create an MCP server with async operations support -mcp = FastMCP("Async Tools Demo") - - -class UserPreferences(BaseModel): - """Schema for collecting user preferences.""" - - continue_processing: bool = Field(description="Should we continue with the operation?") - priority_level: str = Field( - default="normal", - description="Priority level: low, normal, high", - ) - - -@mcp.tool(invocation_modes=["async"]) -async def async_elicitation_tool(operation: str, ctx: Context) -> str: # type: ignore[type-arg] - """An async tool that uses elicitation to get user input.""" - await ctx.info(f"Starting operation: {operation}") - - # Simulate some initial processing - await asyncio.sleep(0.5) - await ctx.report_progress(0.3, 1.0, "Initial processing complete") - - # Ask user for preferences - result = await ctx.elicit( - message=f"Operation '{operation}' requires user input. 
How should we proceed?", - schema=UserPreferences, - ) - - if result.action == "accept" and result.data: - if result.data.continue_processing: - await ctx.info(f"Continuing with {result.data.priority_level} priority") - # Simulate processing based on user choice - processing_time = {"low": 0.5, "normal": 1.0, "high": 1.5}.get(result.data.priority_level, 1.0) - await asyncio.sleep(processing_time) - await ctx.report_progress(1.0, 1.0, "Operation complete") - return f"Operation '{operation}' completed successfully with {result.data.priority_level} priority" - else: - await ctx.warning("User chose not to continue") - return f"Operation '{operation}' cancelled by user" - else: - await ctx.error("User declined or cancelled the operation") - return f"Operation '{operation}' aborted" - - -@mcp.tool() -def sync_tool(x: int) -> str: - """An implicitly-synchronous tool.""" - return f"Sync result: {x * 2}" +mcp = FastMCP("Async Tool Basic") @mcp.tool(invocation_modes=["async"]) -async def async_only_tool(data: str, ctx: Context) -> str: # type: ignore[type-arg] - """An async-only tool that takes time to complete.""" - await ctx.info("Starting long-running analysis...") +async def analyze_data(dataset: str, ctx: Context) -> str: # type: ignore[type-arg] + """Analyze a dataset asynchronously with progress updates.""" + await ctx.info(f"Starting analysis of {dataset}") - # Simulate long-running work with progress updates + # Simulate analysis with progress updates for i in range(5): await asyncio.sleep(0.5) progress = (i + 1) / 5 await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5") - await ctx.info("Analysis complete!") - return f"Async analysis result for: {data}" + await ctx.info("Analysis complete") + return f"Analysis results for {dataset}: 95% accuracy achieved" @mcp.tool(invocation_modes=["sync", "async"]) -def hybrid_tool(message: str, ctx: Context | None = None) -> str: # type: ignore[type-arg] - """A hybrid tool that works both sync and async.""" +def 
process_text(text: str, ctx: Context | None = None) -> str: # type: ignore[type-arg] + """Process text in sync or async mode.""" if ctx: - # Async mode - we have context for progress reporting + # Async mode with context import asyncio - async def async_work(): - await ctx.info(f"Processing '{message}' asynchronously...") - await asyncio.sleep(0.5) # Simulate some work - await ctx.debug("Async processing complete") + async def async_processing(): + await ctx.info(f"Processing text asynchronously: {text[:20]}...") + await asyncio.sleep(0.3) - # Run the async work (this is a bit of a hack for demo purposes) try: loop = asyncio.get_event_loop() - loop.create_task(async_work()) + loop.create_task(async_processing()) except RuntimeError: - pass # No event loop running + pass - # Both sync and async modes return the same result - return f"Hybrid result: {message.upper()}" + return f"Processed: {text.upper()}" -async def immediate_feedback(operation: str) -> list[types.ContentBlock]: - """Provide immediate feedback for long-running operations.""" - return [types.TextContent(type="text", text=f"🚀 Starting {operation}... This may take a moment.")] +if __name__ == "__main__": + mcp.run() +``` +_Full example: [examples/snippets/servers/async_tool_basic.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/async_tool_basic.py)_ + -@mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) -async def long_running_analysis(operation: str, ctx: Context) -> str: # type: ignore[type-arg] - """Perform analysis with immediate user feedback.""" - await ctx.info(f"Beginning {operation} analysis") +Tools can also provide immediate feedback while continuing to execute asynchronously: - # Simulate long-running work with progress updates - for i in range(5): - await asyncio.sleep(1) - progress = (i + 1) / 5 - await ctx.report_progress(progress, 1.0, f"Step {i + 1}/5 complete") + +```python +""" +Async tool with immediate result example. 
+ +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_immediate stdio +""" + +import asyncio + +from mcp import types +from mcp.server.fastmcp import Context, FastMCP - await ctx.info(f"Analysis '{operation}' completed successfully!") - return f"Analysis '{operation}' completed successfully with detailed results!" +mcp = FastMCP("Async Tool Immediate") -@mcp.tool(invocation_modes=["async"], keep_alive=1800) -async def long_running_task(task_name: str, ctx: Context) -> str: # type: ignore[type-arg] - """A long-running task with custom keep_alive duration.""" - await ctx.info(f"Starting long-running task: {task_name}") +async def provide_immediate_feedback(operation: str) -> list[types.ContentBlock]: + """Provide immediate feedback while async operation starts.""" + return [types.TextContent(type="text", text=f"Starting {operation} operation. This will take a moment.")] - # Simulate extended processing - await asyncio.sleep(2) - await ctx.report_progress(0.5, 1.0, "Halfway through processing") - await asyncio.sleep(2) - await ctx.info(f"Task '{task_name}' completed successfully") - return f"Long-running task '{task_name}' finished with 30-minute keep_alive" +@mcp.tool(invocation_modes=["async"], immediate_result=provide_immediate_feedback) +async def long_analysis(operation: str, ctx: Context) -> str: # type: ignore[type-arg] + """Perform long-running analysis with immediate user feedback.""" + await ctx.info(f"Beginning {operation} analysis") + + # Simulate long-running work + for i in range(4): + await asyncio.sleep(1) + progress = (i + 1) / 4 + await ctx.report_progress(progress, 1.0, f"Analysis step {i + 1}/4") + + return f"Analysis '{operation}' completed with detailed results" if __name__ == "__main__": mcp.run() ``` -_Full example: [examples/snippets/servers/async_tools.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/async_tools.py)_ +_Full example: 
[examples/snippets/servers/async_tool_immediate.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/servers/async_tool_immediate.py)_ Clients using protocol version `next` can interact with async tools by polling operation status and retrieving results: - + ```python """ -Client example showing how to use async tools, including immediate result functionality. +Client example for async tools. cd to the `examples/snippets` directory and run: - uv run async-tools-client - uv run async-tools-client --protocol=latest # backwards compatible mode - uv run async-tools-client --protocol=next # async tools mode + uv run async-tool-client """ import asyncio import os -import sys from mcp import ClientSession, StdioServerParameters, types from mcp.client.stdio import stdio_client -from mcp.shared.context import RequestContext -# Create server parameters for stdio connection +# Server parameters for async tool example server_params = StdioServerParameters( - command="uv", # Using uv to run the server - args=["run", "server", "async_tools", "stdio"], + command="uv", + args=["run", "server", "async_tool_basic", "stdio"], env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, ) -async def demonstrate_async_tool(session: ClientSession): - """Demonstrate calling an async-only tool.""" - print("\n=== Asynchronous Tool Demo ===") +async def call_async_tool(session: ClientSession): + """Demonstrate calling an async tool.""" + print("Calling async tool...") - # Call the async tool - result = await session.call_tool("async_only_tool", arguments={"data": "sample dataset"}) + result = await session.call_tool("analyze_data", arguments={"dataset": "customer_data.csv"}) if result.operation: token = result.operation.token - print(f"Async operation started with token: {token}") + print(f"Operation started with token: {token}") - # Poll for status updates + # Poll for completion while True: status = await session.get_operation_status(token) print(f"Status: 
{status.status}") if status.status == "completed": - # Get the final result final_result = await session.get_operation_result(token) for content in final_result.result.content: if isinstance(content, types.TextContent): - print(f"Final result: {content.text}") + print(f"Result: {content.text}") break elif status.status == "failed": print(f"Operation failed: {status.error}") break - elif status.status in ("canceled", "unknown"): - print(f"Operation ended with status: {status.status}") - break - - # Wait before polling again - await asyncio.sleep(1) - - -async def test_immediate_result_tool(session: ClientSession): - """Test calling async tool with immediate result functionality.""" - print("\n=== Immediate Result Tool Demo ===") - # Call the async tool with immediate_result functionality - result = await session.call_tool("long_running_analysis", arguments={"operation": "data_processing"}) - - # Display immediate feedback (should be available immediately) - print("Immediate response received:") - if result.content: - for content in result.content: - if isinstance(content, types.TextContent): - print(f" 📋 {content.text}") - - # Check if there's an async operation to poll - if result.operation: - token = result.operation.token - print(f"\nAsync operation started with token: {token}") - print("Polling for final results...") - - # Poll for status updates and final result - while True: - status = await session.get_operation_status(token) - print(f" Status: {status.status}") - - if status.status == "completed": - # Get the final result - final_result = await session.get_operation_result(token) - print("\nFinal result received:") - for content in final_result.result.content: - if isinstance(content, types.TextContent): - print(f" ✅ {content.text}") - break - elif status.status == "failed": - print(f" ❌ Operation failed: {status.error}") - break - - # Wait before polling again - await asyncio.sleep(1) + await asyncio.sleep(0.5) async def run(): - """Run async tool 
demonstrations.""" - protocol_version = "next" # Required for async tools support - + """Run the async tool client example.""" async with stdio_client(server_params) as (read, write): - async with ClientSession(read, write, protocol_version=protocol_version) as session: + async with ClientSession(read, write, protocol_version="next") as session: await session.initialize() - - # List available tools to see invocation modes - tools = await session.list_tools() - print("Available tools:") - for tool in tools.tools: - invocation_mode = getattr(tool, "invocationMode", "sync") - print(f" - {tool.name}: {tool.description} (mode: {invocation_mode})") - - await demonstrate_async_tool(session) - await test_immediate_result_tool(session) + await call_async_tool(session) if __name__ == "__main__": asyncio.run(run()) ``` -_Full example: [examples/snippets/clients/async_tools_client.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/clients/async_tools_client.py)_ +_Full example: [examples/snippets/clients/async_tool_client.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/clients/async_tool_client.py)_ The `@mcp.tool()` decorator accepts `invocation_modes` to specify supported execution patterns, `immediate_result` to provide instant feedback for async tools, and `keep_alive` to set how long operation results remain available (default: 300 seconds). diff --git a/examples/snippets/clients/async_elicitation_client.py b/examples/snippets/clients/async_elicitation_client.py new file mode 100644 index 000000000..44194553f --- /dev/null +++ b/examples/snippets/clients/async_elicitation_client.py @@ -0,0 +1,118 @@ +""" +Client example for async tools with elicitation. 
+ +cd to the `examples/snippets` directory and run: + uv run async-elicitation-client +""" + +import asyncio +import os + +from mcp import ClientSession, StdioServerParameters, types +from mcp.client.stdio import stdio_client +from mcp.shared.context import RequestContext + +# Server parameters for async elicitation example +server_params = StdioServerParameters( + command="uv", + args=["run", "server", "async_tool_elicitation", "stdio"], + env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, +) + + +async def elicitation_callback(context: RequestContext[ClientSession, None], params: types.ElicitRequestParams): + """Handle elicitation requests from the server.""" + print(f"Server is asking: {params.message}") + + # Handle different types of elicitation + if "data_migration" in params.message: + print("Client responding: Continue with high priority") + return types.ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "high"}, + ) + elif "file operation" in params.message.lower() or "confirm" in params.message.lower(): + print("Client responding: Confirm operation with backup") + return types.ElicitResult( + action="accept", + content={"confirm_operation": True, "backup_first": True}, + ) + elif "How should we proceed" in params.message: + print("Client responding: Continue with normal priority") + return types.ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "normal"}, + ) + else: + print("Client responding: Decline") + return types.ElicitResult(action="decline") + + +async def test_process_with_confirmation(session: ClientSession): + """Test process that requires user confirmation.""" + print("Testing process with confirmation...") + + result = await session.call_tool("process_with_confirmation", {"operation": "data_migration"}) + + if result.operation: + token = result.operation.token + print(f"Operation started with token: {token}") + + while True: + status = await 
session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Result: {content.text}") + break + elif status.status == "failed": + print(f"Operation failed: {status.error}") + break + + await asyncio.sleep(0.3) + + +async def test_file_operation(session: ClientSession): + """Test file operation with confirmation.""" + print("\nTesting file operation...") + + result = await session.call_tool( + "file_operation", {"file_path": "/path/to/important_file.txt", "operation_type": "delete"} + ) + + if result.operation: + token = result.operation.token + print(f"File operation started with token: {token}") + + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Result: {content.text}") + break + elif status.status == "failed": + print(f"File operation failed: {status.error}") + break + + await asyncio.sleep(0.3) + + +async def run(): + """Run the async elicitation client example.""" + async with stdio_client(server_params) as (read, write): + async with ClientSession( + read, write, protocol_version="next", elicitation_callback=elicitation_callback + ) as session: + await session.initialize() + + await test_process_with_confirmation(session) + await test_file_operation(session) + + print("\nElicitation examples complete!") + + +if __name__ == "__main__": + asyncio.run(run()) diff --git a/examples/snippets/clients/async_progress_client.py b/examples/snippets/clients/async_progress_client.py new file mode 100644 index 000000000..42e2a7167 --- /dev/null +++ b/examples/snippets/clients/async_progress_client.py @@ -0,0 +1,110 @@ +""" +Client example for async tools with progress notifications. 
+ +cd to the `examples/snippets` directory and run: + uv run async-progress-client +""" + +import asyncio +import os + +from mcp import ClientSession, StdioServerParameters, types +from mcp.client.stdio import stdio_client + +# Server parameters for async progress example +server_params = StdioServerParameters( + command="uv", + args=["run", "server", "async_tool_progress", "stdio"], + env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, +) + + +async def test_batch_processing(session: ClientSession): + """Test batch processing with progress notifications.""" + print("Testing batch processing with progress notifications...") + + items = ["apple", "banana", "cherry", "date", "elderberry"] + progress_updates: list[tuple[float, float | None, str | None]] = [] + + async def progress_callback(progress: float, total: float | None, message: str | None) -> None: + progress_pct = int(progress * 100) if progress else 0 + total_str = f"/{int(total * 100)}%" if total else "" + message_str = f" - {message}" if message else "" + print(f"Progress: {progress_pct}{total_str}{message_str}") + progress_updates.append((progress, total, message)) + + result = await session.call_tool("batch_process", arguments={"items": items}, progress_callback=progress_callback) + + if result.operation: + token = result.operation.token + print(f"Batch operation started with token: {token}") + + # Poll for completion + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + + # Show structured result + if final_result.result.structuredContent: + print(f"Structured result: {final_result.result.structuredContent}") + + # Show text content + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Text result: {content.text}") + break + elif status.status == "failed": + print(f"Operation failed: {status.error}") + break + + await asyncio.sleep(0.3) + + 
print(f"Received {len(progress_updates)} progress updates") + + +async def test_data_pipeline(session: ClientSession): + """Test data pipeline with progress tracking.""" + print("\nTesting data pipeline...") + + operations = ["validate", "clean", "transform", "analyze", "export"] + + result = await session.call_tool( + "data_pipeline", arguments={"dataset": "customer_data.csv", "operations": operations} + ) + + if result.operation: + token = result.operation.token + print(f"Pipeline started with token: {token}") + + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + + if final_result.result.structuredContent: + print("Pipeline results:") + for op, result_text in final_result.result.structuredContent.items(): + print(f" {op}: {result_text}") + break + elif status.status == "failed": + print(f"Pipeline failed: {status.error}") + break + + await asyncio.sleep(0.3) + + +async def run(): + """Run the async progress client example.""" + async with stdio_client(server_params) as (read, write): + async with ClientSession(read, write, protocol_version="next") as session: + await session.initialize() + + await test_batch_processing(session) + await test_data_pipeline(session) + + print("\nProgress notification examples complete!") + + +if __name__ == "__main__": + asyncio.run(run()) diff --git a/examples/snippets/clients/async_sampling_client.py b/examples/snippets/clients/async_sampling_client.py new file mode 100644 index 000000000..eb0aa5822 --- /dev/null +++ b/examples/snippets/clients/async_sampling_client.py @@ -0,0 +1,126 @@ +""" +Client example for async tools with sampling (LLM interaction). 
+ +cd to the `examples/snippets` directory and run: + uv run async-sampling-client +""" + +import asyncio +import os + +from mcp import ClientSession, StdioServerParameters, types +from mcp.client.stdio import stdio_client +from mcp.shared.context import RequestContext + +# Server parameters for async sampling example +server_params = StdioServerParameters( + command="uv", + args=["run", "server", "async_tool_sampling", "stdio"], + env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, +) + + +async def sampling_callback( + context: RequestContext[ClientSession, None], params: types.CreateMessageRequestParams +) -> types.CreateMessageResult: + """Handle sampling requests from the server.""" + print("Server requesting LLM generation...") + + # Simulate LLM response based on the prompt + response = "Generic simulated LLM response." + + if params.messages and len(params.messages) > 0: + message = params.messages[0] + if hasattr(message, "content") and isinstance(message.content, types.TextContent): + prompt_text = message.content.text + + # Generate different responses based on prompt content + if "poem" in prompt_text.lower(): + response = "Roses are red, violets are blue,\nThis is a simulated poem for you!" + elif "story" in prompt_text.lower(): + response = "Once upon a time, in a digital realm, there lived a helpful AI assistant..." + elif "summary" in prompt_text.lower(): + response = "This is a concise summary of the requested topic, generated by simulation." + elif "analysis" in prompt_text.lower(): + response = ( + "Analysis: This topic demonstrates key concepts and relationships that are important to understand." + ) + else: + response = "This is a simulated LLM response for testing purposes." 
+ + return types.CreateMessageResult( + role="assistant", + content=types.TextContent(type="text", text=response), + model="test-model", + ) + + +async def test_content_generation(session: ClientSession): + """Test content generation with LLM sampling.""" + print("Testing content generation...") + + result = await session.call_tool("generate_content", {"topic": "artificial intelligence", "content_type": "poem"}) + + if result.operation: + token = result.operation.token + print(f"Content generation started with token: {token}") + + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Generated content:\n{content.text}") + break + elif status.status == "failed": + print(f"Generation failed: {status.error}") + break + + await asyncio.sleep(0.3) + + +async def test_multi_step_generation(session: ClientSession): + """Test multi-step content generation.""" + print("\nTesting multi-step generation...") + + steps = ["write a brief introduction", "explain the main concepts", "provide a conclusion"] + + result = await session.call_tool("multi_step_generation", {"topic": "machine learning", "steps": steps}) + + if result.operation: + token = result.operation.token + print(f"Multi-step generation started with token: {token}") + + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + + if final_result.result.structuredContent: + print("Generated content by step:") + for step, content in final_result.result.structuredContent.items(): + print(f"\n{step}:") + print(f" {content}") + break + elif status.status == "failed": + print(f"Multi-step generation failed: {status.error}") + break + + await asyncio.sleep(0.3) + + +async def run(): + """Run the async sampling client 
example.""" + async with stdio_client(server_params) as (read, write): + async with ClientSession(read, write, protocol_version="next", sampling_callback=sampling_callback) as session: + await session.initialize() + + await test_content_generation(session) + await test_multi_step_generation(session) + + print("\nSampling examples complete!") + + +if __name__ == "__main__": + asyncio.run(run()) diff --git a/examples/snippets/clients/async_tool_client.py b/examples/snippets/clients/async_tool_client.py new file mode 100644 index 000000000..52ee0be68 --- /dev/null +++ b/examples/snippets/clients/async_tool_client.py @@ -0,0 +1,59 @@ +""" +Client example for async tools. + +cd to the `examples/snippets` directory and run: + uv run async-tool-client +""" + +import asyncio +import os + +from mcp import ClientSession, StdioServerParameters, types +from mcp.client.stdio import stdio_client + +# Server parameters for async tool example +server_params = StdioServerParameters( + command="uv", + args=["run", "server", "async_tool_basic", "stdio"], + env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, +) + + +async def call_async_tool(session: ClientSession): + """Demonstrate calling an async tool.""" + print("Calling async tool...") + + result = await session.call_tool("analyze_data", arguments={"dataset": "customer_data.csv"}) + + if result.operation: + token = result.operation.token + print(f"Operation started with token: {token}") + + # Poll for completion + while True: + status = await session.get_operation_status(token) + print(f"Status: {status.status}") + + if status.status == "completed": + final_result = await session.get_operation_result(token) + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Result: {content.text}") + break + elif status.status == "failed": + print(f"Operation failed: {status.error}") + break + + await asyncio.sleep(0.5) + + +async def run(): + """Run the async tool client example.""" + async with 
stdio_client(server_params) as (read, write): + async with ClientSession(read, write, protocol_version="next") as session: + await session.initialize() + await call_async_tool(session) + + +if __name__ == "__main__": + asyncio.run(run()) diff --git a/examples/snippets/clients/async_tools_client.py b/examples/snippets/clients/async_tools_client.py deleted file mode 100644 index 35db25b38..000000000 --- a/examples/snippets/clients/async_tools_client.py +++ /dev/null @@ -1,349 +0,0 @@ -""" -Client example showing how to use async tools, including immediate result functionality. - -This example demonstrates: -- Synchronous tools (immediate response) -- Hybrid tools (sync/async modes) -- Async-only tools (background execution with polling) -- Batch processing with progress updates -- Data processing pipelines -- Elicitation (user input during async execution) -- Immediate result tools (instant feedback + async execution) - -cd to the `examples/snippets` directory and run: - uv run async-tools-client - uv run async-tools-client --protocol=latest # backwards compatible mode - uv run async-tools-client --protocol=next # async tools mode -""" - -import asyncio -import os -import sys - -from mcp import ClientSession, StdioServerParameters, types -from mcp.client.stdio import stdio_client -from mcp.shared.context import RequestContext - -# Create server parameters for stdio connection -server_params = StdioServerParameters( - command="uv", # Using uv to run the server - args=["run", "server", "async_tools", "stdio"], - env={"UV_INDEX": os.environ.get("UV_INDEX", "")}, -) - - -async def elicitation_callback(context: RequestContext[ClientSession, None], params: types.ElicitRequestParams): - """Handle elicitation requests from the server.""" - if "data_migration" in params.message: - return types.ElicitResult( - action="accept", - content={"continue_processing": True, "priority_level": "normal"}, - ) - else: - return types.ElicitResult(action="decline") - - -async def 
logging_callback(params: types.LoggingMessageNotificationParams): - """Handle logging messages from the server.""" - print(f"Server log: {params.data}", file=sys.stderr) - - -async def demonstrate_sync_tool(session: ClientSession): - """Demonstrate calling a synchronous tool.""" - print("\n=== Synchronous Tool Demo ===") - - result = await session.call_tool("sync_tool", arguments={"x": 21}) - - # Print the result - for content in result.content: - if isinstance(content, types.TextContent): - print(f"Sync tool result: {content.text}") - - -async def demonstrate_async_tool(session: ClientSession): - """Demonstrate calling an async-only tool.""" - print("\n=== Asynchronous Tool Demo ===") - - # Call the async tool - result = await session.call_tool("async_only_tool", arguments={"data": "sample dataset"}) - - if result.operation: - token = result.operation.token - print(f"Async operation started with token: {token}") - - # Poll for status updates - while True: - status = await session.get_operation_status(token) - print(f"Status: {status.status}") - - if status.status == "completed": - # Get the final result - final_result = await session.get_operation_result(token) - for content in final_result.result.content: - if isinstance(content, types.TextContent): - print(f"Final result: {content.text}") - break - elif status.status == "failed": - print(f"Operation failed: {status.error}") - break - elif status.status in ("canceled", "unknown"): - print(f"Operation ended with status: {status.status}") - break - - # Wait before polling again - await asyncio.sleep(1) - else: - # Synchronous result (shouldn't happen for async-only tools) - for content in result.content: - if isinstance(content, types.TextContent): - print(f"Unexpected sync result: {content.text}") - - -async def demonstrate_hybrid_tool(session: ClientSession): - """Demonstrate calling a hybrid tool in both modes.""" - print("\n=== Hybrid Tool Demo ===") - - # Call hybrid tool (will be sync by default for 
compatibility) - result = await session.call_tool("hybrid_tool", arguments={"message": "hello world"}) - - for content in result.content: - if isinstance(content, types.TextContent): - print(f"Hybrid tool result: {content.text}") - - -async def demonstrate_batch_processing(session: ClientSession): - """Demonstrate batch processing with progress updates.""" - print("\n=== Batch Processing Demo ===") - - items = ["apple", "banana", "cherry", "date", "elderberry"] - - # Define progress callback - async def progress_callback(progress: float, total: float | None, message: str | None) -> None: - progress_pct = int(progress * 100) if progress else 0 - total_str = f"/{int(total * 100)}%" if total else "" - message_str = f" - {message}" if message else "" - print(f"Progress: {progress_pct}{total_str}{message_str}") - - result = await session.call_tool( - "batch_operation_tool", arguments={"items": items}, progress_callback=progress_callback - ) - - if result.operation: - token = result.operation.token - print(f"Batch operation started with token: {token}") - - # Poll for status - while True: - status = await session.get_operation_status(token) - print(f"Status: {status.status}") - - if status.status == "completed": - # Get the final result - final_result = await session.get_operation_result(token) - - # Check for structured result - if final_result.result.structuredContent: - print(f"Structured result: {final_result.result.structuredContent}") - - # Also show text content - for content in final_result.result.content: - if isinstance(content, types.TextContent): - print(f"Text result: {content.text}") - break - elif status.status == "failed": - print(f"Operation failed: {status.error}") - break - elif status.status in ("canceled", "unknown"): - print(f"Operation ended with status: {status.status}") - break - - # Wait before polling again - await asyncio.sleep(0.5) - else: - print("Unexpected: batch operation returned synchronous result") - - -async def 
demonstrate_data_processing(session: ClientSession): - """Demonstrate complex data processing pipeline.""" - print("\n=== Data Processing Pipeline Demo ===") - - operations = ["validate", "clean", "transform", "analyze", "export"] - result = await session.call_tool( - "data_processing_tool", arguments={"dataset": "customer_data.csv", "operations": operations} - ) - - if result.operation: - token = result.operation.token - print(f"Data processing started with token: {token}") - - # Poll for completion - while True: - status = await session.get_operation_status(token) - print(f"Status: {status.status}") - - if status.status == "completed": - final_result = await session.get_operation_result(token) - - # Show structured result if available - if final_result.result.structuredContent: - print("Processing results:") - for op, result_text in final_result.result.structuredContent.items(): - print(f" {op}: {result_text}") - break - elif status.status == "failed": - print(f"Processing failed: {status.error}") - break - elif status.status in ("canceled", "unknown"): - print(f"Processing ended with status: {status.status}") - break - - await asyncio.sleep(0.8) - - -async def demonstrate_elicitation(session: ClientSession): - """Demonstrate async elicitation tool.""" - print("\n=== Async Elicitation Demo ===") - - result = await session.call_tool("async_elicitation_tool", arguments={"operation": "data_migration"}) - - if result.operation: - token = result.operation.token - print(f"Elicitation operation started with token: {token}") - - # Poll for completion - while True: - status = await session.get_operation_status(token) - print(f"Status: {status.status}") - - if status.status == "completed": - final_result = await session.get_operation_result(token) - for content in final_result.result.content: - if isinstance(content, types.TextContent): - print(f"Elicitation result: {content.text}") - break - elif status.status == "failed": - print(f"Elicitation failed: {status.error}") - 
break - elif status.status in ("canceled", "unknown"): - print(f"Elicitation ended with status: {status.status}") - break - - await asyncio.sleep(0.5) - - -async def test_immediate_result_tool(session: ClientSession): - """Test calling async tool with immediate result functionality. - - This demonstrates the immediate_result feature where async tools can provide - instant feedback while continuing to execute in the background. - """ - print("\n=== Immediate Result Tool Demo ===") - - # Call the async tool with immediate_result functionality - result = await session.call_tool("long_running_analysis", arguments={"operation": "data_processing"}) - - # Display immediate feedback (should be available immediately) - print("Immediate response received:") - if result.content: - for content in result.content: - if isinstance(content, types.TextContent): - print(f" 📋 {content.text}") - else: - print(" (No immediate content received)") - - # Check if there's an async operation to poll - if result.operation: - token = result.operation.token - print(f"\nAsync operation started with token: {token}") - print("Polling for final results...") - - # Poll for status updates and final result - while True: - status = await session.get_operation_status(token) - print(f" Status: {status.status}") - - if status.status == "completed": - # Get the final result - final_result = await session.get_operation_result(token) - print("\nFinal result received:") - for content in final_result.result.content: - if isinstance(content, types.TextContent): - print(f" ✅ {content.text}") - break - elif status.status == "failed": - print(f" ❌ Operation failed: {status.error}") - break - elif status.status in ("canceled", "unknown"): - print(f" ⚠️ Operation ended with status: {status.status}") - break - - # Wait before polling again - await asyncio.sleep(1) - else: - # This shouldn't happen for async tools, but handle gracefully - print("⚠️ Unexpected: tool returned synchronous result instead of async 
operation") - - print("Immediate result demonstration complete!") - - -async def run(): - """Run all async tool demonstrations.""" - # Determine protocol version from command line - protocol_version = "next" # Default to next for async tools - if len(sys.argv) > 1: - if "--protocol=latest" in sys.argv: - protocol_version = "2025-06-18" # Latest stable protocol - elif "--protocol=next" in sys.argv: - protocol_version = "next" # Development protocol version with async tools - - print(f"Using protocol version: {protocol_version}") - print() - - async with stdio_client(server_params) as (read, write): - # Use configured protocol version - async with ClientSession( - read, - write, - protocol_version=protocol_version, - elicitation_callback=elicitation_callback, - logging_callback=logging_callback, - ) as session: - # Initialize the connection - await session.initialize() - - # List available tools to see invocation modes - tools = await session.list_tools() - print("Available tools:") - for tool in tools.tools: - invocation_mode = getattr(tool, "invocationMode", "sync") - print(f" - {tool.name}: {tool.description} (mode: {invocation_mode})") - - # Demonstrate different tool types - await demonstrate_sync_tool(session) - await demonstrate_hybrid_tool(session) - await demonstrate_async_tool(session) - await demonstrate_batch_processing(session) - await demonstrate_data_processing(session) - await demonstrate_elicitation(session) - await test_immediate_result_tool(session) - - print("\n=== All demonstrations complete! 
===") - - -def main(): - """Entry point for the async tools client.""" - if "--help" in sys.argv or "-h" in sys.argv: - print("Usage: async-tools-client [--protocol=latest|next]") - print() - print("Protocol versions:") - print(" --protocol=latest Use stable protocol (only sync/hybrid tools visible)") - print(" --protocol=next Use development protocol (all async tools visible)") - print() - print("Default: --protocol=next") - return - - asyncio.run(run()) - - -if __name__ == "__main__": - main() diff --git a/examples/snippets/servers/async_tool_basic.py b/examples/snippets/servers/async_tool_basic.py new file mode 100644 index 000000000..f711b8bd7 --- /dev/null +++ b/examples/snippets/servers/async_tool_basic.py @@ -0,0 +1,51 @@ +""" +Basic async tool example. + +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_basic stdio +""" + +import asyncio + +from mcp.server.fastmcp import Context, FastMCP + +mcp = FastMCP("Async Tool Basic") + + +@mcp.tool(invocation_modes=["async"]) +async def analyze_data(dataset: str, ctx: Context) -> str: # type: ignore[type-arg] + """Analyze a dataset asynchronously with progress updates.""" + await ctx.info(f"Starting analysis of {dataset}") + + # Simulate analysis with progress updates + for i in range(5): + await asyncio.sleep(0.5) + progress = (i + 1) / 5 + await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5") + + await ctx.info("Analysis complete") + return f"Analysis results for {dataset}: 95% accuracy achieved" + + +@mcp.tool(invocation_modes=["sync", "async"]) +def process_text(text: str, ctx: Context | None = None) -> str: # type: ignore[type-arg] + """Process text in sync or async mode.""" + if ctx: + # Async mode with context + import asyncio + + async def async_processing(): + await ctx.info(f"Processing text asynchronously: {text[:20]}...") + await asyncio.sleep(0.3) + + try: + loop = asyncio.get_event_loop() + loop.create_task(async_processing()) + except RuntimeError: + 
pass + + return f"Processed: {text.upper()}" + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/snippets/servers/async_tool_elicitation.py b/examples/snippets/servers/async_tool_elicitation.py new file mode 100644 index 000000000..058c15ed0 --- /dev/null +++ b/examples/snippets/servers/async_tool_elicitation.py @@ -0,0 +1,103 @@ +""" +Async tool with elicitation example. + +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_elicitation stdio +""" + +import asyncio + +from pydantic import BaseModel, Field + +from mcp.server.fastmcp import Context, FastMCP + +mcp = FastMCP("Async Tool Elicitation") + + +class UserPreferences(BaseModel): + """Schema for collecting user preferences.""" + + continue_processing: bool = Field(description="Should we continue with the operation?") + priority_level: str = Field( + default="normal", + description="Priority level: low, normal, high", + ) + + +class FileOperationChoice(BaseModel): + """Schema for file operation confirmation.""" + + confirm_operation: bool = Field(description="Confirm the file operation?") + backup_first: bool = Field(default=True, description="Create backup before operation?") + + +@mcp.tool(invocation_modes=["async"]) +async def process_with_confirmation(operation: str, ctx: Context) -> str: # type: ignore[type-arg] + """Process an operation that requires user confirmation.""" + await ctx.info(f"Starting operation: {operation}") + + # Simulate some initial processing + await asyncio.sleep(0.5) + await ctx.report_progress(0.3, 1.0, "Initial processing complete") + + # Ask user for preferences + result = await ctx.elicit( + message=f"Operation '{operation}' requires user input. 
How should we proceed?", + schema=UserPreferences, + ) + + if result.action == "accept" and result.data: + if result.data.continue_processing: + await ctx.info(f"Continuing with {result.data.priority_level} priority") + # Simulate processing based on user choice + processing_time = {"low": 0.5, "normal": 1.0, "high": 1.5}.get(result.data.priority_level, 1.0) + await asyncio.sleep(processing_time) + await ctx.report_progress(1.0, 1.0, "Operation complete") + return f"Operation '{operation}' completed successfully with {result.data.priority_level} priority" + else: + await ctx.warning("User chose not to continue") + return f"Operation '{operation}' cancelled by user" + else: + await ctx.error("User declined or cancelled the operation") + return f"Operation '{operation}' aborted" + + +@mcp.tool(invocation_modes=["async"]) +async def file_operation(file_path: str, operation_type: str, ctx: Context) -> str: # type: ignore[type-arg] + """Perform file operation with user confirmation.""" + await ctx.info(f"Analyzing file: {file_path}") + + # Simulate initial analysis + await asyncio.sleep(1) + await ctx.report_progress(0.3, 1.0, "File analysis complete") + + # Simulate finding something that requires user confirmation + await ctx.warning(f"About to perform {operation_type} on {file_path} - requires confirmation") + + # Ask user for confirmation + result = await ctx.elicit( + message=f"Confirm {operation_type} operation on {file_path}?", + schema=FileOperationChoice, + ) + + if result.action == "accept" and result.data: + if result.data.confirm_operation: + if result.data.backup_first: + await ctx.info("Creating backup first...") + await asyncio.sleep(0.5) + await ctx.report_progress(0.7, 1.0, "Backup created") + + await ctx.info(f"Performing {operation_type} operation...") + await asyncio.sleep(1) + await ctx.report_progress(1.0, 1.0, "Operation complete") + + backup_msg = " (with backup)" if result.data.backup_first else " (no backup)" + return f"Successfully performed 
{operation_type} on {file_path}{backup_msg}" + else: + return f"Operation {operation_type} on {file_path} cancelled by user" + else: + return f"Operation {operation_type} on {file_path} declined" + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/snippets/servers/async_tool_immediate.py b/examples/snippets/servers/async_tool_immediate.py new file mode 100644 index 000000000..49898760f --- /dev/null +++ b/examples/snippets/servers/async_tool_immediate.py @@ -0,0 +1,36 @@ +""" +Async tool with immediate result example. + +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_immediate stdio +""" + +import asyncio + +from mcp import types +from mcp.server.fastmcp import Context, FastMCP + +mcp = FastMCP("Async Tool Immediate") + + +async def provide_immediate_feedback(operation: str) -> list[types.ContentBlock]: + """Provide immediate feedback while async operation starts.""" + return [types.TextContent(type="text", text=f"Starting {operation} operation. This will take a moment.")] + + +@mcp.tool(invocation_modes=["async"], immediate_result=provide_immediate_feedback) +async def long_analysis(operation: str, ctx: Context) -> str: # type: ignore[type-arg] + """Perform long-running analysis with immediate user feedback.""" + await ctx.info(f"Beginning {operation} analysis") + + # Simulate long-running work + for i in range(4): + await asyncio.sleep(1) + progress = (i + 1) / 4 + await ctx.report_progress(progress, 1.0, f"Analysis step {i + 1}/4") + + return f"Analysis '{operation}' completed with detailed results" + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/snippets/servers/async_tool_progress.py b/examples/snippets/servers/async_tool_progress.py new file mode 100644 index 000000000..633c7570e --- /dev/null +++ b/examples/snippets/servers/async_tool_progress.py @@ -0,0 +1,70 @@ +""" +Async tool with progress notifications example. 
+ +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_progress stdio +""" + +import asyncio + +from mcp.server.fastmcp import Context, FastMCP + +mcp = FastMCP("Async Tool Progress") + + +@mcp.tool(invocation_modes=["async"]) +async def batch_process(items: list[str], ctx: Context) -> list[str]: # type: ignore[type-arg] + """Process a batch of items with detailed progress reporting.""" + await ctx.info(f"Starting batch processing of {len(items)} items") + + results: list[str] = [] + + for i, item in enumerate(items): + await ctx.debug(f"Processing item {i + 1}: {item}") + + # Simulate variable processing time + processing_time = 0.3 + (len(item) * 0.1) + await asyncio.sleep(processing_time) + + # Report progress for this item + progress = (i + 1) / len(items) + await ctx.report_progress(progress, 1.0, f"Processed {i + 1}/{len(items)}: {item}") + + # Process the item + result = f"PROCESSED_{item.upper()}" + results.append(result) + + await ctx.debug(f"Item {i + 1} result: {result}") + + await ctx.info(f"Batch processing complete! 
Processed {len(results)} items") + return results + + +@mcp.tool(invocation_modes=["async"]) +async def data_pipeline(dataset: str, operations: list[str], ctx: Context) -> dict[str, str]: # type: ignore[type-arg] + """Execute a data processing pipeline with progress updates.""" + await ctx.info(f"Starting data pipeline for {dataset}") + + results: dict[str, str] = {} + total_ops = len(operations) + + for i, operation in enumerate(operations): + await ctx.debug(f"Executing operation: {operation}") + + # Simulate processing time that increases with complexity + processing_time = 0.5 + (i * 0.2) + await asyncio.sleep(processing_time) + + # Report progress + progress = (i + 1) / total_ops + await ctx.report_progress(progress, 1.0, f"Completed {operation}") + + # Store result + results[operation] = f"Result of {operation} on {dataset}" + + await ctx.info("Data pipeline complete!") + return results + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/snippets/servers/async_tool_sampling.py b/examples/snippets/servers/async_tool_sampling.py new file mode 100644 index 000000000..253aadada --- /dev/null +++ b/examples/snippets/servers/async_tool_sampling.py @@ -0,0 +1,105 @@ +""" +Async tool with sampling (LLM interaction) example. 
+ +cd to the `examples/snippets/clients` directory and run: + uv run server async_tool_sampling stdio +""" + +import asyncio + +from mcp.server.fastmcp import Context, FastMCP +from mcp.types import SamplingMessage, TextContent + +mcp = FastMCP("Async Tool Sampling") + + +@mcp.tool(invocation_modes=["async"]) +async def generate_content(topic: str, content_type: str, ctx: Context) -> str: # type: ignore[type-arg] + """Generate content using LLM sampling with progress updates.""" + await ctx.info(f"Starting {content_type} generation for topic: {topic}") + + # Simulate preparation + await asyncio.sleep(0.5) + await ctx.report_progress(0.2, 1.0, "Preparing content generation") + + # Create prompt based on content type + prompts = { + "poem": f"Write a creative poem about {topic}", + "story": f"Write a short story about {topic}", + "summary": f"Write a concise summary about {topic}", + "analysis": f"Provide a detailed analysis of {topic}", + } + + prompt = prompts.get(content_type, f"Write about {topic}") + await ctx.report_progress(0.4, 1.0, "Prompt prepared") + + # Use LLM sampling + await ctx.info("Requesting content from LLM...") + result = await ctx.session.create_message( + messages=[ + SamplingMessage( + role="user", + content=TextContent(type="text", text=prompt), + ) + ], + max_tokens=200, + ) + + await ctx.report_progress(0.8, 1.0, "Content generated") + + # Process the result + await asyncio.sleep(0.3) + await ctx.report_progress(1.0, 1.0, "Processing complete") + + if result.content.type == "text": + await ctx.info(f"Successfully generated {content_type}") + return f"Generated {content_type} about '{topic}':\n\n{result.content.text}" + else: + await ctx.warning("Unexpected content type from LLM") + return f"Generated {content_type} about '{topic}': {str(result.content)}" + + +@mcp.tool(invocation_modes=["async"]) +async def multi_step_generation(topic: str, steps: list[str], ctx: Context) -> dict[str, str]: # type: ignore[type-arg] + """Generate multiple 
pieces of content in sequence.""" + await ctx.info(f"Starting multi-step generation for: {topic}") + + results: dict[str, str] = {} + total_steps = len(steps) + + for i, step in enumerate(steps): + await ctx.debug(f"Processing step {i + 1}: {step}") + + # Create step-specific prompt + prompt = f"For the topic '{topic}', please {step}" + + # Use LLM sampling for this step + result = await ctx.session.create_message( + messages=[ + SamplingMessage( + role="user", + content=TextContent(type="text", text=prompt), + ) + ], + max_tokens=150, + ) + + # Store result + if result.content.type == "text": + results[step] = result.content.text + else: + results[step] = str(result.content) + + # Report progress + progress = (i + 1) / total_steps + await ctx.report_progress(progress, 1.0, f"Completed step {i + 1}/{total_steps}: {step}") + + # Small delay between steps + await asyncio.sleep(0.2) + + await ctx.info(f"Multi-step generation complete! Generated {len(results)} pieces of content") + return results + + +if __name__ == "__main__": + mcp.run() diff --git a/examples/snippets/servers/async_tools.py b/examples/snippets/servers/async_tools.py deleted file mode 100644 index 575f1a7ab..000000000 --- a/examples/snippets/servers/async_tools.py +++ /dev/null @@ -1,231 +0,0 @@ -""" -FastMCP async tools example showing different invocation modes. 
- -cd to the `examples/snippets/clients` directory and run: - uv run server async_tools stdio -""" - -import asyncio - -from pydantic import BaseModel, Field - -from mcp import types -from mcp.server.fastmcp import Context, FastMCP - -# Create an MCP server with async operations support -mcp = FastMCP("Async Tools Demo") - - -class UserPreferences(BaseModel): - """Schema for collecting user preferences.""" - - continue_processing: bool = Field(description="Should we continue with the operation?") - priority_level: str = Field( - default="normal", - description="Priority level: low, normal, high", - ) - - -@mcp.tool(invocation_modes=["async"]) -async def async_elicitation_tool(operation: str, ctx: Context) -> str: # type: ignore[type-arg] - """An async tool that uses elicitation to get user input.""" - await ctx.info(f"Starting operation: {operation}") - - # Simulate some initial processing - await asyncio.sleep(0.5) - await ctx.report_progress(0.3, 1.0, "Initial processing complete") - - await ctx.debug("About to call elicit") - try: - # Ask user for preferences - result = await ctx.elicit( - message=f"Operation '{operation}' requires user input. 
How should we proceed?", - schema=UserPreferences, - ) - await ctx.debug(f"Elicit result: {result}") - except Exception as e: - await ctx.error(f"Elicitation failed: {e}") - raise - - if result.action == "accept" and result.data: - if result.data.continue_processing: - await ctx.info(f"Continuing with {result.data.priority_level} priority") - # Simulate processing based on user choice - processing_time = {"low": 0.5, "normal": 1.0, "high": 1.5}.get(result.data.priority_level, 1.0) - await asyncio.sleep(processing_time) - await ctx.report_progress(1.0, 1.0, "Operation complete") - return f"Operation '{operation}' completed successfully with {result.data.priority_level} priority" - else: - await ctx.warning("User chose not to continue") - return f"Operation '{operation}' cancelled by user" - else: - await ctx.error("User declined or cancelled the operation") - return f"Operation '{operation}' aborted" - - -@mcp.tool() -def sync_tool(x: int) -> str: - """An implicitly-synchronous tool.""" - return f"Sync result: {x * 2}" - - -@mcp.tool(invocation_modes=["async"]) -async def async_only_tool(data: str, ctx: Context) -> str: # type: ignore[type-arg] - """An async-only tool that takes time to complete.""" - await ctx.info("Starting long-running analysis...") - - # Simulate long-running work with progress updates - for i in range(5): - await asyncio.sleep(0.5) - progress = (i + 1) / 5 - await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5") - - await ctx.info("Analysis complete!") - return f"Async analysis result for: {data}" - - -@mcp.tool(invocation_modes=["sync", "async"]) -def hybrid_tool(message: str, ctx: Context | None = None) -> str: # type: ignore[type-arg] - """A hybrid tool that works both sync and async.""" - if ctx: - # Async mode - we have context for progress reporting - import asyncio - - async def async_work(): - await ctx.info(f"Processing '{message}' asynchronously...") - await asyncio.sleep(0.5) # Simulate some work - await 
ctx.debug("Async processing complete") - - # Run the async work (this is a bit of a hack for demo purposes) - try: - loop = asyncio.get_event_loop() - loop.create_task(async_work()) - except RuntimeError: - pass # No event loop running - - # Both sync and async modes return the same result - return f"Hybrid result: {message.upper()}" - - -@mcp.tool(invocation_modes=["async"]) -async def data_processing_tool(dataset: str, operations: list[str], ctx: Context) -> dict[str, str]: # type: ignore[type-arg] - """Simulate a complex data processing pipeline.""" - await ctx.info(f"Starting data processing pipeline for {dataset}") - - results: dict[str, str] = {} - total_ops = len(operations) - - for i, operation in enumerate(operations): - await ctx.debug(f"Executing operation: {operation}") - - # Simulate processing time - processing_time = 0.5 + (i * 0.2) # Increasing complexity - await asyncio.sleep(processing_time) - - # Report progress - progress = (i + 1) / total_ops - await ctx.report_progress(progress, 1.0, f"Completed {operation}") - - # Store result - results[operation] = f"Result of {operation} on {dataset}" - - await ctx.info("Data processing pipeline complete!") - return results - - -@mcp.tool(invocation_modes=["async"]) -async def file_analysis_tool(file_path: str, ctx: Context) -> str: # type: ignore[type-arg] - """Simulate file analysis with user interaction.""" - await ctx.info(f"Analyzing file: {file_path}") - - # Simulate initial analysis - await asyncio.sleep(1) - await ctx.report_progress(0.3, 1.0, "Initial scan complete") - - # Simulate finding an issue that requires user input - await ctx.warning("Found potential security issue - requires user confirmation") - - # In a real implementation, you would use ctx.elicit() here to ask the user - # For this demo, we'll just simulate the decision - await asyncio.sleep(0.5) - await ctx.info("User confirmed - continuing analysis") - - # Complete the analysis - await asyncio.sleep(1) - await 
ctx.report_progress(1.0, 1.0, "Analysis complete") - - return f"File analysis complete for {file_path}. No issues found after user review." - - -@mcp.tool(invocation_modes=["async"]) -async def batch_operation_tool(items: list[str], ctx: Context) -> list[str]: # type: ignore[type-arg] - """Process a batch of items with detailed progress reporting.""" - await ctx.info(f"Starting batch operation on {len(items)} items") - - results: list[str] = [] - - for i, item in enumerate(items): - await ctx.debug(f"Processing item {i + 1}: {item}") - - # Simulate variable processing time - processing_time = 0.2 + (len(item) * 0.1) - await asyncio.sleep(processing_time) - - # Report progress for this item - progress = (i + 1) / len(items) - await ctx.report_progress(progress, 1.0, f"Processed {i + 1}/{len(items)}: {item}") - - # Process the item - result = f"PROCESSED_{item.upper()}" - results.append(result) - - await ctx.debug(f"Item {i + 1} result: {result}") - - await ctx.info(f"Batch operation complete! 
Processed {len(results)} items") - return results - - -@mcp.tool(invocation_modes=["async"], keep_alive=1800) -async def long_running_task(task_name: str, ctx: Context) -> str: # type: ignore[type-arg] - """A long-running task with custom keep_alive duration.""" - await ctx.info(f"Starting long-running task: {task_name}") - - # Simulate extended processing - await asyncio.sleep(2) - await ctx.report_progress(0.5, 1.0, "Halfway through processing") - await asyncio.sleep(2) - - await ctx.info(f"Task '{task_name}' completed successfully") - return f"Long-running task '{task_name}' finished with 30-minute keep_alive" - - -@mcp.tool(invocation_modes=["async"], keep_alive=2) -async def quick_expiry_task(message: str, ctx: Context) -> str: # type: ignore[type-arg] - """A task with very short keep_alive for testing expiry.""" - await ctx.info(f"Quick task starting: {message}") - await asyncio.sleep(1) - return f"Quick task completed: {message} (expires in 2 seconds)" - - -async def immediate_feedback(operation: str) -> list[types.ContentBlock]: - """Provide immediate feedback for long-running operations.""" - return [types.TextContent(type="text", text=f"🚀 Starting {operation}... This may take a moment.")] - - -@mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) -async def long_running_analysis(operation: str, ctx: Context) -> str: # type: ignore[type-arg] - """Perform analysis with immediate user feedback.""" - await ctx.info(f"Beginning {operation} analysis") - - # Simulate long-running work with progress updates - for i in range(5): - await asyncio.sleep(1) - progress = (i + 1) / 5 - await ctx.report_progress(progress, 1.0, f"Step {i + 1}/5 complete") - - await ctx.info(f"Analysis '{operation}' completed successfully!") - return f"Analysis '{operation}' completed successfully with detailed results!" 
- - -if __name__ == "__main__": - mcp.run() diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index c03a25055..8853516e9 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -24,7 +24,11 @@ from pydantic import AnyUrl from examples.snippets.servers import ( - async_tools, + async_tool_basic, + async_tool_elicitation, + async_tool_immediate, + async_tool_progress, + async_tool_sampling, basic_prompt, basic_resource, basic_tool, @@ -109,8 +113,16 @@ def server_url(server_port: int) -> str: def run_server_with_transport(module_name: str, port: int, transport: str) -> None: """Run server with specified transport.""" # Get the MCP instance based on module name - if module_name == "async_tools": - mcp = async_tools.mcp + if module_name == "async_tool_basic": + mcp = async_tool_basic.mcp + elif module_name == "async_tool_elicitation": + mcp = async_tool_elicitation.mcp + elif module_name == "async_tool_immediate": + mcp = async_tool_immediate.mcp + elif module_name == "async_tool_progress": + mcp = async_tool_progress.mcp + elif module_name == "async_tool_sampling": + mcp = async_tool_sampling.mcp elif module_name == "basic_tool": mcp = basic_tool.mcp elif module_name == "basic_resource": @@ -683,12 +695,12 @@ async def test_fastmcp_quickstart(server_transport: str, server_url: str) -> Non [ # Skip SSE for async tools - SSE client has issues with long polling in test environment # causing BrokenResourceError during async operation status polling - # ("async_tools", "sse"), - ("async_tools", "streamable-http"), + # ("async_tool_basic", "sse"), + ("async_tool_basic", "streamable-http"), ], indirect=True, ) -async def test_async_tools(server_transport: str, server_url: str) -> None: +async def test_async_tool_basic(server_transport: str, server_url: str) -> None: """Test async tools functionality with 'next' protocol version.""" transport = server_transport client_cm = 
create_client_for_transport(transport, server_url) @@ -699,20 +711,23 @@ async def test_async_tools(server_transport: str, server_url: str) -> None: # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Async Tools Demo" + assert result.serverInfo.name == "Async Tool Basic" # Test sync tool (should work normally) - sync_result = await session.call_tool("sync_tool", {"x": 21}) + sync_result = await session.call_tool("process_text", {"text": "hello"}) assert len(sync_result.content) == 1 assert isinstance(sync_result.content[0], TextContent) - assert sync_result.content[0].text == "Sync result: 42" + assert sync_result.content[0].text == "Processed: HELLO" # Test async-only tool (should return operation token) - async_result = await session.call_tool("async_only_tool", {"data": "test data"}) + async_result = await session.call_tool("analyze_data", {"dataset": "test data"}) assert async_result.operation is not None token = async_result.operation.token - while True: + # Poll for completion with timeout + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: status = await session.get_operation_status(token) if status.status == "completed": final_result = await session.get_operation_result(token) @@ -720,165 +735,21 @@ async def test_async_tools(server_transport: str, server_url: str) -> None: assert len(final_result.result.content) == 1 content = final_result.result.content[0] assert isinstance(content, TextContent) - assert "Async analysis result for: test data" in content.text + assert "Analysis results for test data: 95% accuracy achieved" in content.text break elif status.status == "failed": pytest.fail(f"Async operation failed: {status.error}") - # Test hybrid tool (should work as sync by default) - hybrid_result = await session.call_tool("hybrid_tool", {"message": "hello"}) - assert len(hybrid_result.content) == 1 - assert isinstance(hybrid_result.content[0], 
TextContent) - assert "Hybrid result: HELLO" in hybrid_result.content[0].text - - # Test long-running task with custom keep_alive - long_task_result = await session.call_tool("long_running_task", {"task_name": "test_task"}) - assert long_task_result.operation is not None - long_token = long_task_result.operation.token - - while True: - status = await session.get_operation_status(long_token) - if status.status == "completed": - final_result = await session.get_operation_result(long_token) - assert not final_result.result.isError - assert len(final_result.result.content) == 1 - content = final_result.result.content[0] - assert isinstance(content, TextContent) - assert "Long-running task 'test_task' finished with 30-minute keep_alive" in content.text - break - elif status.status == "failed": - pytest.fail(f"Long-running task failed: {status.error}") - - # Test quick expiry task (should complete then expire) - quick_result = await session.call_tool("quick_expiry_task", {"message": "test_expiry"}) - assert quick_result.operation is not None - quick_token = quick_result.operation.token - - # Wait for completion - while True: - status = await session.get_operation_status(quick_token) - if status.status == "completed": - break - elif status.status == "failed": - pytest.fail(f"Quick task failed: {status.error}") - - # Wait for expiry (2 seconds + buffer) - await asyncio.sleep(3) - - # Should now be expired - with pytest.raises(Exception): # Should raise error when trying to access expired operation - await session.get_operation_result(quick_token) - - # Test batch operation with progress notifications - progress_received = False - - async def progress_callback(progress: float, total: float | None, message: str | None) -> None: - nonlocal progress_received - progress_received = True - assert 0.0 <= progress <= 1.0 # Progress should be between 0 and 1 - - batch_result = await session.call_tool( - "batch_operation_tool", - {"items": ["apple", "banana", "cherry"]}, - 
progress_callback=progress_callback, - ) - assert batch_result.operation is not None - batch_token = batch_result.operation.token - - while True: - status = await session.get_operation_status(batch_token) - - if status.status == "completed": - final_result = await session.get_operation_result(batch_token) - assert not final_result.result.isError - # Should have structured content with processed items - if final_result.result.structuredContent: - # Structured content is wrapped in {"result": [...]} for list return types - assert isinstance(final_result.result.structuredContent, dict) - assert "result" in final_result.result.structuredContent - assert isinstance(final_result.result.structuredContent["result"], list) - assert len(final_result.result.structuredContent["result"]) == 3 - break - elif status.status == "failed": - pytest.fail(f"Batch operation failed: {status.error}") - - # Assert that we received at least one progress notification - assert progress_received, "Should have received progress notifications during batch operation" - - -# Test async elicitation tool (demonstrates bug in streamable-http transport) -@pytest.mark.anyio -@pytest.mark.parametrize( - "server_transport", - [ - ("async_tools", "streamable-http"), - ], - indirect=True, -) -async def test_async_elicitation_tool(server_transport: str, server_url: str) -> None: - """Test async elicitation tool functionality. - - This test demonstrates a bug in streamable-http transport where elicitation - requests during async operations don't reach the client callback. 
- """ - transport = server_transport - client_cm = create_client_for_transport(transport, server_url) - - # Use the same elicitation callback as the client - async def test_elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): - """Handle elicitation requests from the server.""" - logger.debug(f"Client elicitation callback called with message: {params.message}") - if "data_migration" in params.message: - logger.debug("Client accepting elicitation request") - return ElicitResult( - action="accept", - content={"continue_processing": True, "priority_level": "normal"}, - ) - else: - logger.debug("Client declining elicitation request") - return ElicitResult(action="decline") - - async with client_cm as client_streams: - read_stream, write_stream = unpack_streams(client_streams) - async with ClientSession( - read_stream, - write_stream, - protocol_version="next", - elicitation_callback=test_elicitation_callback, - ) as session: - # Test initialization - result = await session.initialize() - assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Async Tools Demo" - - # Test async elicitation tool - same as client - elicit_result = await session.call_tool("async_elicitation_tool", {"operation": "data_migration"}) - assert elicit_result.operation is not None - token = elicit_result.operation.token - - # Poll exactly like the client does - max_polls = 20 - poll_count = 0 - while poll_count < max_polls: - status = await session.get_operation_status(token) - if status.status == "completed": - final_result = await session.get_operation_result(token) - assert not final_result.result.isError - assert len(final_result.result.content) == 1 - content = final_result.result.content[0] - assert isinstance(content, TextContent) - assert "Operation 'data_migration'" in content.text - assert "completed successfully" in content.text - return - elif status.status == "failed": - pytest.fail(f"Async elicitation failed: 
{status.error}") - elif status.status in ("canceled", "unknown"): - pytest.fail(f"Operation ended with status: {status.status}") - - poll_count += 1 + attempt += 1 await asyncio.sleep(0.5) + else: + pytest.fail("Async operation timed out") - pytest.fail(f"Test timed out after {max_polls} polls") + # Test hybrid tool (process_text can work in sync or async mode) + hybrid_result = await session.call_tool("process_text", {"text": "world"}) + assert len(hybrid_result.content) == 1 + assert isinstance(hybrid_result.content[0], TextContent) + assert "Processed: WORLD" in hybrid_result.content[0].text # Test async tools example with legacy protocol @@ -886,11 +757,11 @@ async def test_elicitation_callback(context: RequestContext[ClientSession, None] @pytest.mark.parametrize( "server_transport", [ - ("async_tools", "streamable-http"), + ("async_tool_basic", "streamable-http"), ], indirect=True, ) -async def test_async_tools_legacy_protocol(server_transport: str, server_url: str) -> None: +async def test_async_tool_basic_legacy_protocol(server_transport: str, server_url: str) -> None: """Test async tools functionality with '2025-06-18' protocol version.""" transport = server_transport client_cm = create_client_for_transport(transport, server_url) @@ -901,27 +772,27 @@ async def test_async_tools_legacy_protocol(server_transport: str, server_url: st # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Async Tools Demo" + assert result.serverInfo.name == "Async Tool Basic" # Test sync tool (should work normally) - sync_result = await session.call_tool("sync_tool", {"x": 21}) + sync_result = await session.call_tool("process_text", {"text": "hello"}) assert len(sync_result.content) == 1 assert isinstance(sync_result.content[0], TextContent) - assert sync_result.content[0].text == "Sync result: 42" + assert sync_result.content[0].text == "Processed: HELLO" # Test async-only tool (executes 
synchronously with legacy protocol) - async_result = await session.call_tool("async_only_tool", {"data": "test data"}) + async_result = await session.call_tool("analyze_data", {"dataset": "test data"}) assert async_result.operation is None # No operation token with legacy protocol assert len(async_result.content) == 1 content = async_result.content[0] assert isinstance(content, TextContent) - assert "Async analysis result for: test data" in content.text + assert "Analysis results for test data: 95% accuracy achieved" in content.text # Test hybrid tool (should work as sync) - hybrid_result = await session.call_tool("hybrid_tool", {"message": "hello"}) + hybrid_result = await session.call_tool("process_text", {"text": "hello"}) assert len(hybrid_result.content) == 1 assert isinstance(hybrid_result.content[0], TextContent) - assert "Hybrid result: HELLO" in hybrid_result.content[0].text + assert "Processed: HELLO" in hybrid_result.content[0].text # Test structured output example @@ -965,8 +836,8 @@ async def test_structured_output(server_transport: str, server_url: str) -> None @pytest.mark.parametrize( "server_transport", [ - ("async_tools", "sse"), - ("async_tools", "streamable-http"), + ("async_tool_immediate", "sse"), + ("async_tool_immediate", "streamable-http"), ], indirect=True, ) @@ -981,22 +852,24 @@ async def test_immediate_result_integration(server_transport: str, server_url: s # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Async Tools Demo" + assert result.serverInfo.name == "Async Tool Immediate" # Test tool with immediate_result - immediate_result = await session.call_tool("long_running_analysis", {"operation": "data_processing"}) + immediate_result = await session.call_tool("long_analysis", {"operation": "data_processing"}) # Verify immediate result is returned in content assert len(immediate_result.content) == 1 assert isinstance(immediate_result.content[0], 
TextContent) - assert "🚀 Starting data_processing... This may take a moment." in immediate_result.content[0].text + assert "Starting data_processing operation. This will take a moment." in immediate_result.content[0].text # Verify async operation is created assert immediate_result.operation is not None token = immediate_result.operation.token - # Poll for final result - while True: + # Poll for final result with timeout + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: status = await session.get_operation_status(token) if status.status == "completed": final_result = await session.get_operation_result(token) @@ -1004,19 +877,23 @@ async def test_immediate_result_integration(server_transport: str, server_url: s assert len(final_result.result.content) == 1 content = final_result.result.content[0] assert isinstance(content, TextContent) - assert "Analysis 'data_processing' completed successfully with detailed results!" in content.text + assert "Analysis 'data_processing' completed with detailed results" in content.text break elif status.status == "failed": pytest.fail(f"Async operation failed: {status.error}") - await asyncio.sleep(0.01) + + attempt += 1 + await asyncio.sleep(0.5) + else: + pytest.fail("Async operation timed out") @pytest.mark.anyio @pytest.mark.parametrize( "server_transport", [ - ("async_tools", "sse"), - ("async_tools", "streamable-http"), + ("async_tool_basic", "sse"), + ("async_tool_basic", "streamable-http"), ], indirect=True, ) @@ -1031,10 +908,10 @@ async def test_immediate_result_backward_compatibility(server_transport: str, se # Test initialization result = await session.initialize() assert isinstance(result, InitializeResult) - assert result.serverInfo.name == "Async Tools Demo" + assert result.serverInfo.name == "Async Tool Basic" # Test async tool without immediate_result (should have empty content initially) - async_result = await session.call_tool("async_only_tool", {"data": "test_data"}) + async_result = await 
session.call_tool("analyze_data", {"dataset": "test_data"}) # Should have empty content array (no immediate result) assert len(async_result.content) == 0 @@ -1043,8 +920,10 @@ async def test_immediate_result_backward_compatibility(server_transport: str, se assert async_result.operation is not None token = async_result.operation.token - # Poll for final result - while True: + # Poll for final result with timeout + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: status = await session.get_operation_status(token) if status.status == "completed": final_result = await session.get_operation_result(token) @@ -1052,8 +931,230 @@ async def test_immediate_result_backward_compatibility(server_transport: str, se assert len(final_result.result.content) == 1 content = final_result.result.content[0] assert isinstance(content, TextContent) - assert "Async analysis result for: test_data" in content.text + assert "Analysis results for test_data: 95% accuracy achieved" in content.text break elif status.status == "failed": pytest.fail(f"Async operation failed: {status.error}") + + attempt += 1 + await asyncio.sleep(0.5) + else: + pytest.fail("Async operation timed out") await asyncio.sleep(0.01) + + +# Test async progress notifications +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tool_progress", "sse"), + ("async_tool_progress", "streamable-http"), + ], + indirect=True, +) +async def test_async_tool_progress(server_transport: str, server_url: str) -> None: + """Test async tools with progress notifications.""" + transport = server_transport + collector = NotificationCollector() + + async def message_handler(message: RequestResponder[ServerRequest, ClientResult] | ServerNotification | Exception): + await collector.handle_generic_notification(message) + if isinstance(message, Exception): + raise message + + client_cm = create_client_for_transport(transport, server_url) + + async with client_cm as client_streams: + read_stream, 
write_stream = unpack_streams(client_streams) + async with ClientSession( + read_stream, write_stream, protocol_version="next", message_handler=message_handler + ) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tool Progress" + + # Test batch processing with progress + progress_updates = [] + + async def progress_callback(progress: float, total: float | None, message: str | None) -> None: + progress_updates.append((progress, total, message)) + + batch_result = await session.call_tool( + "batch_process", + {"items": ["apple", "banana", "cherry"]}, + progress_callback=progress_callback, + ) + assert batch_result.operation is not None + token = batch_result.operation.token + + # Poll for completion + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + + # Check structured content + if final_result.result.structuredContent: + assert isinstance(final_result.result.structuredContent, dict) + assert "result" in final_result.result.structuredContent + processed_items = final_result.result.structuredContent["result"] + assert len(processed_items) == 3 + assert all("PROCESSED_" in item for item in processed_items) + break + elif status.status == "failed": + pytest.fail(f"Batch operation failed: {status.error}") + + attempt += 1 + await asyncio.sleep(0.3) + else: + pytest.fail("Batch operation timed out") + + # Verify progress updates were received + assert len(progress_updates) == 3 + for i, (progress, total, message) in enumerate(progress_updates): + expected_progress = (i + 1) / 3 + assert abs(progress - expected_progress) < 0.01 + assert total == 1.0 + assert f"Processed {i + 1}/3" in message + + +# Test async elicitation +@pytest.mark.anyio 
+@pytest.mark.parametrize( + "server_transport", + [ + ("async_tool_elicitation", "streamable-http"), # Only test streamable-http for elicitation + ], + indirect=True, +) +async def test_async_tool_elicitation(server_transport: str, server_url: str) -> None: + """Test async tools with elicitation.""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async def test_elicitation_callback(context: RequestContext[ClientSession, None], params: ElicitRequestParams): + """Handle elicitation requests from the server.""" + if "data_migration" in params.message: + return ElicitResult( + action="accept", + content={"continue_processing": True, "priority_level": "high"}, + ) + elif "file operation" in params.message.lower(): + return ElicitResult( + action="accept", + content={"confirm_operation": True, "backup_first": True}, + ) + else: + return ElicitResult(action="decline") + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession( + read_stream, + write_stream, + protocol_version="next", + elicitation_callback=test_elicitation_callback, + ) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tool Elicitation" + + # Test process with confirmation + elicit_result = await session.call_tool("process_with_confirmation", {"operation": "data_migration"}) + assert elicit_result.operation is not None + token = elicit_result.operation.token + + # Poll for completion + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert 
"Operation 'data_migration' completed successfully with high priority" in content.text + break + elif status.status == "failed": + pytest.fail(f"Elicitation operation failed: {status.error}") + + attempt += 1 + await asyncio.sleep(0.3) + else: + pytest.fail("Elicitation operation timed out") + + +# Test async sampling +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + ("async_tool_sampling", "sse"), + ("async_tool_sampling", "streamable-http"), + ], + indirect=True, +) +async def test_async_tool_sampling(server_transport: str, server_url: str) -> None: + """Test async tools with sampling (LLM interaction).""" + transport = server_transport + client_cm = create_client_for_transport(transport, server_url) + + async def test_sampling_callback( + context: RequestContext[ClientSession, None], params: CreateMessageRequestParams + ) -> CreateMessageResult: + """Handle sampling requests from the server.""" + return CreateMessageResult( + role="assistant", + content=TextContent(type="text", text="This is a simulated LLM response for testing"), + model="test-model", + ) + + async with client_cm as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession( + read_stream, + write_stream, + protocol_version="next", + sampling_callback=test_sampling_callback, + ) as session: + # Test initialization + result = await session.initialize() + assert isinstance(result, InitializeResult) + assert result.serverInfo.name == "Async Tool Sampling" + + # Test content generation + sampling_result = await session.call_tool( + "generate_content", {"topic": "artificial intelligence", "content_type": "poem"} + ) + assert sampling_result.operation is not None + token = sampling_result.operation.token + + # Poll for completion + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await 
session.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + assert "Generated poem about 'artificial intelligence'" in content.text + assert "This is a simulated LLM response" in content.text + break + elif status.status == "failed": + pytest.fail(f"Sampling operation failed: {status.error}") + + attempt += 1 + await asyncio.sleep(0.3) + else: + pytest.fail("Sampling operation timed out") From b33721e9a14a3e9fceb1afcbad60e98988a04a3c Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 1 Oct 2025 12:21:42 -0700 Subject: [PATCH 24/41] Move operations into "working" state before tool execution --- src/mcp/server/lowlevel/server.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index c3c88f75d..415efc16b 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -544,6 +544,7 @@ async def handler(req: types.CallToolRequest): async def execute_async(): try: logger.debug(f"Starting async execution of {tool_name}") + self.async_operations.mark_working(operation.token) results = await func(tool_name, arguments) logger.debug(f"Async execution completed for {tool_name}") From 5e7bc5e52d50f642aef98023e466bc7342be3c45 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 1 Oct 2025 12:21:57 -0700 Subject: [PATCH 25/41] Add reconnect example for async tools --- .../clients/async-reconnect-client/README.md | 79 ++ .../mcp_async_reconnect_client/__init__.py | 0 .../mcp_async_reconnect_client/client.py | 47 ++ .../async-reconnect-client/pyproject.toml | 49 ++ .../clients/async-reconnect-client/uv.lock | 761 ++++++++++++++++++ .../servers/simple-tool-async/.python-version | 1 + examples/servers/simple-tool-async/README.md | 56 ++ .../mcp_simple_tool_async/__init__.py | 1 + .../mcp_simple_tool_async/__main__.py | 5 + 
.../mcp_simple_tool_async/server.py | 41 + .../servers/simple-tool-async/pyproject.toml | 47 ++ uv.lock | 34 + 12 files changed, 1121 insertions(+) create mode 100644 examples/clients/async-reconnect-client/README.md create mode 100644 examples/clients/async-reconnect-client/mcp_async_reconnect_client/__init__.py create mode 100644 examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py create mode 100644 examples/clients/async-reconnect-client/pyproject.toml create mode 100644 examples/clients/async-reconnect-client/uv.lock create mode 100644 examples/servers/simple-tool-async/.python-version create mode 100644 examples/servers/simple-tool-async/README.md create mode 100644 examples/servers/simple-tool-async/mcp_simple_tool_async/__init__.py create mode 100644 examples/servers/simple-tool-async/mcp_simple_tool_async/__main__.py create mode 100644 examples/servers/simple-tool-async/mcp_simple_tool_async/server.py create mode 100644 examples/servers/simple-tool-async/pyproject.toml diff --git a/examples/clients/async-reconnect-client/README.md b/examples/clients/async-reconnect-client/README.md new file mode 100644 index 000000000..3a4bdb7c0 --- /dev/null +++ b/examples/clients/async-reconnect-client/README.md @@ -0,0 +1,79 @@ +# Async Reconnect Client Example + +A demonstration of how to use the MCP Python SDK to call async tools and handle operation tokens for resuming long-running operations. + +## Features + +- Async tool invocation with operation tokens +- Operation status polling and result retrieval +- Support for resuming operations with existing tokens + +## Installation + +```bash +cd examples/clients/async-reconnect-client +uv sync --reinstall +``` + +## Usage + +### 1. Start an MCP server with async tools + +```bash +# Example with simple-tool-async server +cd examples/servers/simple-tool-async +uv run mcp-simple-tool-async --transport streamable-http --port 8000 +``` + +### 2. 
Run the client + +```bash +# Connect to default endpoint +uv run mcp-async-reconnect-client + +# Connect to custom endpoint +uv run mcp-async-reconnect-client --endpoint http://localhost:3001/mcp + +# Resume with existing operation token +uv run mcp-async-reconnect-client --token your-operation-token-here +``` + +## Example + +The client will call the `fetch_website` async tool and demonstrate: + +1. Starting an async operation and receiving an operation token +2. Polling the operation status until completion +3. Retrieving the final result when the operation completes + +```bash +$ uv run mcp-async-reconnect-client +Calling async tool... +Operation started with token: abc123... +Status: submitted +Status: working +Status: completed +Result: ... +``` + +The client can be terminated during polling and resumed with the returned token, demonstrating how reconnection is supported: + +```bash +$ uv run mcp-async-reconnect-client +Calling async tool... +Operation started with token: abc123... +Status: working +^C +Aborted! +$ uv run mcp-async-reconnect-client --token=abc123... +Calling async tool... +Status: completed +Result: ... +``` + +## Configuration + +- `--endpoint` - MCP server endpoint (default: http://127.0.0.1:8000/mcp) +- `--token` - Operation token to resume with (optional) + +This example showcases the async tool capabilities introduced in MCP protocol version "next", allowing for long-running operations that can be resumed even if the client disconnects. 
diff --git a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/__init__.py b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py new file mode 100644 index 000000000..9d1924ac4 --- /dev/null +++ b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py @@ -0,0 +1,47 @@ +import asyncio + +import click +from mcp import ClientSession, types +from mcp.client.streamable_http import streamablehttp_client + + +async def call_async_tool(session: ClientSession, token: str | None): + """Demonstrate calling an async tool.""" + print("Calling async tool...") + + if not token: + result = await session.call_tool("fetch_website", arguments={"url": "https://modelcontextprotocol.io"}) + assert result.operation + token = result.operation.token + print(f"Operation started with token: {token}") + + # Poll for completion + while True: + status = await session.get_operation_status(token) + print(f"Status: {status.status}") + + if status.status == "completed": + final_result = await session.get_operation_result(token) + for content in final_result.result.content: + if isinstance(content, types.TextContent): + print(f"Result: {content.text}") + break + elif status.status == "failed": + print(f"Operation failed: {status.error}") + break + + await asyncio.sleep(0.5) + + +async def run_session(endpoint: str, token: str | None): + async with streamablehttp_client(endpoint) as (read, write, _): + async with ClientSession(read, write, protocol_version="next") as session: + await session.initialize() + await call_async_tool(session, token) + + +@click.command() +@click.option("--endpoint", default="http://127.0.0.1:8000/mcp", help="Endpoint to connect to") +@click.option("--token", default=None, help="Operation token 
to resume with") +def main(endpoint: str, token: str | None): + asyncio.run(run_session(endpoint, token)) diff --git a/examples/clients/async-reconnect-client/pyproject.toml b/examples/clients/async-reconnect-client/pyproject.toml new file mode 100644 index 000000000..53c66ea28 --- /dev/null +++ b/examples/clients/async-reconnect-client/pyproject.toml @@ -0,0 +1,49 @@ +[project] +name = "mcp-async-reconnect-client" +version = "0.1.0" +description = "A client for the MCP simple-tool-async server that supports reconnection" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Anthropic" }] +keywords = ["mcp", "client", "async"] +license = { text = "MIT" } +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", +] +dependencies = ["click>=8.2.0", "mcp>=1.0.0"] + +[project.scripts] +mcp-async-reconnect-client = "mcp_async_reconnect_client.client:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_async_reconnect_client"] + +[tool.pyright] +include = ["mcp_async_reconnect_client"] +venvPath = "." 
+venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 120 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.379", "pytest>=8.3.3", "ruff>=0.6.9"] + +[tool.uv.sources] +mcp = { path = "../../../" } + +[[tool.uv.index]] +url = "https://pypi.org/simple" diff --git a/examples/clients/async-reconnect-client/uv.lock b/examples/clients/async-reconnect-client/uv.lock new file mode 100644 index 000000000..21173abdc --- /dev/null +++ b/examples/clients/async-reconnect-client/uv.lock @@ -0,0 +1,761 @@ +version = 1 +revision = 2 +requires-python = ">=3.10" + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "click" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = 
"sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = 
"sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = "sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "mcp" +source = { directory = "../../../" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] + +[package.metadata] +requires-dist = [ + { name = "anyio", specifier = ">=4.5" }, + { name = "httpx", specifier = ">=0.27.1" }, + { name = "httpx-sse", specifier = ">=0.4" }, + { name = "jsonschema", specifier = ">=4.20.0" }, + { name = "pydantic", specifier = ">=2.11.0,<3.0.0" }, + { name = "pydantic-settings", specifier = ">=2.5.2" }, + { name = "python-dotenv", marker = "extra == 'cli'", specifier = ">=1.0.0" }, + { name = "python-multipart", specifier = ">=0.0.9" }, + { name = "pywin32", marker = "sys_platform == 'win32'", specifier = ">=310" }, + { name = "rich", marker = "extra == 'rich'", specifier = ">=13.9.4" }, + { name = "sse-starlette", specifier = ">=1.6.1" }, + { name = "starlette", specifier = ">=0.27" }, + { name = "typer", marker = "extra == 'cli'", specifier = ">=0.16.0" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'", specifier = ">=0.31.1" }, + { name = "websockets", marker = "extra == 'ws'", specifier = ">=15.0.1" }, +] +provides-extras = ["cli", "rich", "ws"] + +[package.metadata.requires-dev] +dev = [ + { name = "dirty-equals", specifier = ">=0.9.0" }, + { name = "inline-snapshot", specifier = ">=0.23.0" }, + { name = "pyright", specifier = ">=1.1.400" }, + { name = "pytest", 
specifier = ">=8.3.4" }, + { name = "pytest-examples", specifier = ">=0.0.14" }, + { name = "pytest-flakefinder", specifier = ">=1.1.0" }, + { name = "pytest-pretty", specifier = ">=1.2.0" }, + { name = "pytest-xdist", specifier = ">=3.6.1" }, + { name = "ruff", specifier = ">=0.8.5" }, + { name = "trio", specifier = ">=0.26.2" }, +] +docs = [ + { name = "mkdocs", specifier = ">=1.6.1" }, + { name = "mkdocs-glightbox", specifier = ">=0.4.0" }, + { name = "mkdocs-material", extras = ["imaging"], specifier = ">=9.5.45" }, + { name = "mkdocstrings-python", specifier = ">=1.12.2" }, +] + +[[package]] +name = "mcp-async-reconnect-client" +version = "0.1.0" +source = { editable = "." } +dependencies = [ + { name = "click" }, + { name = "mcp" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.2.0" }, + { name = "mcp", directory = "../../../" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.379" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "ruff", specifier = ">=0.6.9" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pydantic" +version = "2.11.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = 
"sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.33.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, + { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, + { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, + { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, + { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, + { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, + { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, + { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, + { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, + { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, + { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, + { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, + { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, + { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, + { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = 
"2025-04-23T18:31:16.393Z" }, + { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, + { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, + { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, + { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, + { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, + { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = 
"2025-04-23T18:31:39.095Z" }, + { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, + { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, + { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, + { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, + { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, + { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, + { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, + { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, + { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, + { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, + { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, + { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, + { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, + { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, + { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, + { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, + { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = 
"2025-04-23T18:33:06.391Z" }, + { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, + { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = "2025-04-23T18:33:10.313Z" }, + { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, + { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, + { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, + { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, 
+ { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, + { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, upload-time = "2025-04-23T18:33:22.501Z" }, + { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, + { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, + { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, +] + 
+[[package]] +name = "pydantic-settings" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.405" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = 
"2025-09-04T03:37:06.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = 
"sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, + { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = 
"sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "referencing" +version = "0.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/ed/3aef893e2dd30e77e35d20d4ddb45ca459db59cead748cad9796ad479411/rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef", size = 371606, upload-time = "2025-08-27T12:12:25.189Z" }, + { url = "https://files.pythonhosted.org/packages/6d/82/9818b443e5d3eb4c83c3994561387f116aae9833b35c484474769c4a8faf/rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be", size = 353452, upload-time = "2025-08-27T12:12:27.433Z" }, + { url = "https://files.pythonhosted.org/packages/99/c7/d2a110ffaaa397fc6793a83c7bd3545d9ab22658b7cdff05a24a4535cc45/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61", size = 381519, upload-time = "2025-08-27T12:12:28.719Z" }, + { url = 
"https://files.pythonhosted.org/packages/5a/bc/e89581d1f9d1be7d0247eaef602566869fdc0d084008ba139e27e775366c/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb", size = 394424, upload-time = "2025-08-27T12:12:30.207Z" }, + { url = "https://files.pythonhosted.org/packages/ac/2e/36a6861f797530e74bb6ed53495f8741f1ef95939eed01d761e73d559067/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657", size = 523467, upload-time = "2025-08-27T12:12:31.808Z" }, + { url = "https://files.pythonhosted.org/packages/c4/59/c1bc2be32564fa499f988f0a5c6505c2f4746ef96e58e4d7de5cf923d77e/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013", size = 402660, upload-time = "2025-08-27T12:12:33.444Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ec/ef8bf895f0628dd0a59e54d81caed6891663cb9c54a0f4bb7da918cb88cf/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a", size = 384062, upload-time = "2025-08-27T12:12:34.857Z" }, + { url = "https://files.pythonhosted.org/packages/69/f7/f47ff154be8d9a5e691c083a920bba89cef88d5247c241c10b9898f595a1/rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1", size = 401289, upload-time = "2025-08-27T12:12:36.085Z" }, + { url = "https://files.pythonhosted.org/packages/3b/d9/ca410363efd0615814ae579f6829cafb39225cd63e5ea5ed1404cb345293/rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10", size = 417718, upload-time = "2025-08-27T12:12:37.401Z" }, + { url = 
"https://files.pythonhosted.org/packages/e3/a0/8cb5c2ff38340f221cc067cc093d1270e10658ba4e8d263df923daa18e86/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808", size = 558333, upload-time = "2025-08-27T12:12:38.672Z" }, + { url = "https://files.pythonhosted.org/packages/6f/8c/1b0de79177c5d5103843774ce12b84caa7164dfc6cd66378768d37db11bf/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8", size = 589127, upload-time = "2025-08-27T12:12:41.48Z" }, + { url = "https://files.pythonhosted.org/packages/c8/5e/26abb098d5e01266b0f3a2488d299d19ccc26849735d9d2b95c39397e945/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9", size = 554899, upload-time = "2025-08-27T12:12:42.925Z" }, + { url = "https://files.pythonhosted.org/packages/de/41/905cc90ced13550db017f8f20c6d8e8470066c5738ba480d7ba63e3d136b/rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4", size = 217450, upload-time = "2025-08-27T12:12:44.813Z" }, + { url = "https://files.pythonhosted.org/packages/75/3d/6bef47b0e253616ccdf67c283e25f2d16e18ccddd38f92af81d5a3420206/rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1", size = 228447, upload-time = "2025-08-27T12:12:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063, upload-time = "2025-08-27T12:12:47.856Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210, upload-time = "2025-08-27T12:12:49.187Z" }, + { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636, upload-time = "2025-08-27T12:12:50.492Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341, upload-time = "2025-08-27T12:12:52.024Z" }, + { url = "https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428, upload-time = "2025-08-27T12:12:53.779Z" }, + { url = "https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923, upload-time = "2025-08-27T12:12:55.15Z" }, + { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 384094, upload-time = "2025-08-27T12:12:57.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093, upload-time = "2025-08-27T12:12:58.985Z" }, + { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969, upload-time = "2025-08-27T12:13:00.367Z" }, + { url = "https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302, upload-time = "2025-08-27T12:13:01.737Z" }, + { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259, upload-time = "2025-08-27T12:13:03.127Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983, upload-time = "2025-08-27T12:13:04.516Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154, upload-time = "2025-08-27T12:13:06.278Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627, upload-time = "2025-08-27T12:13:07.625Z" }, + { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998, upload-time = "2025-08-27T12:13:08.972Z" }, + { url = "https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, + { url = "https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976, upload-time = "2025-08-27T12:13:14.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953, upload-time = "2025-08-27T12:13:15.774Z" }, + { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915, upload-time = "2025-08-27T12:13:17.379Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883, upload-time = "2025-08-27T12:13:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699, upload-time = "2025-08-27T12:13:20.089Z" }, + { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713, upload-time = "2025-08-27T12:13:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324, upload-time = "2025-08-27T12:13:22.789Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646, upload-time = "2025-08-27T12:13:24.122Z" }, + { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137, upload-time = "2025-08-27T12:13:25.557Z" }, + { url = "https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343, upload-time = "2025-08-27T12:13:26.967Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497, upload-time = "2025-08-27T12:13:28.326Z" }, + { url = "https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790, upload-time = "2025-08-27T12:13:29.71Z" }, + { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741, upload-time = "2025-08-27T12:13:31.039Z" }, + { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", 
hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574, upload-time = "2025-08-27T12:13:32.902Z" }, + { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051, upload-time = "2025-08-27T12:13:34.228Z" }, + { url = "https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395, upload-time = "2025-08-27T12:13:36.132Z" }, + { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334, upload-time = "2025-08-27T12:13:37.562Z" }, + { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691, upload-time = "2025-08-27T12:13:38.94Z" }, + { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868, upload-time = "2025-08-27T12:13:40.192Z" }, + { url = "https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = 
"sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469, upload-time = "2025-08-27T12:13:41.496Z" }, + { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125, upload-time = "2025-08-27T12:13:42.802Z" }, + { url = "https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341, upload-time = "2025-08-27T12:13:44.472Z" }, + { url = "https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511, upload-time = "2025-08-27T12:13:45.898Z" }, + { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736, upload-time = "2025-08-27T12:13:47.408Z" }, + { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, + { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, + 
{ url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, + { url = "https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355, upload-time = "2025-08-27T12:13:54.012Z" }, + { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138, upload-time = "2025-08-27T12:13:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247, upload-time = "2025-08-27T12:13:57.683Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699, upload-time = "2025-08-27T12:13:59.137Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852, upload-time = "2025-08-27T12:14:00.583Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582, upload-time = "2025-08-27T12:14:02.034Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126, upload-time = "2025-08-27T12:14:03.437Z" }, + { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486, upload-time = "2025-08-27T12:14:05.443Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832, upload-time = "2025-08-27T12:14:06.902Z" }, + { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249, upload-time = "2025-08-27T12:14:08.37Z" }, + { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356, upload-time = "2025-08-27T12:14:10.034Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, + { url = "https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, + { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, + { url = "https://files.pythonhosted.org/packages/70/36/b6e6066520a07cf029d385de869729a895917b411e777ab1cde878100a1d/rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334", size = 362472, upload-time = "2025-08-27T12:14:16.333Z" }, + { url = "https://files.pythonhosted.org/packages/af/07/b4646032e0dcec0df9c73a3bd52f63bc6c5f9cda992f06bd0e73fe3fbebd/rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33", size = 345676, upload-time = "2025-08-27T12:14:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/b0/16/2f1003ee5d0af4bcb13c0cf894957984c32a6751ed7206db2aee7379a55e/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a", size = 385313, upload-time = "2025-08-27T12:14:19.829Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/cd/7eb6dd7b232e7f2654d03fa07f1414d7dfc980e82ba71e40a7c46fd95484/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b", size = 399080, upload-time = "2025-08-27T12:14:21.531Z" }, + { url = "https://files.pythonhosted.org/packages/20/51/5829afd5000ec1cb60f304711f02572d619040aa3ec033d8226817d1e571/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7", size = 523868, upload-time = "2025-08-27T12:14:23.485Z" }, + { url = "https://files.pythonhosted.org/packages/05/2c/30eebca20d5db95720ab4d2faec1b5e4c1025c473f703738c371241476a2/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136", size = 408750, upload-time = "2025-08-27T12:14:24.924Z" }, + { url = "https://files.pythonhosted.org/packages/90/1a/cdb5083f043597c4d4276eae4e4c70c55ab5accec078da8611f24575a367/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff", size = 387688, upload-time = "2025-08-27T12:14:27.537Z" }, + { url = "https://files.pythonhosted.org/packages/7c/92/cf786a15320e173f945d205ab31585cc43969743bb1a48b6888f7a2b0a2d/rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9", size = 407225, upload-time = "2025-08-27T12:14:28.981Z" }, + { url = "https://files.pythonhosted.org/packages/33/5c/85ee16df5b65063ef26017bef33096557a4c83fbe56218ac7cd8c235f16d/rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60", size = 423361, upload-time = "2025-08-27T12:14:30.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/8e/1c2741307fcabd1a334ecf008e92c4f47bb6f848712cf15c923becfe82bb/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e", size = 562493, upload-time = "2025-08-27T12:14:31.987Z" }, + { url = "https://files.pythonhosted.org/packages/04/03/5159321baae9b2222442a70c1f988cbbd66b9be0675dd3936461269be360/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212", size = 592623, upload-time = "2025-08-27T12:14:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/ff/39/c09fd1ad28b85bc1d4554a8710233c9f4cefd03d7717a1b8fbfd171d1167/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675", size = 558800, upload-time = "2025-08-27T12:14:35.436Z" }, + { url = "https://files.pythonhosted.org/packages/c5/d6/99228e6bbcf4baa764b18258f519a9035131d91b538d4e0e294313462a98/rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3", size = 221943, upload-time = "2025-08-27T12:14:36.898Z" }, + { url = "https://files.pythonhosted.org/packages/be/07/c802bc6b8e95be83b79bdf23d1aa61d68324cb1006e245d6c58e959e314d/rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456", size = 233739, upload-time = "2025-08-27T12:14:38.386Z" }, + { url = "https://files.pythonhosted.org/packages/c8/89/3e1b1c16d4c2d547c5717377a8df99aee8099ff050f87c45cb4d5fa70891/rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3", size = 223120, upload-time = "2025-08-27T12:14:39.82Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/7e/dc7931dc2fa4a6e46b2a4fa744a9fe5c548efd70e0ba74f40b39fa4a8c10/rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2", size = 358944, upload-time = "2025-08-27T12:14:41.199Z" }, + { url = "https://files.pythonhosted.org/packages/e6/22/4af76ac4e9f336bfb1a5f240d18a33c6b2fcaadb7472ac7680576512b49a/rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4", size = 342283, upload-time = "2025-08-27T12:14:42.699Z" }, + { url = "https://files.pythonhosted.org/packages/1c/15/2a7c619b3c2272ea9feb9ade67a45c40b3eeb500d503ad4c28c395dc51b4/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e", size = 380320, upload-time = "2025-08-27T12:14:44.157Z" }, + { url = "https://files.pythonhosted.org/packages/a2/7d/4c6d243ba4a3057e994bb5bedd01b5c963c12fe38dde707a52acdb3849e7/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817", size = 391760, upload-time = "2025-08-27T12:14:45.845Z" }, + { url = "https://files.pythonhosted.org/packages/b4/71/b19401a909b83bcd67f90221330bc1ef11bc486fe4e04c24388d28a618ae/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec", size = 522476, upload-time = "2025-08-27T12:14:47.364Z" }, + { url = "https://files.pythonhosted.org/packages/e4/44/1a3b9715c0455d2e2f0f6df5ee6d6f5afdc423d0773a8a682ed2b43c566c/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a", size = 403418, upload-time = "2025-08-27T12:14:49.991Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/4b/fb6c4f14984eb56673bc868a66536f53417ddb13ed44b391998100a06a96/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8", size = 384771, upload-time = "2025-08-27T12:14:52.159Z" }, + { url = "https://files.pythonhosted.org/packages/c0/56/d5265d2d28b7420d7b4d4d85cad8ef891760f5135102e60d5c970b976e41/rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48", size = 400022, upload-time = "2025-08-27T12:14:53.859Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e9/9f5fc70164a569bdd6ed9046486c3568d6926e3a49bdefeeccfb18655875/rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb", size = 416787, upload-time = "2025-08-27T12:14:55.673Z" }, + { url = "https://files.pythonhosted.org/packages/d4/64/56dd03430ba491db943a81dcdef115a985aac5f44f565cd39a00c766d45c/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734", size = 557538, upload-time = "2025-08-27T12:14:57.245Z" }, + { url = "https://files.pythonhosted.org/packages/3f/36/92cc885a3129993b1d963a2a42ecf64e6a8e129d2c7cc980dbeba84e55fb/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb", size = 588512, upload-time = "2025-08-27T12:14:58.728Z" }, + { url = "https://files.pythonhosted.org/packages/dd/10/6b283707780a81919f71625351182b4f98932ac89a09023cb61865136244/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0", size = 555813, upload-time = "2025-08-27T12:15:00.334Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/2e/30b5ea18c01379da6272a92825dd7e53dc9d15c88a19e97932d35d430ef7/rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a", size = 217385, upload-time = "2025-08-27T12:15:01.937Z" }, + { url = "https://files.pythonhosted.org/packages/32/7d/97119da51cb1dd3f2f3c0805f155a3aa4a95fa44fe7d78ae15e69edf4f34/rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772", size = 230097, upload-time = "2025-08-27T12:15:03.961Z" }, + { url = "https://files.pythonhosted.org/packages/d5/63/b7cc415c345625d5e62f694ea356c58fb964861409008118f1245f8c3347/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf", size = 371360, upload-time = "2025-08-27T12:15:29.218Z" }, + { url = "https://files.pythonhosted.org/packages/e5/8c/12e1b24b560cf378b8ffbdb9dc73abd529e1adcfcf82727dfd29c4a7b88d/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3", size = 353933, upload-time = "2025-08-27T12:15:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/9b/85/1bb2210c1f7a1b99e91fea486b9f0f894aa5da3a5ec7097cbad7dec6d40f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636", size = 382962, upload-time = "2025-08-27T12:15:32.348Z" }, + { url = "https://files.pythonhosted.org/packages/cc/c9/a839b9f219cf80ed65f27a7f5ddbb2809c1b85c966020ae2dff490e0b18e/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8", size = 394412, upload-time = "2025-08-27T12:15:33.839Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/2d/b1d7f928b0b1f4fc2e0133e8051d199b01d7384875adc63b6ddadf3de7e5/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc", size = 523972, upload-time = "2025-08-27T12:15:35.377Z" }, + { url = "https://files.pythonhosted.org/packages/a9/af/2cbf56edd2d07716df1aec8a726b3159deb47cb5c27e1e42b71d705a7c2f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8", size = 403273, upload-time = "2025-08-27T12:15:37.051Z" }, + { url = "https://files.pythonhosted.org/packages/c0/93/425e32200158d44ff01da5d9612c3b6711fe69f606f06e3895511f17473b/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc", size = 385278, upload-time = "2025-08-27T12:15:38.571Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1a/1a04a915ecd0551bfa9e77b7672d1937b4b72a0fc204a17deef76001cfb2/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71", size = 402084, upload-time = "2025-08-27T12:15:40.529Z" }, + { url = "https://files.pythonhosted.org/packages/51/f7/66585c0fe5714368b62951d2513b684e5215beaceab2c6629549ddb15036/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad", size = 419041, upload-time = "2025-08-27T12:15:42.191Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7e/83a508f6b8e219bba2d4af077c35ba0e0cdd35a751a3be6a7cba5a55ad71/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab", size = 560084, upload-time = "2025-08-27T12:15:43.839Z" }, + { 
url = "https://files.pythonhosted.org/packages/66/66/bb945683b958a1b19eb0fe715594630d0f36396ebdef4d9b89c2fa09aa56/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059", size = 590115, upload-time = "2025-08-27T12:15:46.647Z" }, + { url = "https://files.pythonhosted.org/packages/12/00/ccfaafaf7db7e7adace915e5c2f2c2410e16402561801e9c7f96683002d3/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b", size = 556561, upload-time = "2025-08-27T12:15:48.219Z" }, + { url = "https://files.pythonhosted.org/packages/e1/b7/92b6ed9aad103bfe1c45df98453dfae40969eef2cb6c6239c58d7e96f1b3/rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819", size = 229125, upload-time = "2025-08-27T12:15:49.956Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402, upload-time = "2025-08-27T12:15:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084, upload-time = "2025-08-27T12:15:53.219Z" }, + { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090, upload-time = "2025-08-27T12:15:55.158Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519, upload-time = "2025-08-27T12:15:57.238Z" }, + { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817, upload-time = "2025-08-27T12:15:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240, upload-time = "2025-08-27T12:16:00.923Z" }, + { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194, upload-time = "2025-08-27T12:16:02.802Z" }, + { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086, upload-time = "2025-08-27T12:16:04.806Z" }, + { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272, upload-time = 
"2025-08-27T12:16:06.471Z" }, + { url = "https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003, upload-time = "2025-08-27T12:16:08.06Z" }, + { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482, upload-time = "2025-08-27T12:16:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523, upload-time = "2025-08-27T12:16:12.188Z" }, +] + +[[package]] +name = "ruff" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/df/8d7d8c515d33adfc540e2edf6c6021ea1c5a58a678d8cfce9fae59aabcab/ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff", size = 5416417, upload-time = "2025-09-25T14:54:09.936Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/84/5716a7fa4758e41bf70e603e13637c42cfb9dbf7ceb07180211b9bbf75ef/ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3", size = 12343254, upload-time = "2025-09-25T14:53:27.784Z" }, + { url = "https://files.pythonhosted.org/packages/9b/77/c7042582401bb9ac8eff25360e9335e901d7a1c0749a2b28ba4ecb239991/ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2", size = 13040891, upload-time = "2025-09-25T14:53:31.38Z" }, 
+ { url = "https://files.pythonhosted.org/packages/c6/15/125a7f76eb295cb34d19c6778e3a82ace33730ad4e6f28d3427e134a02e0/ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46", size = 12243588, upload-time = "2025-09-25T14:53:33.543Z" }, + { url = "https://files.pythonhosted.org/packages/9e/eb/0093ae04a70f81f8be7fd7ed6456e926b65d238fc122311293d033fdf91e/ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6", size = 12491359, upload-time = "2025-09-25T14:53:35.892Z" }, + { url = "https://files.pythonhosted.org/packages/43/fe/72b525948a6956f07dad4a6f122336b6a05f2e3fd27471cea612349fedb9/ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07", size = 12162486, upload-time = "2025-09-25T14:53:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e3/0fac422bbbfb2ea838023e0d9fcf1f30183d83ab2482800e2cb892d02dfe/ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8", size = 13871203, upload-time = "2025-09-25T14:53:41.943Z" }, + { url = "https://files.pythonhosted.org/packages/6b/82/b721c8e3ec5df6d83ba0e45dcf00892c4f98b325256c42c38ef136496cbf/ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89", size = 14929635, upload-time = "2025-09-25T14:53:43.953Z" }, + { url = "https://files.pythonhosted.org/packages/c4/a0/ad56faf6daa507b83079a1ad7a11694b87d61e6bf01c66bd82b466f21821/ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0", size = 14338783, upload-time = "2025-09-25T14:53:46.205Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/77/ad1d9156db8f99cd01ee7e29d74b34050e8075a8438e589121fcd25c4b08/ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa", size = 13355322, upload-time = "2025-09-25T14:53:48.164Z" }, + { url = "https://files.pythonhosted.org/packages/64/8b/e87cfca2be6f8b9f41f0bb12dc48c6455e2d66df46fe61bb441a226f1089/ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3", size = 13354427, upload-time = "2025-09-25T14:53:50.486Z" }, + { url = "https://files.pythonhosted.org/packages/7f/df/bf382f3fbead082a575edb860897287f42b1b3c694bafa16bc9904c11ed3/ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d", size = 13537637, upload-time = "2025-09-25T14:53:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/51/70/1fb7a7c8a6fc8bd15636288a46e209e81913b87988f26e1913d0851e54f4/ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b", size = 12340025, upload-time = "2025-09-25T14:53:54.88Z" }, + { url = "https://files.pythonhosted.org/packages/4c/27/1e5b3f1c23ca5dd4106d9d580e5c13d9acb70288bff614b3d7b638378cc9/ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22", size = 12133449, upload-time = "2025-09-25T14:53:57.089Z" }, + { url = "https://files.pythonhosted.org/packages/2d/09/b92a5ccee289f11ab128df57d5911224197d8d55ef3bd2043534ff72ca54/ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736", size = 13051369, upload-time = "2025-09-25T14:53:59.124Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/99/26c9d1c7d8150f45e346dc045cc49f23e961efceb4a70c47dea0960dea9a/ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2", size = 13523644, upload-time = "2025-09-25T14:54:01.622Z" }, + { url = "https://files.pythonhosted.org/packages/f7/00/e7f1501e81e8ec290e79527827af1d88f541d8d26151751b46108978dade/ruff-0.13.2-py3-none-win32.whl", hash = "sha256:7c2a0b7c1e87795fec3404a485096bcd790216c7c146a922d121d8b9c8f1aaac", size = 12245990, upload-time = "2025-09-25T14:54:03.647Z" }, + { url = "https://files.pythonhosted.org/packages/ee/bd/d9f33a73de84fafd0146c6fba4f497c4565fe8fa8b46874b8e438869abc2/ruff-0.13.2-py3-none-win_amd64.whl", hash = "sha256:17d95fb32218357c89355f6f6f9a804133e404fc1f65694372e02a557edf8585", size = 13324004, upload-time = "2025-09-25T14:54:06.05Z" }, + { url = "https://files.pythonhosted.org/packages/c3/12/28fa2f597a605884deb0f65c1b1ae05111051b2a7030f5d8a4ff7f4599ba/ruff-0.13.2-py3-none-win_arm64.whl", hash = "sha256:da711b14c530412c827219312b7d7fbb4877fb31150083add7e8c5336549cea7", size = 12484437, upload-time = "2025-09-25T14:54:08.022Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, +] + +[[package]] +name = "starlette" +version = "0.48.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = 
"python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, +] diff --git a/examples/servers/simple-tool-async/.python-version b/examples/servers/simple-tool-async/.python-version new file mode 100644 index 000000000..c8cfe3959 --- /dev/null +++ b/examples/servers/simple-tool-async/.python-version @@ -0,0 +1 @@ +3.10 diff --git a/examples/servers/simple-tool-async/README.md b/examples/servers/simple-tool-async/README.md new file mode 100644 index 000000000..20878261f --- /dev/null +++ b/examples/servers/simple-tool-async/README.md @@ -0,0 +1,56 @@ +# Simple Tool Async Example + +A simple MCP server that demonstrates async tool execution with operation tokens and long-running operations. + +## Usage + +Start the server using either stdio (default) or streamable-http transport: + +```bash +# Using stdio transport (default) +uv run mcp-simple-tool-async + +# Using streamable-http transport on custom port +uv run mcp-simple-tool-async --transport streamable-http --port 8000 +``` + +The server exposes an async tool named "fetch_website" that accepts one required argument: + +- `url`: The URL of the website to fetch + +The tool runs asynchronously with a 5-second delay to simulate a long-running operation, making it useful for testing async tool capabilities. 
+ +## Example + +Using the MCP client with protocol version "next", you can use the async tool like this: + +```python +import asyncio +from mcp import ClientSession, types +from mcp.client.streamable_http import streamablehttp_client + + +async def main(): + async with streamablehttp_client("http://127.0.0.1:8000/mcp") as (read, write, _): + async with ClientSession(read, write, protocol_version="next") as session: + await session.initialize() + + # Call the async tool + result = await session.call_tool("fetch_website", {"url": "https://example.com"}) + + # Get operation token + token = result.operation.token + print(f"Operation started with token: {token}") + + # Poll for completion + while True: + status = await session.get_operation_status(token) + if status.status == "completed": + final_result = await session.get_operation_result(token) + print(f"Result: {final_result.result.content[0].text}") + break + await asyncio.sleep(0.5) + + +asyncio.run(main()) +``` diff --git a/examples/servers/simple-tool-async/mcp_simple_tool_async/__init__.py b/examples/servers/simple-tool-async/mcp_simple_tool_async/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/examples/servers/simple-tool-async/mcp_simple_tool_async/__init__.py @@ -0,0 +1 @@ + diff --git a/examples/servers/simple-tool-async/mcp_simple_tool_async/__main__.py b/examples/servers/simple-tool-async/mcp_simple_tool_async/__main__.py new file mode 100644 index 000000000..e7ef16530 --- /dev/null +++ b/examples/servers/simple-tool-async/mcp_simple_tool_async/__main__.py @@ -0,0 +1,5 @@ +import sys + +from .server import main + +sys.exit(main()) # type: ignore[call-arg] diff --git a/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py b/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py new file mode 100644 index 000000000..605097a3b --- /dev/null +++ b/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py @@ -0,0 +1,41 @@ +import asyncio + 
+import click +import mcp.types as types +import uvicorn +from mcp.server.fastmcp import FastMCP +from mcp.shared._httpx_utils import create_mcp_http_client + +mcp = FastMCP("mcp-website-fetcher") + + +@mcp.tool(invocation_modes=["async"]) +async def fetch_website( + url: str, +) -> list[types.ContentBlock]: + headers = {"User-Agent": "MCP Test Server (github.com/modelcontextprotocol/python-sdk)"} + async with create_mcp_http_client(headers=headers) as client: + await asyncio.sleep(5) + response = await client.get(url) + response.raise_for_status() + return [types.TextContent(type="text", text=response.text)] + + +@click.command() +@click.option("--port", default=8000, help="Port to listen on for HTTP") +@click.option( + "--transport", + type=click.Choice(["stdio", "streamable-http"]), + default="stdio", + help="Transport type", +) +def main(port: int, transport: str): + if transport == "stdio": + mcp.run(transport="stdio") + elif transport == "streamable-http": + app = mcp.streamable_http_app() + server = uvicorn.Server(config=uvicorn.Config(app=app, host="127.0.0.1", port=port, log_level="error")) + print(f"Starting {transport} server on port {port}") + server.run() + else: + raise ValueError(f"Invalid transport for test server: {transport}") diff --git a/examples/servers/simple-tool-async/pyproject.toml b/examples/servers/simple-tool-async/pyproject.toml new file mode 100644 index 000000000..46c00170d --- /dev/null +++ b/examples/servers/simple-tool-async/pyproject.toml @@ -0,0 +1,47 @@ +[project] +name = "mcp-simple-tool-async" +version = "0.1.0" +description = "A simple MCP server exposing an async website fetching tool" +readme = "README.md" +requires-python = ">=3.10" +authors = [{ name = "Anthropic, PBC." 
}] +maintainers = [ + { name = "David Soria Parra", email = "davidsp@anthropic.com" }, + { name = "Justin Spahr-Summers", email = "justin@anthropic.com" }, +] +keywords = ["mcp", "llm", "automation", "web", "fetch"] +license = { text = "MIT" } +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", +] +dependencies = ["anyio>=4.5", "click>=8.2.0", "httpx>=0.27", "mcp"] + +[project.scripts] +mcp-simple-tool-async = "mcp_simple_tool_async.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_simple_tool_async"] + +[tool.pyright] +include = ["mcp_simple_tool_async"] +venvPath = "." +venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 120 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/uv.lock b/uv.lock index 68abdcc4f..3ab753e87 100644 --- a/uv.lock +++ b/uv.lock @@ -12,6 +12,7 @@ members = [ "mcp-simple-streamablehttp", "mcp-simple-streamablehttp-stateless", "mcp-simple-tool", + "mcp-simple-tool-async", "mcp-snippets", ] @@ -937,6 +938,39 @@ dev = [ { name = "ruff", specifier = ">=0.6.9" }, ] +[[package]] +name = "mcp-simple-tool-async" +version = "0.1.0" +source = { editable = "examples/servers/simple-tool-async" } +dependencies = [ + { name = "anyio" }, + { name = "click" }, + { name = "httpx" }, + { name = "mcp" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "anyio", specifier = ">=4.5" }, + { name = "click", specifier = ">=8.2.0" }, + { name = "httpx", specifier = ">=0.27" }, + { name = "mcp", editable = "." 
}, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.378" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "ruff", specifier = ">=0.6.9" }, + [[package]] name = "mcp-snippets" version = "0.1.0" From 4a6c5a5374d029c7b836bab740f6b7e5541edb6a6c5a5374d029c7b836bab740f6b7e5541edb Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 1 Oct 2025 12:40:18 -0700 Subject: [PATCH 26/41] Fix README formatting --- examples/clients/async-reconnect-client/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/clients/async-reconnect-client/README.md b/examples/clients/async-reconnect-client/README.md index 3a4bdb7c0..9d3106c41 100644 --- a/examples/clients/async-reconnect-client/README.md +++ b/examples/clients/async-reconnect-client/README.md @@ -73,7 +73,7 @@ Result: ... ## Configuration -- `--endpoint` - MCP server endpoint (default: http://127.0.0.1:8000/mcp) +- `--endpoint` - MCP server endpoint (default: <http://127.0.0.1:8000/mcp>) - `--token` - Operation token to resume with (optional) This example showcases the async tool capabilities introduced in MCP protocol version "next", allowing for long-running operations that can be resumed even if the client disconnects. 
From 9375927935f3a493553a1ea4500f578c91fdc15e Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 1 Oct 2025 12:53:50 -0700 Subject: [PATCH 27/41] Remove usages of asyncio in tests --- .../mcp_async_reconnect_client/client.py | 7 +- .../mcp_simple_tool_async/server.py | 5 +- .../clients/async_elicitation_client.py | 9 +- .../snippets/clients/async_progress_client.py | 9 +- .../snippets/clients/async_sampling_client.py | 9 +- .../snippets/clients/async_tool_client.py | 7 +- examples/snippets/servers/async_tool_basic.py | 25 ++-- .../servers/async_tool_elicitation.py | 18 +-- .../snippets/servers/async_tool_immediate.py | 7 +- .../snippets/servers/async_tool_progress.py | 11 +- .../snippets/servers/async_tool_sampling.py | 13 +- tests/server/fastmcp/test_immediate_result.py | 19 ++- tests/server/fastmcp/test_integration.py | 16 +-- tests/server/fastmcp/test_server.py | 18 +-- .../server/test_lowlevel_async_operations.py | 114 ++++++++---------- 15 files changed, 135 insertions(+), 152 deletions(-) diff --git a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py index 9d1924ac4..24b8d8a03 100644 --- a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py +++ b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py @@ -1,5 +1,4 @@ -import asyncio - +import anyio import click from mcp import ClientSession, types from mcp.client.streamable_http import streamablehttp_client @@ -30,7 +29,7 @@ async def call_async_tool(session: ClientSession, token: str | None): print(f"Operation failed: {status.error}") break - await asyncio.sleep(0.5) + await anyio.sleep(0.5) async def run_session(endpoint: str, token: str | None): @@ -44,4 +43,4 @@ async def run_session(endpoint: str, token: str | None): @click.option("--endpoint", default="http://127.0.0.1:8000/mcp", help="Endpoint to connect to") @click.option("--token", default=None, 
help="Operation token to resume with") def main(endpoint: str, token: str | None): - asyncio.run(run_session(endpoint, token)) + anyio.run(run_session, endpoint, token) diff --git a/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py b/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py index 605097a3b..3ac9b2c67 100644 --- a/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py +++ b/examples/servers/simple-tool-async/mcp_simple_tool_async/server.py @@ -1,5 +1,4 @@ -import asyncio - +import anyio import click import mcp.types as types import uvicorn @@ -15,7 +14,7 @@ async def fetch_website( ) -> list[types.ContentBlock]: headers = {"User-Agent": "MCP Test Server (github.com/modelcontextprotocol/python-sdk)"} async with create_mcp_http_client(headers=headers) as client: - await asyncio.sleep(5) + await anyio.sleep(5) response = await client.get(url) response.raise_for_status() return [types.TextContent(type="text", text=response.text)] diff --git a/examples/snippets/clients/async_elicitation_client.py b/examples/snippets/clients/async_elicitation_client.py index 44194553f..0f7290784 100644 --- a/examples/snippets/clients/async_elicitation_client.py +++ b/examples/snippets/clients/async_elicitation_client.py @@ -5,9 +5,10 @@ uv run async-elicitation-client """ -import asyncio import os +import anyio + from mcp import ClientSession, StdioServerParameters, types from mcp.client.stdio import stdio_client from mcp.shared.context import RequestContext @@ -70,7 +71,7 @@ async def test_process_with_confirmation(session: ClientSession): print(f"Operation failed: {status.error}") break - await asyncio.sleep(0.3) + await anyio.sleep(0.3) async def test_file_operation(session: ClientSession): @@ -97,7 +98,7 @@ async def test_file_operation(session: ClientSession): print(f"File operation failed: {status.error}") break - await asyncio.sleep(0.3) + await anyio.sleep(0.3) async def run(): @@ -115,4 +116,4 @@ async def run(): if __name__ 
== "__main__": - asyncio.run(run()) + anyio.run(run) diff --git a/examples/snippets/clients/async_progress_client.py b/examples/snippets/clients/async_progress_client.py index 42e2a7167..337131a92 100644 --- a/examples/snippets/clients/async_progress_client.py +++ b/examples/snippets/clients/async_progress_client.py @@ -5,9 +5,10 @@ uv run async-progress-client """ -import asyncio import os +import anyio + from mcp import ClientSession, StdioServerParameters, types from mcp.client.stdio import stdio_client @@ -58,7 +59,7 @@ async def progress_callback(progress: float, total: float | None, message: str | print(f"Operation failed: {status.error}") break - await asyncio.sleep(0.3) + await anyio.sleep(0.3) print(f"Received {len(progress_updates)} progress updates") @@ -91,7 +92,7 @@ async def test_data_pipeline(session: ClientSession): print(f"Pipeline failed: {status.error}") break - await asyncio.sleep(0.3) + await anyio.sleep(0.3) async def run(): @@ -107,4 +108,4 @@ async def run(): if __name__ == "__main__": - asyncio.run(run()) + anyio.run(run) diff --git a/examples/snippets/clients/async_sampling_client.py b/examples/snippets/clients/async_sampling_client.py index eb0aa5822..7cd1d1e13 100644 --- a/examples/snippets/clients/async_sampling_client.py +++ b/examples/snippets/clients/async_sampling_client.py @@ -5,9 +5,10 @@ uv run async-sampling-client """ -import asyncio import os +import anyio + from mcp import ClientSession, StdioServerParameters, types from mcp.client.stdio import stdio_client from mcp.shared.context import RequestContext @@ -77,7 +78,7 @@ async def test_content_generation(session: ClientSession): print(f"Generation failed: {status.error}") break - await asyncio.sleep(0.3) + await anyio.sleep(0.3) async def test_multi_step_generation(session: ClientSession): @@ -107,7 +108,7 @@ async def test_multi_step_generation(session: ClientSession): print(f"Multi-step generation failed: {status.error}") break - await asyncio.sleep(0.3) + await 
anyio.sleep(0.3) async def run(): @@ -123,4 +124,4 @@ async def run(): if __name__ == "__main__": - asyncio.run(run()) + anyio.run(run) diff --git a/examples/snippets/clients/async_tool_client.py b/examples/snippets/clients/async_tool_client.py index 52ee0be68..e67a18733 100644 --- a/examples/snippets/clients/async_tool_client.py +++ b/examples/snippets/clients/async_tool_client.py @@ -5,9 +5,10 @@ uv run async-tool-client """ -import asyncio import os +import anyio + from mcp import ClientSession, StdioServerParameters, types from mcp.client.stdio import stdio_client @@ -44,7 +45,7 @@ async def call_async_tool(session: ClientSession): print(f"Operation failed: {status.error}") break - await asyncio.sleep(0.5) + await anyio.sleep(0.5) async def run(): @@ -56,4 +57,4 @@ async def run(): if __name__ == "__main__": - asyncio.run(run()) + anyio.run(run) diff --git a/examples/snippets/servers/async_tool_basic.py b/examples/snippets/servers/async_tool_basic.py index f711b8bd7..562e18e23 100644 --- a/examples/snippets/servers/async_tool_basic.py +++ b/examples/snippets/servers/async_tool_basic.py @@ -5,21 +5,22 @@ uv run server async_tool_basic stdio """ -import asyncio +import anyio from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession mcp = FastMCP("Async Tool Basic") @mcp.tool(invocation_modes=["async"]) -async def analyze_data(dataset: str, ctx: Context) -> str: # type: ignore[type-arg] +async def analyze_data(dataset: str, ctx: Context[ServerSession, None]) -> str: """Analyze a dataset asynchronously with progress updates.""" await ctx.info(f"Starting analysis of {dataset}") # Simulate analysis with progress updates for i in range(5): - await asyncio.sleep(0.5) + await anyio.sleep(0.5) progress = (i + 1) / 5 await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5") @@ -28,21 +29,11 @@ async def analyze_data(dataset: str, ctx: Context) -> str: # type: ignore[type- @mcp.tool(invocation_modes=["sync", "async"]) -def 
process_text(text: str, ctx: Context | None = None) -> str: # type: ignore[type-arg] +async def process_text(text: str, ctx: Context[ServerSession, None]) -> str: """Process text in sync or async mode.""" - if ctx: - # Async mode with context - import asyncio - - async def async_processing(): - await ctx.info(f"Processing text asynchronously: {text[:20]}...") - await asyncio.sleep(0.3) - - try: - loop = asyncio.get_event_loop() - loop.create_task(async_processing()) - except RuntimeError: - pass + + await ctx.info(f"Processing text asynchronously: {text[:20]}...") + await anyio.sleep(0.3) return f"Processed: {text.upper()}" diff --git a/examples/snippets/servers/async_tool_elicitation.py b/examples/snippets/servers/async_tool_elicitation.py index 058c15ed0..300fb3d27 100644 --- a/examples/snippets/servers/async_tool_elicitation.py +++ b/examples/snippets/servers/async_tool_elicitation.py @@ -5,11 +5,11 @@ uv run server async_tool_elicitation stdio """ -import asyncio - +import anyio from pydantic import BaseModel, Field from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession mcp = FastMCP("Async Tool Elicitation") @@ -32,12 +32,12 @@ class FileOperationChoice(BaseModel): @mcp.tool(invocation_modes=["async"]) -async def process_with_confirmation(operation: str, ctx: Context) -> str: # type: ignore[type-arg] +async def process_with_confirmation(operation: str, ctx: Context[ServerSession, None]) -> str: """Process an operation that requires user confirmation.""" await ctx.info(f"Starting operation: {operation}") # Simulate some initial processing - await asyncio.sleep(0.5) + await anyio.sleep(0.5) await ctx.report_progress(0.3, 1.0, "Initial processing complete") # Ask user for preferences @@ -51,7 +51,7 @@ async def process_with_confirmation(operation: str, ctx: Context) -> str: # typ await ctx.info(f"Continuing with {result.data.priority_level} priority") # Simulate processing based on user choice processing_time = {"low": 0.5, 
"normal": 1.0, "high": 1.5}.get(result.data.priority_level, 1.0) - await asyncio.sleep(processing_time) + await anyio.sleep(processing_time) await ctx.report_progress(1.0, 1.0, "Operation complete") return f"Operation '{operation}' completed successfully with {result.data.priority_level} priority" else: @@ -63,12 +63,12 @@ async def process_with_confirmation(operation: str, ctx: Context) -> str: # typ @mcp.tool(invocation_modes=["async"]) -async def file_operation(file_path: str, operation_type: str, ctx: Context) -> str: # type: ignore[type-arg] +async def file_operation(file_path: str, operation_type: str, ctx: Context[ServerSession, None]) -> str: """Perform file operation with user confirmation.""" await ctx.info(f"Analyzing file: {file_path}") # Simulate initial analysis - await asyncio.sleep(1) + await anyio.sleep(1) await ctx.report_progress(0.3, 1.0, "File analysis complete") # Simulate finding something that requires user confirmation @@ -84,11 +84,11 @@ async def file_operation(file_path: str, operation_type: str, ctx: Context) -> s if result.data.confirm_operation: if result.data.backup_first: await ctx.info("Creating backup first...") - await asyncio.sleep(0.5) + await anyio.sleep(0.5) await ctx.report_progress(0.7, 1.0, "Backup created") await ctx.info(f"Performing {operation_type} operation...") - await asyncio.sleep(1) + await anyio.sleep(1) await ctx.report_progress(1.0, 1.0, "Operation complete") backup_msg = " (with backup)" if result.data.backup_first else " (no backup)" diff --git a/examples/snippets/servers/async_tool_immediate.py b/examples/snippets/servers/async_tool_immediate.py index 49898760f..0e34d9d0c 100644 --- a/examples/snippets/servers/async_tool_immediate.py +++ b/examples/snippets/servers/async_tool_immediate.py @@ -5,10 +5,11 @@ uv run server async_tool_immediate stdio """ -import asyncio +import anyio from mcp import types from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession mcp = 
FastMCP("Async Tool Immediate") @@ -19,13 +20,13 @@ async def provide_immediate_feedback(operation: str) -> list[types.ContentBlock] @mcp.tool(invocation_modes=["async"], immediate_result=provide_immediate_feedback) -async def long_analysis(operation: str, ctx: Context) -> str: # type: ignore[type-arg] +async def long_analysis(operation: str, ctx: Context[ServerSession, None]) -> str: """Perform long-running analysis with immediate user feedback.""" await ctx.info(f"Beginning {operation} analysis") # Simulate long-running work for i in range(4): - await asyncio.sleep(1) + await anyio.sleep(1) progress = (i + 1) / 4 await ctx.report_progress(progress, 1.0, f"Analysis step {i + 1}/4") diff --git a/examples/snippets/servers/async_tool_progress.py b/examples/snippets/servers/async_tool_progress.py index 633c7570e..ed98a5858 100644 --- a/examples/snippets/servers/async_tool_progress.py +++ b/examples/snippets/servers/async_tool_progress.py @@ -5,15 +5,16 @@ uv run server async_tool_progress stdio """ -import asyncio +import anyio from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession mcp = FastMCP("Async Tool Progress") @mcp.tool(invocation_modes=["async"]) -async def batch_process(items: list[str], ctx: Context) -> list[str]: # type: ignore[type-arg] +async def batch_process(items: list[str], ctx: Context[ServerSession, None]) -> list[str]: """Process a batch of items with detailed progress reporting.""" await ctx.info(f"Starting batch processing of {len(items)} items") @@ -24,7 +25,7 @@ async def batch_process(items: list[str], ctx: Context) -> list[str]: # type: i # Simulate variable processing time processing_time = 0.3 + (len(item) * 0.1) - await asyncio.sleep(processing_time) + await anyio.sleep(processing_time) # Report progress for this item progress = (i + 1) / len(items) @@ -41,7 +42,7 @@ async def batch_process(items: list[str], ctx: Context) -> list[str]: # type: i @mcp.tool(invocation_modes=["async"]) -async def 
data_pipeline(dataset: str, operations: list[str], ctx: Context) -> dict[str, str]: # type: ignore[type-arg] +async def data_pipeline(dataset: str, operations: list[str], ctx: Context[ServerSession, None]) -> dict[str, str]: """Execute a data processing pipeline with progress updates.""" await ctx.info(f"Starting data pipeline for {dataset}") @@ -53,7 +54,7 @@ async def data_pipeline(dataset: str, operations: list[str], ctx: Context) -> di # Simulate processing time that increases with complexity processing_time = 0.5 + (i * 0.2) - await asyncio.sleep(processing_time) + await anyio.sleep(processing_time) # Report progress progress = (i + 1) / total_ops diff --git a/examples/snippets/servers/async_tool_sampling.py b/examples/snippets/servers/async_tool_sampling.py index 253aadada..d63273f50 100644 --- a/examples/snippets/servers/async_tool_sampling.py +++ b/examples/snippets/servers/async_tool_sampling.py @@ -5,21 +5,22 @@ uv run server async_tool_sampling stdio """ -import asyncio +import anyio from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession from mcp.types import SamplingMessage, TextContent mcp = FastMCP("Async Tool Sampling") @mcp.tool(invocation_modes=["async"]) -async def generate_content(topic: str, content_type: str, ctx: Context) -> str: # type: ignore[type-arg] +async def generate_content(topic: str, content_type: str, ctx: Context[ServerSession, None]) -> str: """Generate content using LLM sampling with progress updates.""" await ctx.info(f"Starting {content_type} generation for topic: {topic}") # Simulate preparation - await asyncio.sleep(0.5) + await anyio.sleep(0.5) await ctx.report_progress(0.2, 1.0, "Preparing content generation") # Create prompt based on content type @@ -48,7 +49,7 @@ async def generate_content(topic: str, content_type: str, ctx: Context) -> str: await ctx.report_progress(0.8, 1.0, "Content generated") # Process the result - await asyncio.sleep(0.3) + await anyio.sleep(0.3) await 
ctx.report_progress(1.0, 1.0, "Processing complete") if result.content.type == "text": @@ -60,7 +61,7 @@ async def generate_content(topic: str, content_type: str, ctx: Context) -> str: @mcp.tool(invocation_modes=["async"]) -async def multi_step_generation(topic: str, steps: list[str], ctx: Context) -> dict[str, str]: # type: ignore[type-arg] +async def multi_step_generation(topic: str, steps: list[str], ctx: Context[ServerSession, None]) -> dict[str, str]: """Generate multiple pieces of content in sequence.""" await ctx.info(f"Starting multi-step generation for: {topic}") @@ -95,7 +96,7 @@ async def multi_step_generation(topic: str, steps: list[str], ctx: Context) -> d await ctx.report_progress(progress, 1.0, f"Completed step {i + 1}/{total_steps}: {step}") # Small delay between steps - await asyncio.sleep(0.2) + await anyio.sleep(0.2) await ctx.info(f"Multi-step generation complete! Generated {len(results)} pieces of content") return results diff --git a/tests/server/fastmcp/test_immediate_result.py b/tests/server/fastmcp/test_immediate_result.py index bfe9be797..bdfd4a17e 100644 --- a/tests/server/fastmcp/test_immediate_result.py +++ b/tests/server/fastmcp/test_immediate_result.py @@ -1,7 +1,6 @@ """Test immediate_result functionality in FastMCP.""" -import asyncio - +import anyio import pytest from mcp.server.fastmcp import FastMCP @@ -127,7 +126,7 @@ async def immediate_feedback(operation: str) -> list[ContentBlock]: @mcp.tool(invocation_modes=["async"], immediate_result=immediate_feedback) async def long_running_task(operation: str) -> str: """Perform a long-running task with immediate feedback.""" - await asyncio.sleep(0.1) # Simulate work + await anyio.sleep(0.1) # Simulate work return f"Task '{operation}' completed!" 
# Test with "next" protocol version to see async tools @@ -151,7 +150,7 @@ async def test_tool_without_immediate_result_backward_compatibility(self): @mcp.tool(invocation_modes=["async"]) async def simple_async_tool(message: str) -> str: """A simple async tool without immediate result.""" - await asyncio.sleep(0.1) + await anyio.sleep(0.1) return f"Processed: {message}" # Test with "next" protocol version to see async tools @@ -370,7 +369,7 @@ async def immediate_fn(message: str) -> list[ContentBlock]: async def async_tool(message: str) -> str: execution_order.append("main") - await asyncio.sleep(0.1) + await anyio.sleep(0.1) return f"Completed: {message}" manager = ToolManager() @@ -397,7 +396,7 @@ async def working_immediate_fn(message: str) -> list[ContentBlock]: return [TextContent(type="text", text=f"Processing: {message}")] async def async_tool(message: str) -> str: - await asyncio.sleep(0.1) + await anyio.sleep(0.1) return f"Completed: {message}" mcp = FastMCP() @@ -440,7 +439,7 @@ async def tool_with_working_immediate(message: str) -> str: break elif status.status == "failed": pytest.fail(f"Tool execution failed: {status}") - await asyncio.sleep(0.01) + await anyio.sleep(0.01) @pytest.mark.anyio async def test_immediate_result_exception_handling(self): @@ -450,7 +449,7 @@ async def failing_immediate_fn(message: str) -> list[ContentBlock]: raise ValueError(f"Immediate result failed for: {message}") async def async_tool(message: str) -> str: - await asyncio.sleep(0.1) + await anyio.sleep(0.1) return f"Completed: {message}" mcp = FastMCP() @@ -520,11 +519,11 @@ async def test_immediate_result_async_exception_handling(self): """Test that async exceptions in immediate_result are properly handled.""" async def async_failing_immediate_fn(operation: str) -> list[ContentBlock]: - await asyncio.sleep(0.01) # Make it truly async + await anyio.sleep(0.01) # Make it truly async raise RuntimeError(f"Async immediate failure: {operation}") async def async_tool(operation: 
str) -> str: - await asyncio.sleep(0.1) + await anyio.sleep(0.1) return f"Operation {operation} completed" mcp = FastMCP() diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index 8853516e9..7ef0d5674 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -10,7 +10,6 @@ # pyright: reportUnknownVariableType=false # pyright: reportUnknownArgumentType=false -import asyncio import json import logging import multiprocessing @@ -18,6 +17,7 @@ import time from collections.abc import Generator +import anyio import pytest import uvicorn from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream @@ -741,7 +741,7 @@ async def test_async_tool_basic(server_transport: str, server_url: str) -> None: pytest.fail(f"Async operation failed: {status.error}") attempt += 1 - await asyncio.sleep(0.5) + await anyio.sleep(0.5) else: pytest.fail("Async operation timed out") @@ -883,7 +883,7 @@ async def test_immediate_result_integration(server_transport: str, server_url: s pytest.fail(f"Async operation failed: {status.error}") attempt += 1 - await asyncio.sleep(0.5) + await anyio.sleep(0.5) else: pytest.fail("Async operation timed out") @@ -937,10 +937,10 @@ async def test_immediate_result_backward_compatibility(server_transport: str, se pytest.fail(f"Async operation failed: {status.error}") attempt += 1 - await asyncio.sleep(0.5) + await anyio.sleep(0.5) else: pytest.fail("Async operation timed out") - await asyncio.sleep(0.01) + await anyio.sleep(0.01) # Test async progress notifications @@ -1010,7 +1010,7 @@ async def progress_callback(progress: float, total: float | None, message: str | pytest.fail(f"Batch operation failed: {status.error}") attempt += 1 - await asyncio.sleep(0.3) + await anyio.sleep(0.3) else: pytest.fail("Batch operation timed out") @@ -1087,7 +1087,7 @@ async def test_elicitation_callback(context: RequestContext[ClientSession, None] 
pytest.fail(f"Elicitation operation failed: {status.error}") attempt += 1 - await asyncio.sleep(0.3) + await anyio.sleep(0.3) else: pytest.fail("Elicitation operation timed out") @@ -1155,6 +1155,6 @@ async def test_sampling_callback( pytest.fail(f"Sampling operation failed: {status.error}") attempt += 1 - await asyncio.sleep(0.3) + await anyio.sleep(0.3) else: pytest.fail("Sampling operation timed out") diff --git a/tests/server/fastmcp/test_server.py b/tests/server/fastmcp/test_server.py index 73862abd4..fc51321e6 100644 --- a/tests/server/fastmcp/test_server.py +++ b/tests/server/fastmcp/test_server.py @@ -1,9 +1,9 @@ -import asyncio import base64 from pathlib import Path from typing import TYPE_CHECKING, Any from unittest.mock import patch +import anyio import pytest from pydantic import AnyUrl, BaseModel from starlette.routing import Mount, Route @@ -660,7 +660,7 @@ async def test_async_tool_call_basic(self): @mcp.tool(invocation_modes=["async"]) async def async_add(a: int, b: int) -> int: """Add two numbers asynchronously.""" - await asyncio.sleep(0.01) # Simulate async work + await anyio.sleep(0.01) # Simulate async work return a + b async with client_session(mcp._mcp_server, protocol_version="next") as client: @@ -683,7 +683,7 @@ async def async_add(a: int, b: int) -> int: break elif status.status == "failed": pytest.fail(f"Operation failed: {status.error}") - await asyncio.sleep(0.01) + await anyio.sleep(0.01) @pytest.mark.anyio async def test_async_tool_call_structured_output(self): @@ -697,7 +697,7 @@ class AsyncResult(BaseModel): @mcp.tool(invocation_modes=["async"]) async def async_structured_tool(x: int) -> AsyncResult: """Process data and return structured result.""" - await asyncio.sleep(0.01) # Simulate async work + await anyio.sleep(0.01) # Simulate async work return AsyncResult(value=x * 2) async with client_session(mcp._mcp_server, protocol_version="next") as client: @@ -718,7 +718,7 @@ async def async_structured_tool(x: int) -> AsyncResult: 
break elif status.status == "failed": pytest.fail(f"Operation failed: {status.error}") - await asyncio.sleep(0.01) + await anyio.sleep(0.01) @pytest.mark.anyio async def test_async_tool_call_validation_error(self): @@ -728,7 +728,7 @@ async def test_async_tool_call_validation_error(self): @mcp.tool(invocation_modes=["async"]) async def async_invalid_tool() -> list[int]: """Tool that returns invalid structured output.""" - await asyncio.sleep(0.01) # Simulate async work + await anyio.sleep(0.01) # Simulate async work return [1, 2, 3, [4]] # type: ignore async with client_session(mcp._mcp_server, protocol_version="next") as client: @@ -747,7 +747,7 @@ async def async_invalid_tool() -> list[int]: break elif status.status == "completed": pytest.fail("Operation should have failed due to validation error") - await asyncio.sleep(0.01) + await anyio.sleep(0.01) @pytest.mark.anyio async def test_tool_keep_alive_validation_no_sync_only(self): @@ -814,7 +814,7 @@ def short_lived_tool(data: str) -> str: assert operation_result.result is not None # Wait for keep_alive to expire (1 second + buffer) - await asyncio.sleep(1.2) + await anyio.sleep(1.2) # Operation should now be expired/unavailable with pytest.raises(Exception): # Should raise error for expired operation @@ -862,7 +862,7 @@ def structured_tool(input_data: str) -> ProcessResult: assert structured_data["count"] == 42 # Wait for keep_alive to expire (1 second + buffer) - await asyncio.sleep(1.2) + await anyio.sleep(1.2) # Operation should now be expired/unavailable - validation should fail gracefully with pytest.raises(Exception): # Should raise error for expired operation diff --git a/tests/server/test_lowlevel_async_operations.py b/tests/server/test_lowlevel_async_operations.py index c9b5151d5..7adaa2199 100644 --- a/tests/server/test_lowlevel_async_operations.py +++ b/tests/server/test_lowlevel_async_operations.py @@ -1,6 +1,5 @@ """Test async operations integration in lowlevel Server.""" -import asyncio import time 
from typing import cast @@ -15,7 +14,8 @@ class TestLowlevelServerAsyncOperations: """Test lowlevel Server async operations integration.""" - def test_check_async_status_invalid_token(self): + @pytest.mark.anyio + async def test_check_async_status_invalid_token(self): """Test get_operation_status handler with invalid token.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -32,16 +32,13 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: handler = server.request_handlers[types.GetOperationStatusRequest] with pytest.raises(McpError) as exc_info: - - async def run_handler(): - return await handler(invalid_request) - - asyncio.run(run_handler()) + await handler(invalid_request) assert exc_info.value.error.code == -32602 assert exc_info.value.error.message == "Invalid token" - def test_check_async_status_expired_token(self): + @pytest.mark.anyio + async def test_check_async_status_expired_token(self): """Test get_operation_status handler with expired token.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -62,16 +59,13 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: handler = server.request_handlers[types.GetOperationStatusRequest] with pytest.raises(McpError) as exc_info: - - async def run_handler(): - return await handler(expired_request) - - asyncio.run(run_handler()) + await handler(expired_request) assert exc_info.value.error.code == -32602 assert exc_info.value.error.message == "Token expired" - def test_check_async_status_valid_operation(self): + @pytest.mark.anyio + async def test_check_async_status_valid_operation(self): """Test get_operation_status handler with valid operation.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -88,17 +82,15 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: handler = 
server.request_handlers[types.GetOperationStatusRequest] - async def run_handler(): - return await handler(valid_request) - - result = asyncio.run(run_handler()) + result = await handler(valid_request) assert isinstance(result, types.ServerResult) status_result = cast(types.GetOperationStatusResult, result.root) assert status_result.status == "working" assert status_result.error is None - def test_check_async_status_failed_operation(self): + @pytest.mark.anyio + async def test_check_async_status_failed_operation(self): """Test get_operation_status handler with failed operation.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -115,17 +107,15 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: handler = server.request_handlers[types.GetOperationStatusRequest] - async def run_handler(): - return await handler(failed_request) - - result = asyncio.run(run_handler()) + result = await handler(failed_request) assert isinstance(result, types.ServerResult) status_result = cast(types.GetOperationStatusResult, result.root) assert status_result.status == "failed" assert status_result.error == "Something went wrong" - def test_get_async_result_invalid_token(self): + @pytest.mark.anyio + async def test_get_async_result_invalid_token(self): """Test get_operation_result handler with invalid token.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -141,16 +131,13 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: handler = server.request_handlers[types.GetOperationPayloadRequest] with pytest.raises(McpError) as exc_info: - - async def run_handler(): - return await handler(invalid_request) - - asyncio.run(run_handler()) + await handler(invalid_request) assert exc_info.value.error.code == -32602 assert exc_info.value.error.message == "Invalid token" - def test_get_async_result_expired_token(self): + @pytest.mark.anyio + async def 
test_get_async_result_expired_token(self): """Test get_operation_result handler with expired token.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -173,16 +160,13 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: handler = server.request_handlers[types.GetOperationPayloadRequest] with pytest.raises(McpError) as exc_info: - - async def run_handler(): - return await handler(expired_request) - - asyncio.run(run_handler()) + await handler(expired_request) assert exc_info.value.error.code == -32602 assert exc_info.value.error.message == "Token expired" - def test_get_async_result_not_completed(self): + @pytest.mark.anyio + async def test_get_async_result_not_completed(self): """Test get_operation_result handler with non-completed operation.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -202,16 +186,13 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: handler = server.request_handlers[types.GetOperationPayloadRequest] with pytest.raises(McpError) as exc_info: - - async def run_handler(): - return await handler(working_request) - - asyncio.run(run_handler()) + await handler(working_request) assert exc_info.value.error.code == -32600 assert exc_info.value.error.message == "Operation not completed (status: working)" - def test_get_async_result_completed_with_result(self): + @pytest.mark.anyio + async def test_get_async_result_completed_with_result(self): """Test get_operation_result handler with completed operation.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -231,10 +212,7 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: handler = server.request_handlers[types.GetOperationPayloadRequest] - async def run_handler(): - return await handler(completed_request) - - response = asyncio.run(run_handler()) + response = await 
handler(completed_request) assert isinstance(response, types.ServerResult) payload_result = cast(types.GetOperationPayloadResult, response.root) @@ -244,7 +222,8 @@ async def run_handler(): class TestCancellationLogic: """Test cancellation logic for async operations.""" - def test_handle_cancelled_notification(self): + @pytest.mark.anyio + async def test_handle_cancelled_notification(self): """Test handling of cancelled notifications.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -267,7 +246,8 @@ def test_handle_cancelled_notification(self): # Verify mapping was cleaned up assert request_id not in server._request_to_operation - def test_cancelled_notification_handler(self): + @pytest.mark.anyio + async def test_cancelled_notification_handler(self): """Test the async cancelled notification handler.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -282,17 +262,15 @@ def test_cancelled_notification_handler(self): # Create cancelled notification notification = types.CancelledNotification(params=types.CancelledNotificationParams(requestId=request_id)) - # Handle the notification - import asyncio - - asyncio.run(server._handle_cancelled_notification(notification)) + await server._handle_cancelled_notification(notification) # Verify operation was cancelled cancelled_op = manager.get_operation(operation.token) assert cancelled_op is not None assert cancelled_op.status == "canceled" - def test_validate_operation_token_cancelled(self): + @pytest.mark.anyio + async def test_validate_operation_token_cancelled(self): """Test that cancelled operations are rejected.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -308,7 +286,8 @@ def test_validate_operation_token_cancelled(self): assert exc_info.value.error.code == -32602 assert "cancelled" in exc_info.value.error.message.lower() - def test_nonexistent_request_id_cancellation(self): + 
@pytest.mark.anyio + async def test_nonexistent_request_id_cancellation(self): """Test cancellation of non-existent request ID.""" server = Server("Test") @@ -322,7 +301,8 @@ def test_nonexistent_request_id_cancellation(self): class TestInputRequiredBehavior: """Test input_required status handling for async operations.""" - def test_mark_input_required(self): + @pytest.mark.anyio + async def test_mark_input_required(self): """Test marking operation as requiring input.""" manager = ServerAsyncOperationManager() @@ -339,7 +319,8 @@ def test_mark_input_required(self): assert updated_op is not None assert updated_op.status == "input_required" - def test_mark_input_required_from_working(self): + @pytest.mark.anyio + async def test_mark_input_required_from_working(self): """Test marking working operation as requiring input.""" manager = ServerAsyncOperationManager() @@ -353,7 +334,8 @@ def test_mark_input_required_from_working(self): assert result is True assert operation.status == "input_required" - def test_mark_input_required_invalid_states(self): + @pytest.mark.anyio + async def test_mark_input_required_invalid_states(self): """Test that input_required can only be set from valid states.""" manager = ServerAsyncOperationManager() @@ -365,7 +347,8 @@ def test_mark_input_required_invalid_states(self): assert result is False assert operation.status == "completed" - def test_mark_input_completed(self): + @pytest.mark.anyio + async def test_mark_input_completed(self): """Test marking input as completed.""" manager = ServerAsyncOperationManager() @@ -379,7 +362,8 @@ def test_mark_input_completed(self): assert result is True assert operation.status == "working" - def test_mark_input_completed_invalid_state(self): + @pytest.mark.anyio + async def test_mark_input_completed_invalid_state(self): """Test that input can only be completed from input_required state.""" manager = ServerAsyncOperationManager() @@ -392,7 +376,8 @@ def test_mark_input_completed_invalid_state(self): 
assert result is False assert operation.status == "submitted" - def test_nonexistent_token_operations(self): + @pytest.mark.anyio + async def test_nonexistent_token_operations(self): """Test input_required operations on nonexistent tokens.""" manager = ServerAsyncOperationManager() @@ -400,7 +385,8 @@ def test_nonexistent_token_operations(self): assert manager.mark_input_required("fake_token") is False assert manager.mark_input_completed("fake_token") is False - def test_server_send_request_for_operation(self): + @pytest.mark.anyio + async def test_server_send_request_for_operation(self): """Test server method for sending requests with operation tokens.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -427,7 +413,8 @@ def test_server_send_request_for_operation(self): assert updated_op is not None assert updated_op.status == "input_required" - def test_server_complete_request_for_operation(self): + @pytest.mark.anyio + async def test_server_complete_request_for_operation(self): """Test server method for completing requests.""" manager = ServerAsyncOperationManager() server = Server("Test", async_operations=manager) @@ -444,7 +431,8 @@ def test_server_complete_request_for_operation(self): assert updated_op is not None assert updated_op.status == "working" - def test_input_required_is_terminal_check(self): + @pytest.mark.anyio + async def test_input_required_is_terminal_check(self): """Test that input_required is not considered a terminal state.""" manager = ServerAsyncOperationManager() From 26055d959cb672003dd2912b02610c4bbd12dea3 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 1 Oct 2025 14:15:52 -0700 Subject: [PATCH 28/41] Update README snippets --- README.md | 37 +++++++++++++++---------------------- 1 file changed, 15 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index 19844bee8..8378dd2ac 100644 --- a/README.md +++ b/README.md @@ -502,21 +502,22 @@ cd to the `examples/snippets/clients` 
directory and run: uv run server async_tool_basic stdio """ -import asyncio +import anyio from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession mcp = FastMCP("Async Tool Basic") @mcp.tool(invocation_modes=["async"]) -async def analyze_data(dataset: str, ctx: Context) -> str: # type: ignore[type-arg] +async def analyze_data(dataset: str, ctx: Context[ServerSession, None]) -> str: """Analyze a dataset asynchronously with progress updates.""" await ctx.info(f"Starting analysis of {dataset}") # Simulate analysis with progress updates for i in range(5): - await asyncio.sleep(0.5) + await anyio.sleep(0.5) progress = (i + 1) / 5 await ctx.report_progress(progress, 1.0, f"Processing step {i + 1}/5") @@ -525,21 +526,11 @@ async def analyze_data(dataset: str, ctx: Context) -> str: # type: ignore[type- @mcp.tool(invocation_modes=["sync", "async"]) -def process_text(text: str, ctx: Context | None = None) -> str: # type: ignore[type-arg] +async def process_text(text: str, ctx: Context[ServerSession, None]) -> str: """Process text in sync or async mode.""" - if ctx: - # Async mode with context - import asyncio - async def async_processing(): - await ctx.info(f"Processing text asynchronously: {text[:20]}...") - await asyncio.sleep(0.3) - - try: - loop = asyncio.get_event_loop() - loop.create_task(async_processing()) - except RuntimeError: - pass + await ctx.info(f"Processing text asynchronously: {text[:20]}...") + await anyio.sleep(0.3) return f"Processed: {text.upper()}" @@ -562,10 +553,11 @@ cd to the `examples/snippets/clients` directory and run: uv run server async_tool_immediate stdio """ -import asyncio +import anyio from mcp import types from mcp.server.fastmcp import Context, FastMCP +from mcp.server.session import ServerSession mcp = FastMCP("Async Tool Immediate") @@ -576,13 +568,13 @@ async def provide_immediate_feedback(operation: str) -> list[types.ContentBlock] @mcp.tool(invocation_modes=["async"], 
immediate_result=provide_immediate_feedback) -async def long_analysis(operation: str, ctx: Context) -> str: # type: ignore[type-arg] +async def long_analysis(operation: str, ctx: Context[ServerSession, None]) -> str: """Perform long-running analysis with immediate user feedback.""" await ctx.info(f"Beginning {operation} analysis") # Simulate long-running work for i in range(4): - await asyncio.sleep(1) + await anyio.sleep(1) progress = (i + 1) / 4 await ctx.report_progress(progress, 1.0, f"Analysis step {i + 1}/4") @@ -607,9 +599,10 @@ cd to the `examples/snippets` directory and run: uv run async-tool-client """ -import asyncio import os +import anyio + from mcp import ClientSession, StdioServerParameters, types from mcp.client.stdio import stdio_client @@ -646,7 +639,7 @@ async def call_async_tool(session: ClientSession): print(f"Operation failed: {status.error}") break - await asyncio.sleep(0.5) + await anyio.sleep(0.5) async def run(): @@ -658,7 +651,7 @@ async def run(): if __name__ == "__main__": - asyncio.run(run()) + anyio.run(run) ``` _Full example: [examples/snippets/clients/async_tool_client.py](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/snippets/clients/async_tool_client.py)_ From a8e0831d5ca231aea236ad89ff94e7ff5af8009d Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 1 Oct 2025 14:20:36 -0700 Subject: [PATCH 29/41] Use anyio instead of asyncio in lowlevel server --- src/mcp/server/lowlevel/server.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 415efc16b..7c06d3984 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -67,7 +67,6 @@ async def main(): from __future__ import annotations as _annotations -import asyncio import contextvars import json import logging @@ -556,19 +555,20 @@ async def execute_async(): logger.exception(f"Async execution failed for 
{tool_name}") self.async_operations.fail_operation(operation.token, str(e)) - asyncio.create_task(execute_async()) - - # Return operation result with immediate content - logger.info(f"Returning async operation result for {tool_name}") - return types.ServerResult( - types.CallToolResult( - content=immediate_content, - operation=types.AsyncResultProperties( - token=operation.token, - keepAlive=operation.keep_alive, - ), + async with anyio.create_task_group() as tg: + tg.start_soon(execute_async) + + # Return operation result with immediate content + logger.info(f"Returning async operation result for {tool_name}") + return types.ServerResult( + types.CallToolResult( + content=immediate_content, + operation=types.AsyncResultProperties( + token=operation.token, + keepAlive=operation.keep_alive, + ), + ) ) - ) # tool call results = await func(tool_name, arguments) From 2ed562eb667a5ba2d4425f6c247ea4361d8b95b8 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Wed, 1 Oct 2025 14:30:36 -0700 Subject: [PATCH 30/41] Apply Copilot suggestions --- src/mcp/server/fastmcp/server.py | 17 +++++++++++++++-- src/mcp/server/lowlevel/server.py | 10 +++++++++- src/mcp/server/streamable_http.py | 3 ++- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index eb89ab7c5..0c238a825 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -354,12 +354,25 @@ def _get_invocation_mode(self, info: Tool, client_supports_async: bool) -> Liter return None # Old clients don't see invocationMode field # New clients see the invocationMode field - if "async" in info.invocation_modes and len(info.invocation_modes) == 1: + modes = info.invocation_modes + if self._is_async_only(modes): return "async" # Async-only - elif len(info.invocation_modes) > 1 or info.invocation_modes == ["sync"]: + if self._is_sync_only(modes) or self._is_hybrid(modes): return "sync" # Hybrid or explicit sync return None + 
def _is_async_only(self, modes: list[InvocationMode]) -> bool: + """Return True if invocation_modes is async-only.""" + return modes == ["async"] + + def _is_sync_only(self, modes: list[InvocationMode]) -> bool: + """Return True if invocation_modes is sync-only.""" + return modes == ["sync"] + + def _is_hybrid(self, modes: list[InvocationMode]) -> bool: + """Return True if invocation_modes contains both sync and async.""" + return "sync" in modes and "async" in modes and len(modes) > 1 + async def list_tools(self) -> list[MCPTool]: """List all available tools.""" tools = self._tool_manager.list_tools() diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 7c06d3984..6c19688c0 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -535,7 +535,15 @@ async def handler(req: types.CallToolRequest): ) logger.debug(f"Created async operation with token: {operation.token}") - ctx = self.request_context + # Add the operation token to the request context + ctx = RequestContext( + request_id=self.request_context.request_id, + operation_token=self.request_context.operation_token, + meta=self.request_context.meta, + session=self.request_context.session, + lifespan_context=self.request_context.lifespan_context, + request=self.request_context.request, + ) ctx.operation_token = operation.token request_ctx.set(ctx) diff --git a/src/mcp/server/streamable_http.py b/src/mcp/server/streamable_http.py index 523cc6941..da448794e 100644 --- a/src/mcp/server/streamable_http.py +++ b/src/mcp/server/streamable_http.py @@ -325,7 +325,8 @@ def _is_async_operation_response(self, response_message: JSONRPCMessage) -> bool return bool(operation["token"]) # type: ignore return False - except (TypeError, KeyError, AttributeError): + except (TypeError, KeyError, AttributeError) as exc: + logger.exception("Exception in _is_async_operation_response: %s", exc) return False async def _handle_sse_mode( From 
76f135e5d13f28cd45a19cbbcd2ac7b676c27cb0 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 3 Oct 2025 15:03:27 -0700 Subject: [PATCH 31/41] Use server TaskGroup to fix operations blocking CallTool requests --- src/mcp/server/lowlevel/server.py | 66 +++++++++++++++++-------------- 1 file changed, 36 insertions(+), 30 deletions(-) diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 6c19688c0..eaea6a0e9 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -77,6 +77,7 @@ async def main(): import anyio import jsonschema +from anyio.abc import TaskGroup from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from pydantic import AnyUrl from typing_extensions import TypeVar @@ -252,7 +253,7 @@ def decorator( wrapper = create_call_wrapper(func, types.ListPromptsRequest) - async def handler(req: types.ListPromptsRequest): + async def handler(req: types.ListPromptsRequest, _: Any = None): result = await wrapper(req) # Handle both old style (list[Prompt]) and new style (ListPromptsResult) if isinstance(result, types.ListPromptsResult): @@ -272,7 +273,7 @@ def decorator( ): logger.debug("Registering handler for GetPromptRequest") - async def handler(req: types.GetPromptRequest): + async def handler(req: types.GetPromptRequest, _: Any = None): prompt_get = await func(req.params.name, req.params.arguments) return types.ServerResult(prompt_get) @@ -290,7 +291,7 @@ def decorator( wrapper = create_call_wrapper(func, types.ListResourcesRequest) - async def handler(req: types.ListResourcesRequest): + async def handler(req: types.ListResourcesRequest, _: Any = None): result = await wrapper(req) # Handle both old style (list[Resource]) and new style (ListResourcesResult) if isinstance(result, types.ListResourcesResult): @@ -308,7 +309,7 @@ def list_resource_templates(self): def decorator(func: Callable[[], Awaitable[list[types.ResourceTemplate]]]): logger.debug("Registering handler 
for ListResourceTemplatesRequest") - async def handler(_: Any): + async def handler(_1: Any, _2: Any = None): templates = await func() return types.ServerResult(types.ListResourceTemplatesResult(resourceTemplates=templates)) @@ -323,7 +324,7 @@ def decorator( ): logger.debug("Registering handler for ReadResourceRequest") - async def handler(req: types.ReadResourceRequest): + async def handler(req: types.ReadResourceRequest, _: Any = None): result = await func(req.params.uri) def create_content(data: str | bytes, mime_type: str | None): @@ -379,7 +380,7 @@ def set_logging_level(self): def decorator(func: Callable[[types.LoggingLevel], Awaitable[None]]): logger.debug("Registering handler for SetLevelRequest") - async def handler(req: types.SetLevelRequest): + async def handler(req: types.SetLevelRequest, _: Any = None): await func(req.params.level) return types.ServerResult(types.EmptyResult()) @@ -392,7 +393,7 @@ def subscribe_resource(self): def decorator(func: Callable[[AnyUrl], Awaitable[None]]): logger.debug("Registering handler for SubscribeRequest") - async def handler(req: types.SubscribeRequest): + async def handler(req: types.SubscribeRequest, _: Any = None): await func(req.params.uri) return types.ServerResult(types.EmptyResult()) @@ -405,7 +406,7 @@ def unsubscribe_resource(self): def decorator(func: Callable[[AnyUrl], Awaitable[None]]): logger.debug("Registering handler for UnsubscribeRequest") - async def handler(req: types.UnsubscribeRequest): + async def handler(req: types.UnsubscribeRequest, _: Any = None): await func(req.params.uri) return types.ServerResult(types.EmptyResult()) @@ -423,7 +424,7 @@ def decorator( wrapper = create_call_wrapper(func, types.ListToolsRequest) - async def handler(req: types.ListToolsRequest): + async def handler(req: types.ListToolsRequest, _: Any = None): result = await wrapper(req) # Handle both old style (list[Tool]) and new style (ListToolsResult) @@ -493,7 +494,7 @@ def decorator( ): logger.debug("Registering 
handler for CallToolRequest") - async def handler(req: types.CallToolRequest): + async def handler(req: types.CallToolRequest, server_scope: TaskGroup): try: tool_name = req.params.name arguments = req.params.arguments or {} @@ -563,20 +564,20 @@ async def execute_async(): logger.exception(f"Async execution failed for {tool_name}") self.async_operations.fail_operation(operation.token, str(e)) - async with anyio.create_task_group() as tg: - tg.start_soon(execute_async) - - # Return operation result with immediate content - logger.info(f"Returning async operation result for {tool_name}") - return types.ServerResult( - types.CallToolResult( - content=immediate_content, - operation=types.AsyncResultProperties( - token=operation.token, - keepAlive=operation.keep_alive, - ), - ) + # Dispatch in concurrency scope of the server to run between requests + server_scope.start_soon(execute_async) + + # Return operation result with immediate content + logger.info(f"Returning async operation result for {tool_name}") + return types.ServerResult( + types.CallToolResult( + content=immediate_content, + operation=types.AsyncResultProperties( + token=operation.token, + keepAlive=operation.keep_alive, + ), ) + ) # tool call results = await func(tool_name, arguments) @@ -690,7 +691,7 @@ def decorator( ): logger.debug("Registering handler for ProgressNotification") - async def handler(req: types.ProgressNotification): + async def handler(req: types.ProgressNotification, _: Any = None): await func( req.params.progressToken, req.params.progress, @@ -718,7 +719,7 @@ def decorator( ): logger.debug("Registering handler for CompleteRequest") - async def handler(req: types.CompleteRequest): + async def handler(req: types.CompleteRequest, _: Any = None): completion = await func(req.params.ref, req.params.argument, req.params.context) return types.ServerResult( types.CompleteResult( @@ -754,7 +755,7 @@ def get_operation_status(self): def decorator(func: Callable[[str], 
Awaitable[types.GetOperationStatusResult]]): logger.debug("Registering handler for GetOperationStatusRequest") - async def handler(req: types.GetOperationStatusRequest): + async def handler(req: types.GetOperationStatusRequest, _: Any = None): # Validate token and get operation operation = self._validate_operation_token(req.params.token) @@ -776,7 +777,7 @@ def get_operation_result(self): def decorator(func: Callable[[str], Awaitable[types.GetOperationPayloadResult]]): logger.debug("Registering handler for GetOperationPayloadRequest") - async def handler(req: types.GetOperationPayloadRequest): + async def handler(req: types.GetOperationPayloadRequest, _: Any = None): # Validate token and get operation operation = self._validate_operation_token(req.params.token) @@ -878,6 +879,7 @@ async def run( session, lifespan_context, raise_exceptions, + tg, ) finally: # Cancel session operations and stop cleanup task @@ -892,13 +894,16 @@ async def _handle_message( session: ServerSession, lifespan_context: LifespanResultT, raise_exceptions: bool = False, + server_scope: TaskGroup | None = None, ): with warnings.catch_warnings(record=True) as w: # TODO(Marcelo): We should be checking if message is Exception here. 
match message: # type: ignore[reportMatchNotExhaustive] case RequestResponder(request=types.ClientRequest(root=req)) as responder: with responder: - await self._handle_request(message, req, session, lifespan_context, raise_exceptions) + await self._handle_request( + message, req, session, lifespan_context, raise_exceptions, server_scope + ) case types.ClientNotification(root=notify): await self._handle_notification(notify) @@ -912,6 +917,7 @@ async def _handle_request( session: ServerSession, lifespan_context: LifespanResultT, raise_exceptions: bool, + server_scope: TaskGroup | None = None, ): logger.info("Processing request of type %s", type(req).__name__) if handler := self.request_handlers.get(type(req)): # type: ignore @@ -936,7 +942,7 @@ async def _handle_request( request=request_data, ) ) - response = await handler(req) + response = await handler(req, server_scope) # Track async operations for cancellation if isinstance(req, types.CallToolRequest): @@ -985,5 +991,5 @@ async def _handle_notification(self, notify: Any): logger.exception("Uncaught exception in notification handler") -async def _ping_handler(request: types.PingRequest) -> types.ServerResult: +async def _ping_handler(request: types.PingRequest, _: Any = None) -> types.ServerResult: return types.ServerResult(types.EmptyResult()) From 7255e4fdde43f4ec059ddd7ec316bb564624b9bc Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 3 Oct 2025 15:25:35 -0700 Subject: [PATCH 32/41] Remove vestigial session operation cancellation --- src/mcp/server/lowlevel/server.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index eaea6a0e9..d29eaa2fa 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -882,10 +882,7 @@ async def run( tg, ) finally: - # Cancel session operations and stop cleanup task - session_id = getattr(session, "session_id", None) - if session_id is not None: - 
self.async_operations.cancel_session_operations(session_id) + # Stop cleanup task await self.async_operations.stop_cleanup_task() async def _handle_message( From 17bef50f7519fec8d89d8d04b7b36d9b8579e5e2 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 6 Oct 2025 13:44:12 -0700 Subject: [PATCH 33/41] Fully switch AsyncOperationManager to anyio --- src/mcp/client/session.py | 15 +++--- src/mcp/server/lowlevel/server.py | 16 +++--- src/mcp/shared/async_operations.py | 72 +++++++-------------------- tests/shared/test_async_operations.py | 46 ----------------- 4 files changed, 33 insertions(+), 116 deletions(-) diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index 1cbd08f31..abb679c7e 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -1,6 +1,6 @@ import logging from datetime import timedelta -from typing import Any, Protocol +from typing import Any, Protocol, Self import anyio import anyio.lowlevel @@ -139,6 +139,12 @@ def __init__( self._tool_output_schemas: dict[str, dict[str, Any] | None] = {} self._operation_manager = ClientAsyncOperationManager() + async def __aenter__(self) -> Self: + await super().__aenter__() + self._task_group.start_soon(self._operation_manager.cleanup_loop) + self._exit_stack.push_async_callback(lambda: self._operation_manager.stop_cleanup_loop()) + return self + async def initialize(self) -> types.InitializeResult: sampling = types.SamplingCapability() if self._sampling_callback is not _default_sampling_callback else None elicitation = ( @@ -176,15 +182,8 @@ async def initialize(self) -> types.InitializeResult: await self.send_notification(types.ClientNotification(types.InitializedNotification())) - # Start cleanup task for operations - await self._operation_manager.start_cleanup_task() - return result - async def close(self) -> None: - """Clean up resources.""" - await self._operation_manager.stop_cleanup_task() - async def send_ping(self) -> types.EmptyResult: """Send a ping request.""" return 
await self.send_request( diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index d29eaa2fa..dd9008b6e 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -865,11 +865,10 @@ async def run( ) ) - # Start async operations cleanup task - await self.async_operations.start_cleanup_task() + async with anyio.create_task_group() as tg: + tg.start_soon(self.async_operations.cleanup_loop) - try: - async with anyio.create_task_group() as tg: + try: async for message in session.incoming_messages: logger.debug("Received message: %s", message) @@ -881,9 +880,12 @@ async def run( raise_exceptions, tg, ) - finally: - # Stop cleanup task - await self.async_operations.stop_cleanup_task() + finally: + # Stop cleanup loop before task group exits + await self.async_operations.stop_cleanup_loop() + + # Cancel all remaining tasks in the task group (cleanup loop and potentially LROs) + tg.cancel_scope.cancel() async def _handle_message( self, diff --git a/src/mcp/shared/async_operations.py b/src/mcp/shared/async_operations.py index 02a9048ec..1785cc65f 100644 --- a/src/mcp/shared/async_operations.py +++ b/src/mcp/shared/async_operations.py @@ -2,13 +2,15 @@ from __future__ import annotations -import asyncio +import logging import secrets import time from collections.abc import Callable from dataclasses import dataclass from typing import Any, Generic, TypeVar +import anyio + import mcp.types as types from mcp.types import AsyncOperationStatus @@ -66,9 +68,9 @@ class BaseOperationManager(Generic[OperationT]): def __init__(self, *, token_generator: Callable[[str | None], str] | None = None): self._operations: dict[str, OperationT] = {} - self._cleanup_task: asyncio.Task[None] | None = None self._cleanup_interval = 60 # Cleanup every 60 seconds self._token_generator = token_generator or self._default_token_generator + self._running = False def _default_token_generator(self, session_id: str | None = None) -> str: """Default 
token generation using random tokens.""" @@ -105,31 +107,20 @@ def cleanup_expired(self) -> int: self._remove_operation(token) return len(expired_tokens) - async def start_cleanup_task(self) -> None: - """Start the background cleanup task.""" - if self._cleanup_task is None: - self._cleanup_task = asyncio.create_task(self._cleanup_loop()) - - async def stop_cleanup_task(self) -> None: - """Stop the background cleanup task.""" - if self._cleanup_task: - self._cleanup_task.cancel() - try: - await self._cleanup_task - except asyncio.CancelledError: - pass - self._cleanup_task = None - - async def _cleanup_loop(self) -> None: + async def stop_cleanup_loop(self) -> None: + self._running = False + + async def cleanup_loop(self) -> None: """Background task to clean up expired operations.""" - while True: - try: - await asyncio.sleep(self._cleanup_interval) - count = self.cleanup_expired() - if count > 0: - print(f"Cleaned up {count} expired operations") - except asyncio.CancelledError: - break + if self._running: + return + self._running = True + + while self._running: + await anyio.sleep(self._cleanup_interval) + count = self.cleanup_expired() + if count > 0: + logging.debug(f"Cleaned up {count} expired operations") class ClientAsyncOperationManager(BaseOperationManager[ClientAsyncOperation]): @@ -292,32 +283,3 @@ def mark_input_completed(self, token: str) -> bool: operation.status = "working" return True - - async def start_cleanup_task(self) -> None: - """Start the background cleanup task.""" - if self._cleanup_task is not None: - return - - self._cleanup_task = asyncio.create_task(self._cleanup_loop()) - - async def stop_cleanup_task(self) -> None: - """Stop the background cleanup task.""" - if self._cleanup_task is not None: - self._cleanup_task.cancel() - try: - await self._cleanup_task - except asyncio.CancelledError: - pass - self._cleanup_task = None - - async def _cleanup_loop(self) -> None: - """Background cleanup loop.""" - while True: - try: - await 
asyncio.sleep(self._cleanup_interval) - self.cleanup_expired_operations() - except asyncio.CancelledError: - break - except Exception: - # Log error but continue cleanup loop - pass diff --git a/tests/shared/test_async_operations.py b/tests/shared/test_async_operations.py index 708283cef..8c349b6aa 100644 --- a/tests/shared/test_async_operations.py +++ b/tests/shared/test_async_operations.py @@ -5,8 +5,6 @@ from typing import Any, cast from unittest.mock import Mock -import pytest - import mcp.types as types from mcp.shared.async_operations import ServerAsyncOperation, ServerAsyncOperationManager from mcp.types import AsyncOperationStatus @@ -213,50 +211,6 @@ def test_concurrent_operations(self): removed_count = manager.cleanup_expired_operations() assert removed_count == 25 and len(manager._operations) == 25 - @pytest.mark.anyio - async def test_cleanup_task_lifecycle(self): - """Test background cleanup task management.""" - manager = ServerAsyncOperationManager() - - await manager.start_cleanup_task() - assert manager._cleanup_task is not None and not manager._cleanup_task.done() - - # Starting again should be no-op - await manager.start_cleanup_task() - - await manager.stop_cleanup_task() - assert manager._cleanup_task is None - - def test_dependency_injection_and_integration(self): - """Test AsyncOperationManager dependency injection and server integration.""" - from mcp.server.fastmcp import FastMCP - from mcp.server.lowlevel import Server - - # Test custom manager injection - custom_manager = ServerAsyncOperationManager() - operation = custom_manager.create_operation("shared_tool", {"data": "shared"}, session_id="session1") - - # Test FastMCP integration - fastmcp = FastMCP("FastMCP", async_operations=custom_manager) - assert fastmcp._async_operations is custom_manager - assert fastmcp._async_operations.get_operation(operation.token) is operation - - # Test lowlevel Server integration - lowlevel = Server("LowLevel", async_operations=custom_manager) - assert 
lowlevel.async_operations is custom_manager - assert lowlevel.async_operations.get_operation(operation.token) is operation - - # Test default creation - default_fastmcp = FastMCP("Default") - default_server = Server("Default") - assert isinstance(default_fastmcp._async_operations, ServerAsyncOperationManager) - assert isinstance(default_server.async_operations, ServerAsyncOperationManager) - assert default_fastmcp._async_operations is not custom_manager - - # Test shared manager between servers - new_op = fastmcp._async_operations.create_operation("new_tool", {}, session_id="session2") - assert lowlevel.async_operations.get_operation(new_op.token) is new_op - class TestAsyncOperation: """Test AsyncOperation dataclass.""" From 17fc21e7ff7af17029575634aa63163dc5ae4239 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 6 Oct 2025 13:50:05 -0700 Subject: [PATCH 34/41] import Self from typing_extensions for Python 3.10 --- src/mcp/client/session.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index abb679c7e..e6d2d45f2 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -1,12 +1,13 @@ import logging from datetime import timedelta -from typing import Any, Protocol, Self +from typing import Any, Protocol import anyio import anyio.lowlevel from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream from jsonschema import SchemaError, ValidationError, validate from pydantic import AnyUrl, TypeAdapter +from typing_extensions import Self import mcp.types as types from mcp.shared.async_operations import ClientAsyncOperationManager From 428e7a4538529580b51a87e3463492be6b3ee684 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 6 Oct 2025 14:29:38 -0700 Subject: [PATCH 35/41] Fix sync/async detection and add failing test for reconnects --- src/mcp/server/fastmcp/server.py | 16 +++---- tests/server/fastmcp/test_integration.py | 54 ++++++++++++++++++++++++ 
2 files changed, 60 insertions(+), 10 deletions(-) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index c785188f0..2a76d4d63 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -355,24 +355,20 @@ def _get_invocation_mode(self, info: Tool, client_supports_async: bool) -> Liter # New clients see the invocationMode field modes = info.invocation_modes - if self._is_async_only(modes): - return "async" # Async-only - if self._is_sync_only(modes) or self._is_hybrid(modes): - return "sync" # Hybrid or explicit sync + if self._is_async_capable(modes): + return "async" # Hybrid or explicit async + if self._is_sync_only(modes): + return "sync" return None - def _is_async_only(self, modes: list[InvocationMode]) -> bool: + def _is_async_capable(self, modes: list[InvocationMode]) -> bool: """Return True if invocation_modes is async-only.""" - return modes == ["async"] + return "async" in modes def _is_sync_only(self, modes: list[InvocationMode]) -> bool: """Return True if invocation_modes is sync-only.""" return modes == ["sync"] - def _is_hybrid(self, modes: list[InvocationMode]) -> bool: - """Return True if invocation_modes contains both sync and async.""" - return "sync" in modes and "async" in modes and len(modes) > 1 - async def list_tools(self) -> list[MCPTool]: """List all available tools.""" tools = self._tool_manager.list_tools() diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index 7ef0d5674..6c311175c 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -757,6 +757,7 @@ async def test_async_tool_basic(server_transport: str, server_url: str) -> None: @pytest.mark.parametrize( "server_transport", [ + # ("async_tool_basic", "sse"), ("async_tool_basic", "streamable-http"), ], indirect=True, @@ -795,6 +796,59 @@ async def test_async_tool_basic_legacy_protocol(server_transport: str, server_ur assert 
"Processed: HELLO" in hybrid_result.content[0].text +@pytest.mark.anyio +@pytest.mark.parametrize( + "server_transport", + [ + # ("async_tool_basic", "sse"), + ("async_tool_basic", "streamable-http"), + ], + indirect=True, +) +async def test_async_tool_reconnection(server_transport: str, server_url: str) -> None: + """Test that async operations can be retrieved after reconnecting with a new session.""" + transport = server_transport + client_cm1 = create_client_for_transport(transport, server_url) + + # Start async operation in first session + async with client_cm1 as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession(read_stream, write_stream, protocol_version="next") as session1: + await session1.initialize() + + # Start async operation + result = await session1.call_tool("process_text", {"text": "test data"}) + assert result.operation is not None + token = result.operation.token + + # Reconnect with new session and retrieve result + client_cm2 = create_client_for_transport(transport, server_url) + async with client_cm2 as client_streams: + read_stream, write_stream = unpack_streams(client_streams) + async with ClientSession(read_stream, write_stream, protocol_version="next") as session2: + await session2.initialize() + + # Poll for completion in new session + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session2.get_operation_status(token) + if status.status == "completed": + final_result = await session2.get_operation_result(token) + assert not final_result.result.isError + assert len(final_result.result.content) == 1 + content = final_result.result.content[0] + assert isinstance(content, TextContent) + break + elif status.status == "failed": + pytest.fail(f"Operation failed: {status.error}") + + attempt += 1 + await anyio.sleep(0.5) + else: + pytest.fail("Async operation timed out") + + # Test structured output example @pytest.mark.anyio @pytest.mark.parametrize( From 
5f422e71919c1db81eef545bed54284db31f9f34 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 6 Oct 2025 14:33:51 -0700 Subject: [PATCH 36/41] Fix async test assertion --- README.md | 10 +++++++ examples/snippets/servers/async_tool_basic.py | 10 +++++++ tests/server/fastmcp/test_integration.py | 29 +++++++++++++++---- 3 files changed, 44 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 1eaad9270..0589f4450 100644 --- a/README.md +++ b/README.md @@ -535,6 +535,16 @@ async def process_text(text: str, ctx: Context[ServerSession, None]) -> str: return f"Processed: {text.upper()}" +@mcp.tool() +async def process_text_sync(text: str, ctx: Context[ServerSession, None]) -> str: + """Process text in sync mode only.""" + + await ctx.info(f"Processing text: {text[:20]}...") + await anyio.sleep(0.3) + + return f"Processed: {text.upper()}" + + if __name__ == "__main__": mcp.run() ``` diff --git a/examples/snippets/servers/async_tool_basic.py b/examples/snippets/servers/async_tool_basic.py index 562e18e23..2ff3c4168 100644 --- a/examples/snippets/servers/async_tool_basic.py +++ b/examples/snippets/servers/async_tool_basic.py @@ -38,5 +38,15 @@ async def process_text(text: str, ctx: Context[ServerSession, None]) -> str: return f"Processed: {text.upper()}" +@mcp.tool() +async def process_text_sync(text: str, ctx: Context[ServerSession, None]) -> str: + """Process text in sync mode only.""" + + await ctx.info(f"Processing text: {text[:20]}...") + await anyio.sleep(0.3) + + return f"Processed: {text.upper()}" + + if __name__ == "__main__": mcp.run() diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index 6c311175c..30f2139cf 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -714,7 +714,7 @@ async def test_async_tool_basic(server_transport: str, server_url: str) -> None: assert result.serverInfo.name == "Async Tool Basic" # Test sync tool (should work normally) - 
sync_result = await session.call_tool("process_text", {"text": "hello"}) + sync_result = await session.call_tool("process_text_sync", {"text": "hello"}) assert len(sync_result.content) == 1 assert isinstance(sync_result.content[0], TextContent) assert sync_result.content[0].text == "Processed: HELLO" @@ -745,11 +745,30 @@ async def test_async_tool_basic(server_transport: str, server_url: str) -> None: else: pytest.fail("Async operation timed out") - # Test hybrid tool (process_text can work in sync or async mode) + # Test hybrid tool (process_text should only run in async mode in this version) hybrid_result = await session.call_tool("process_text", {"text": "world"}) - assert len(hybrid_result.content) == 1 - assert isinstance(hybrid_result.content[0], TextContent) - assert "Processed: WORLD" in hybrid_result.content[0].text + assert hybrid_result.operation is not None + token = hybrid_result.operation.token + + # Poll for completion with timeout + max_attempts = 20 + attempt = 0 + while attempt < max_attempts: + status = await session.get_operation_status(token) + if status.status == "completed": + final_hybrid_result = await session.get_operation_result(token) + assert not final_hybrid_result.result.isError + assert len(final_hybrid_result.result.content) == 1 + assert isinstance(final_hybrid_result.result.content[0], TextContent) + assert "Processed: WORLD" in final_hybrid_result.result.content[0].text + break + elif status.status == "failed": + pytest.fail(f"Async operation failed: {status.error}") + + attempt += 1 + await anyio.sleep(0.5) + else: + pytest.fail("Async operation timed out") # Test async tools example with legacy protocol From 40cf77ee4c683a2d8d3995976699485c4bf2634f Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 6 Oct 2025 18:04:02 -0700 Subject: [PATCH 37/41] Convert ServerAsyncOperationManager into async context manager --- src/mcp/server/fastmcp/server.py | 54 +++++++++++++++----- src/mcp/server/lowlevel/server.py | 39 +++++++------- 
src/mcp/shared/async_operations.py | 65 +++++++++++++++++++++++- src/mcp/shared/memory.py | 53 +++++++++---------- tests/server/fastmcp/test_integration.py | 1 - 5 files changed, 148 insertions(+), 64 deletions(-) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 2a76d4d63..55819c202 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -2,6 +2,7 @@ from __future__ import annotations as _annotations +import contextlib import inspect import re from collections.abc import AsyncIterator, Awaitable, Callable, Collection, Iterable, Sequence @@ -845,14 +846,21 @@ def decorator( return decorator + @contextlib.asynccontextmanager + async def _stdio_lifespan(self) -> AsyncIterator[None]: + """Lifespan that manages stdio operations.""" + async with self._async_operations.run(): + yield + async def run_stdio_async(self) -> None: """Run the server using stdio transport.""" async with stdio_server() as (read_stream, write_stream): - await self._mcp_server.run( - read_stream, - write_stream, - self._mcp_server.create_initialization_options(), - ) + async with self._stdio_lifespan(): + await self._mcp_server.run( + read_stream, + write_stream, + self._mcp_server.create_initialization_options(), + ) async def run_sse_async(self, mount_path: str | None = None) -> None: """Run the server using SSE transport.""" @@ -910,6 +918,12 @@ def _normalize_path(self, mount_path: str, endpoint: str) -> str: # Combine paths return mount_path + endpoint + @contextlib.asynccontextmanager + async def _sse_lifespan(self) -> AsyncIterator[None]: + """Lifespan that manages SSE operations.""" + async with self._async_operations.run(): + yield + def sse_app(self, mount_path: str | None = None) -> Starlette: """Return an instance of the SSE server app.""" from starlette.middleware import Middleware @@ -1040,7 +1054,16 @@ async def sse_endpoint(request: Request) -> Response: routes.extend(self._custom_starlette_routes) # Create 
Starlette app with routes and middleware - return Starlette(debug=self.settings.debug, routes=routes, middleware=middleware) + return Starlette( + debug=self.settings.debug, routes=routes, middleware=middleware, lifespan=lambda app: self._sse_lifespan() + ) + + @contextlib.asynccontextmanager + async def _streamable_http_lifespan(self) -> AsyncIterator[None]: + """Lifespan that manages Streamable HTTP operations.""" + async with self.session_manager.run(): + async with self._async_operations.run(): + yield def streamable_http_app(self) -> Starlette: """Return an instance of the StreamableHTTP server app.""" @@ -1135,7 +1158,7 @@ def streamable_http_app(self) -> Starlette: debug=self.settings.debug, routes=routes, middleware=middleware, - lifespan=lambda app: self.session_manager.run(), + lifespan=lambda app: self._streamable_http_lifespan(), ) async def list_prompts(self) -> list[MCPPrompt]: @@ -1337,12 +1360,17 @@ async def log( logger_name: Optional logger name **extra: Additional structured data to include """ - await self.request_context.session.send_log_message( - level=level, - data=message, - logger=logger_name, - related_request_id=self.request_id, - ) + try: + await self.request_context.session.send_log_message( + level=level, + data=message, + logger=logger_name, + related_request_id=self.request_id, + ) + except Exception: + # Session might be closed (e.g., client disconnected) + logger.warning(f"Failed to send log message to client (session closed?): {message}") + pass @property def client_id(self) -> str | None: diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index dd9008b6e..2c05e9d2d 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -95,6 +95,7 @@ async def main(): from mcp.types import Operation, RequestId logger = logging.getLogger(__name__) +logger.setLevel(logging.DEBUG) LifespanResultT = TypeVar("LifespanResultT", default=Any) RequestT = TypeVar("RequestT", default=Any) 
@@ -564,8 +565,11 @@ async def execute_async(): logger.exception(f"Async execution failed for {tool_name}") self.async_operations.fail_operation(operation.token, str(e)) - # Dispatch in concurrency scope of the server to run between requests - server_scope.start_soon(execute_async) + # Start task directly in independent task group + current_request_context = request_ctx.get() + self.async_operations.start_task( + operation.token, execute_async, current_request_context, request_ctx + ) # Return operation result with immediate content logger.info(f"Returning async operation result for {tool_name}") @@ -866,26 +870,17 @@ async def run( ) async with anyio.create_task_group() as tg: - tg.start_soon(self.async_operations.cleanup_loop) - - try: - async for message in session.incoming_messages: - logger.debug("Received message: %s", message) - - tg.start_soon( - self._handle_message, - message, - session, - lifespan_context, - raise_exceptions, - tg, - ) - finally: - # Stop cleanup loop before task group exits - await self.async_operations.stop_cleanup_loop() - - # Cancel all remaining tasks in the task group (cleanup loop and potentially LROs) - tg.cancel_scope.cancel() + async for message in session.incoming_messages: + logger.debug("Received message: %s", message) + + tg.start_soon( + self._handle_message, + message, + session, + lifespan_context, + raise_exceptions, + tg, + ) async def _handle_message( self, diff --git a/src/mcp/shared/async_operations.py b/src/mcp/shared/async_operations.py index 1785cc65f..1c79d11b3 100644 --- a/src/mcp/shared/async_operations.py +++ b/src/mcp/shared/async_operations.py @@ -2,18 +2,22 @@ from __future__ import annotations +import contextlib import logging import secrets import time -from collections.abc import Callable +from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import dataclass from typing import Any, Generic, TypeVar import anyio +from anyio.abc import TaskGroup import mcp.types as types from 
mcp.types import AsyncOperationStatus +logger = logging.getLogger(__name__) + @dataclass class ClientAsyncOperation: @@ -120,7 +124,7 @@ async def cleanup_loop(self) -> None: await anyio.sleep(self._cleanup_interval) count = self.cleanup_expired() if count > 0: - logging.debug(f"Cleaned up {count} expired operations") + logger.debug(f"Cleaned up {count} expired operations") class ClientAsyncOperationManager(BaseOperationManager[ClientAsyncOperation]): @@ -145,6 +149,63 @@ def get_tool_name(self, token: str) -> str | None: class ServerAsyncOperationManager(BaseOperationManager[ServerAsyncOperation]): """Manages async tool operations with token-based tracking.""" + def __init__(self, *, token_generator: Callable[[str | None], str] | None = None): + super().__init__(token_generator=token_generator) + self._task_group: TaskGroup | None = None + self._run_lock = anyio.Lock() + self._running = False + + @contextlib.asynccontextmanager + async def run(self) -> AsyncIterator[None]: + """Run the async operations manager with its own task group.""" + # Thread-safe check to ensure run() is only called once + async with self._run_lock: + if self._running: + raise RuntimeError("ServerAsyncOperationManager.run() is already running.") + self._running = True + + async with anyio.create_task_group() as tg: + self._task_group = tg + logger.info("ServerAsyncOperationManager started") + # Start cleanup loop + tg.start_soon(self.cleanup_loop) + try: + yield + finally: + logger.info("ServerAsyncOperationManager shutting down") + # Stop cleanup loop gracefully + await self.stop_cleanup_loop() + # Cancel task group to stop all spawned tasks + tg.cancel_scope.cancel() + self._task_group = None + self._running = False + + def start_task( + self, + token: str, + task_func: Callable[[], Awaitable[None]], + request_context: Any = None, + request_ctx_var: Any = None, + ) -> None: + """Start an async task immediately in the independent task group.""" + if self._task_group is None: + raise 
RuntimeError("Task group not started. Call run() first.") + + async def run_task_with_context(): + context_token = None + try: + if request_context and request_ctx_var: + context_token = request_ctx_var.set(request_context) + await task_func() + except Exception: + # Handle task failures gracefully + pass + finally: + if context_token and request_ctx_var: + request_ctx_var.reset(context_token) + + self._task_group.start_soon(run_task_with_context, name=f"lro_{token}") + def create_operation( self, tool_name: str, diff --git a/src/mcp/shared/memory.py b/src/mcp/shared/memory.py index 2bbc9e580..e822d980d 100644 --- a/src/mcp/shared/memory.py +++ b/src/mcp/shared/memory.py @@ -71,30 +71,31 @@ async def create_connected_server_and_client_session( server_read, server_write = server_streams # Create a cancel scope for the server task - async with anyio.create_task_group() as tg: - tg.start_soon( - lambda: server.run( - server_read, - server_write, - server.create_initialization_options(), - raise_exceptions=raise_exceptions, + async with server.async_operations.run(): + async with anyio.create_task_group() as tg: + tg.start_soon( + lambda: server.run( + server_read, + server_write, + server.create_initialization_options(), + raise_exceptions=raise_exceptions, + ) ) - ) - - try: - async with ClientSession( - read_stream=client_read, - write_stream=client_write, - read_timeout_seconds=read_timeout_seconds, - sampling_callback=sampling_callback, - list_roots_callback=list_roots_callback, - logging_callback=logging_callback, - message_handler=message_handler, - client_info=client_info, - elicitation_callback=elicitation_callback, - protocol_version=protocol_version, - ) as client_session: - await client_session.initialize() - yield client_session - finally: - tg.cancel_scope.cancel() + + try: + async with ClientSession( + read_stream=client_read, + write_stream=client_write, + read_timeout_seconds=read_timeout_seconds, + sampling_callback=sampling_callback, + 
list_roots_callback=list_roots_callback, + logging_callback=logging_callback, + message_handler=message_handler, + client_info=client_info, + elicitation_callback=elicitation_callback, + protocol_version=protocol_version, + ) as client_session: + await client_session.initialize() + yield client_session + finally: + tg.cancel_scope.cancel() diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index 30f2139cf..e7b2e670e 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -1013,7 +1013,6 @@ async def test_immediate_result_backward_compatibility(server_transport: str, se await anyio.sleep(0.5) else: pytest.fail("Async operation timed out") - await anyio.sleep(0.01) # Test async progress notifications From 272b2380ebc4e536e56d811c588f3fdc5831f17f Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Mon, 6 Oct 2025 18:06:50 -0700 Subject: [PATCH 38/41] Tidy up debug/test cruft --- src/mcp/server/fastmcp/server.py | 4 ++-- src/mcp/server/lowlevel/server.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 55819c202..27b127dfd 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -1367,9 +1367,9 @@ async def log( logger=logger_name, related_request_id=self.request_id, ) - except Exception: + except Exception as e: # Session might be closed (e.g., client disconnected) - logger.warning(f"Failed to send log message to client (session closed?): {message}") + logger.warning(f"Failed to send log message to client (session closed?): {e}") pass @property diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 2c05e9d2d..bfd156349 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -95,7 +95,6 @@ async def main(): from mcp.types import Operation, RequestId logger = logging.getLogger(__name__) 
-logger.setLevel(logging.DEBUG) LifespanResultT = TypeVar("LifespanResultT", default=Any) RequestT = TypeVar("RequestT", default=Any) From 3701594ac4e959af414ad288e2b98683ffa0cac9 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Tue, 7 Oct 2025 14:48:23 -0700 Subject: [PATCH 39/41] Split ServerAsyncOperationManager into AsyncOperationStore and AsyncOperationBroker components --- README.md | 34 ++ .../mcp_async_reconnect_client/client.py | 2 + .../sqlite-async-operations/.gitignore | 1 + .../servers/sqlite-async-operations/README.md | 36 ++ .../mcp_sqlite_async_operations/__init__.py | 1 + .../mcp_sqlite_async_operations/__main__.py | 4 + .../mcp_sqlite_async_operations/server.py | 371 +++++++++++++++++ .../sqlite-async-operations/pyproject.toml | 33 ++ src/mcp/client/session.py | 1 + src/mcp/server/fastmcp/server.py | 17 +- src/mcp/server/lowlevel/server.py | 101 +++-- src/mcp/shared/async_operations.py | 390 +++++++++++++----- src/mcp/shared/context.py | 18 +- tests/issues/test_176_progress_token.py | 1 + .../server/test_lowlevel_async_operations.py | 94 ++--- tests/shared/test_async_operations.py | 142 +++---- tests/shared/test_progress_notifications.py | 1 + uv.lock | 34 ++ 18 files changed, 1000 insertions(+), 281 deletions(-) create mode 100644 examples/servers/sqlite-async-operations/.gitignore create mode 100644 examples/servers/sqlite-async-operations/README.md create mode 100644 examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__init__.py create mode 100644 examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__main__.py create mode 100644 examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py create mode 100644 examples/servers/sqlite-async-operations/pyproject.toml diff --git a/README.md b/README.md index 0589f4450..39769e4bf 100644 --- a/README.md +++ b/README.md @@ -1661,6 +1661,40 @@ For more information on mounting applications in Starlette, see the [Starlette d ## Advanced Usage +### 
Persistent Async Operations + +For production deployments, you may want async operations to survive server restarts. The `ServerAsyncOperationManager` uses pluggable `AsyncOperationStore` and `AsyncOperationBroker` components to handle operation persistence and task queuing. + +#### Operation Lifecycle + +Async operations follow this lifecycle: + +1. **Submitted** - Operation token generated and stored +2. **Working** - Task begins execution +3. **Completed/Failed/Cancelled** - Operation reaches terminal state with results + +#### Custom Store and Broker + +```python +from mcp.server.fastmcp import FastMCP +from mcp.shared.async_operations import ServerAsyncOperationManager + +# Create custom store and broker implementations +custom_store = MyAsyncOperationStore() +custom_broker = MyAsyncOperationBroker() + +# Create operation manager with custom components +operation_manager = ServerAsyncOperationManager( + store=custom_store, + broker=custom_broker +) + +# Use with FastMCP +mcp = FastMCP("My Server", async_operations=operation_manager) +``` + +For a complete SQLite-based implementation example, see [`examples/servers/sqlite-async-operations/`](examples/servers/sqlite-async-operations/). + ### Low-Level Server For more control, you can use the low-level server implementation directly. 
This gives you full access to the protocol and allows you to customize every aspect of your server, including lifecycle management through the lifespan API: diff --git a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py index 24b8d8a03..6fa3af9ce 100644 --- a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py +++ b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py @@ -10,6 +10,8 @@ async def call_async_tool(session: ClientSession, token: str | None): if not token: result = await session.call_tool("fetch_website", arguments={"url": "https://modelcontextprotocol.io"}) + if result.isError: + raise RuntimeError(f"Error calling tool: {result}") assert result.operation token = result.operation.token print(f"Operation started with token: {token}") diff --git a/examples/servers/sqlite-async-operations/.gitignore b/examples/servers/sqlite-async-operations/.gitignore new file mode 100644 index 000000000..3997beadf --- /dev/null +++ b/examples/servers/sqlite-async-operations/.gitignore @@ -0,0 +1 @@ +*.db \ No newline at end of file diff --git a/examples/servers/sqlite-async-operations/README.md b/examples/servers/sqlite-async-operations/README.md new file mode 100644 index 000000000..efbcd7775 --- /dev/null +++ b/examples/servers/sqlite-async-operations/README.md @@ -0,0 +1,36 @@ +# SQLite Async Operations Example + +This example demonstrates how to implement custom async operations storage and task queuing using SQLite with the MCP Python SDK. 
+ +## Architecture + +The example showcases the pluggable architecture of the async operations system: + +- `SQLiteAsyncOperationStore`: Custom implementation that persists operations to SQLite +- `SQLiteAsyncOperationBroker`: Custom implementation that persists pending tasks to SQLite +- `ServerAsyncOperationManager`: Uses both custom store and broker for full persistence +- `FastMCP`: Configured with the custom async operations manager + +## Usage + +Install and run the server: + +```bash +# Using stdio transport (default) +# Run with default SQLite database +uv run mcp-sqlite-async-operations + +# Run with custom database path +uv run mcp-sqlite-async-operations --db-path /path/to/custom.db + +# Using streamable-http transport on custom port +uv run mcp-sqlite-async-operations --transport streamable-http --port 8000 +``` + +## Testing Persistent Async Operations + +1. Start the server +2. Call one of the async tools (`long_computation` or `fetch_data`) +3. **Restart the server while the operation is running** +4. The operation will automatically resume and complete +5. 
Use the operation token to check status and retrieve results diff --git a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__init__.py b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__init__.py new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__init__.py @@ -0,0 +1 @@ + diff --git a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__main__.py b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__main__.py new file mode 100644 index 000000000..f5f6e402d --- /dev/null +++ b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/__main__.py @@ -0,0 +1,4 @@ +from .server import main + +if __name__ == "__main__": + main() diff --git a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py new file mode 100644 index 000000000..033b4abcb --- /dev/null +++ b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py @@ -0,0 +1,371 @@ +"""SQLite-based async operations example server.""" + +from __future__ import annotations + +import json +import sqlite3 +import time +from collections import deque +from typing import Any + +import anyio +import click +import uvicorn +from mcp import types +from mcp.server.fastmcp import FastMCP +from mcp.server.session import ServerSession +from mcp.shared._httpx_utils import create_mcp_http_client +from mcp.shared.async_operations import ( + AsyncOperationBroker, + AsyncOperationStore, + PendingAsyncTask, + ServerAsyncOperation, + ServerAsyncOperationManager, +) +from mcp.shared.context import RequestContext +from mcp.types import AsyncOperationStatus, CallToolResult + + +class SQLiteAsyncOperationStore(AsyncOperationStore): + """SQLite-based implementation of AsyncOperationStore.""" + + def __init__(self, db_path: str = 
"async_operations.db"): + self.db_path = db_path + self._init_db() + + def _init_db(self): + """Initialize the SQLite database.""" + with sqlite3.connect(self.db_path) as conn: + conn.execute(""" + CREATE TABLE IF NOT EXISTS operations ( + token TEXT PRIMARY KEY, + tool_name TEXT NOT NULL, + arguments TEXT NOT NULL, + status TEXT NOT NULL, + created_at REAL NOT NULL, + keep_alive INTEGER NOT NULL, + resolved_at REAL, + session_id TEXT, + result TEXT, + error TEXT + ) + """) + conn.commit() + + async def get_operation(self, token: str) -> ServerAsyncOperation | None: + """Get operation by token.""" + with sqlite3.connect(self.db_path) as conn: + conn.row_factory = sqlite3.Row + cursor = conn.execute("SELECT * FROM operations WHERE token = ?", (token,)) + row = cursor.fetchone() + if not row: + return None + + # Reconstruct CallToolResult from stored JSON + result = None + if row["result"]: + result_data = json.loads(row["result"]) + result = CallToolResult( + content=result_data.get("content", []), + structuredContent=result_data.get("structuredContent"), + isError=result_data.get("isError", False), + ) + + return ServerAsyncOperation( + token=row["token"], + tool_name=row["tool_name"], + arguments=json.loads(row["arguments"]), + status=row["status"], + created_at=row["created_at"], + keep_alive=row["keep_alive"], + resolved_at=row["resolved_at"], + session_id=row["session_id"], + result=result, + error=row["error"], + ) + + async def store_operation(self, operation: ServerAsyncOperation) -> None: + """Store an operation.""" + # Serialize result using Pydantic model_dump() + result_json = None + if operation.result: + try: + result_dict = operation.result.model_dump() + result_json = json.dumps(result_dict) + except (TypeError, ValueError): + # Skip if not serializable + result_json = None + + with sqlite3.connect(self.db_path) as conn: + conn.execute( + """ + INSERT OR REPLACE INTO operations + (token, tool_name, arguments, status, created_at, keep_alive, + 
resolved_at, session_id, result, error) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + """, + ( + operation.token, + operation.tool_name, + json.dumps(operation.arguments), + operation.status, + operation.created_at, + operation.keep_alive, + operation.resolved_at, + operation.session_id, + result_json, + operation.error, + ), + ) + conn.commit() + + async def update_status(self, token: str, status: AsyncOperationStatus) -> bool: + """Update operation status.""" + operation = await self.get_operation(token) + if not operation: + return False + + # Don't allow transitions from terminal states + if operation.is_terminal: + return False + + resolved_at = time.time() if status in ("completed", "failed", "canceled") else None + + with sqlite3.connect(self.db_path) as conn: + cursor = conn.execute( + """ + UPDATE operations + SET status = ?, resolved_at = ? + WHERE token = ? + """, + (status, resolved_at, token), + ) + conn.commit() + + return cursor.rowcount > 0 + + async def complete_operation_with_result(self, token: str, result: CallToolResult) -> bool: + """Complete operation with result.""" + operation = await self.get_operation(token) + if not operation or operation.is_terminal: + return False + + # Serialize result using Pydantic model_dump() + result_json = None + try: + result_dict = result.model_dump() + result_json = json.dumps(result_dict) + except (TypeError, ValueError): + # Skip if not serializable + result_json = None + + resolved_at = time.time() + with sqlite3.connect(self.db_path) as conn: + cursor = conn.execute( + """ + UPDATE operations + SET status = 'completed', result = ?, resolved_at = ? + WHERE token = ? 
+ """, + (result_json, resolved_at, token), + ) + conn.commit() + return cursor.rowcount > 0 + + async def fail_operation_with_error(self, token: str, error: str) -> bool: + """Fail operation with error.""" + operation = await self.get_operation(token) + if not operation or operation.is_terminal: + return False + + resolved_at = time.time() + with sqlite3.connect(self.db_path) as conn: + cursor = conn.execute( + """ + UPDATE operations + SET status = 'failed', error = ?, resolved_at = ? + WHERE token = ? + """, + (error, resolved_at, token), + ) + conn.commit() + return cursor.rowcount > 0 + + async def cleanup_expired(self) -> int: + """Remove expired operations and return count.""" + current_time = time.time() + with sqlite3.connect(self.db_path) as conn: + cursor = conn.execute( + """ + DELETE FROM operations + WHERE resolved_at IS NOT NULL + AND (resolved_at + keep_alive) < ? + """, + (current_time,), + ) + conn.commit() + return cursor.rowcount + + +class SQLiteAsyncOperationBroker(AsyncOperationBroker): + """SQLite-based implementation of AsyncOperationBroker for persistent task queuing.""" + + def __init__(self, db_path: str = "async_operations.db"): + self.db_path = db_path + self._task_queue: deque[PendingAsyncTask] = deque() + self._init_db() + # Load persisted tasks on startup + self._load_persisted_tasks_sync() + + def _load_persisted_tasks_sync(self): + """Load persisted tasks from SQLite on startup (sync version for __init__).""" + with sqlite3.connect(self.db_path) as conn: + conn.row_factory = sqlite3.Row + cursor = conn.execute(""" + SELECT token, tool_name, arguments, request_id, operation_token, meta, supports_async + FROM pending_tasks ORDER BY created_at + """) + for row in cursor.fetchall(): + # Check if operation is already terminal - don't queue if so + with sqlite3.connect(self.db_path) as op_conn: + op_conn.row_factory = sqlite3.Row + op_cursor = op_conn.execute("SELECT status FROM operations WHERE token = ?", (row["token"],)) + op_row = 
op_cursor.fetchone() + if op_row and op_row["status"] in ("completed", "failed", "canceled"): + continue + + # Reconstruct serializable parts of RequestContext + from mcp.shared.context import SerializableRequestContext + + serializable_context = None + if row["request_id"]: + serializable_context = SerializableRequestContext( + request_id=row["request_id"], + operation_token=row["operation_token"], + meta=json.loads(row["meta"]) if row["meta"] else None, + supports_async=bool(row["supports_async"]), + ) + + task = PendingAsyncTask( + token=row["token"], + tool_name=row["tool_name"], + arguments=json.loads(row["arguments"]), + request_context=serializable_context, + ) + self._task_queue.append(task) + + def _init_db(self): + """Initialize the SQLite database for pending tasks.""" + with sqlite3.connect(self.db_path) as conn: + conn.execute(""" + CREATE TABLE IF NOT EXISTS pending_tasks ( + token TEXT PRIMARY KEY, + tool_name TEXT NOT NULL, + arguments TEXT NOT NULL, + request_id TEXT, + operation_token TEXT, + meta TEXT, + request_data TEXT, + supports_async INTEGER DEFAULT 0, + created_at REAL NOT NULL + ) + """) + conn.commit() + + async def enqueue_task( + self, + token: str, + tool_name: str, + arguments: dict[str, Any], + request_context: RequestContext[ServerSession, Any, Any], + ) -> None: + """Enqueue a task for execution and persist to SQLite.""" + # Store in memory queue for immediate processing + task = PendingAsyncTask(token=token, tool_name=tool_name, arguments=arguments, request_context=request_context) + self._task_queue.append(task) + + # Extract serializable parts for persistence + serializable = request_context.to_serializable() + request_id = serializable.request_id + operation_token = serializable.operation_token + supports_async = serializable.supports_async + meta = json.dumps(serializable.meta.model_dump()) if serializable.meta else None + + # Persist to SQLite for restart recovery + with sqlite3.connect(self.db_path) as conn: + conn.execute( 
+ """ + INSERT OR REPLACE INTO pending_tasks + (token, tool_name, arguments, request_id, operation_token, meta, + supports_async, created_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) + """, + ( + token, + tool_name, + json.dumps(arguments), + request_id, + operation_token, + meta, + int(supports_async), + time.time(), + ), + ) + conn.commit() + + async def get_pending_tasks(self) -> list[PendingAsyncTask]: + """Get all pending tasks without clearing them.""" + return list(self._task_queue) + + async def acknowledge_task(self, token: str) -> None: + """Acknowledge that a task has been dispatched (but keep it in SQLite until completion).""" + # Remove from memory queue only - keep in SQLite until operation completes + self._task_queue = deque(task for task in self._task_queue if task.token != token) + + async def complete_task(self, token: str) -> None: + """Remove a completed task from persistent storage.""" + with sqlite3.connect(self.db_path) as conn: + conn.execute("DELETE FROM pending_tasks WHERE token = ?", (token,)) + conn.commit() + + +@click.command() +@click.option("--port", default=8000, help="Port to listen on for HTTP") +@click.option( + "--transport", + type=click.Choice(["stdio", "streamable-http"]), + default="stdio", + help="Transport type", +) +@click.option("--db-path", default="async_operations.db", help="SQLite database path") +def main(port: int, transport: str, db_path: str): + """Run the SQLite async operations example server.""" + # Create components with specified database path + broker = SQLiteAsyncOperationBroker(db_path) + store = SQLiteAsyncOperationStore(db_path) # No broker reference needed + manager = ServerAsyncOperationManager(store=store, broker=broker) + mcp = FastMCP("SQLite Async Operations Demo", async_operations=manager) + + @mcp.tool(invocation_modes=["async"]) + async def fetch_website( + url: str, + ) -> list[types.ContentBlock]: + headers = {"User-Agent": "MCP Test Server (github.com/modelcontextprotocol/python-sdk)"} + async 
with create_mcp_http_client(headers=headers) as client: + await anyio.sleep(10) + response = await client.get(url) + response.raise_for_status() + return [types.TextContent(type="text", text=response.text)] + + print(f"Starting server with SQLite database: {db_path}") + print("Pending tasks will be automatically restarted on server restart!") + + if transport == "stdio": + mcp.run(transport="stdio") + elif transport == "streamable-http": + app = mcp.streamable_http_app() + server = uvicorn.Server(config=uvicorn.Config(app=app, host="127.0.0.1", port=port, log_level="error")) + print(f"Starting {transport} server on port {port}") + server.run() + else: + raise ValueError(f"Invalid transport for test server: {transport}") diff --git a/examples/servers/sqlite-async-operations/pyproject.toml b/examples/servers/sqlite-async-operations/pyproject.toml new file mode 100644 index 000000000..e5ba37f29 --- /dev/null +++ b/examples/servers/sqlite-async-operations/pyproject.toml @@ -0,0 +1,33 @@ +[project] +name = "mcp-sqlite-async-operations" +version = "0.1.0" +description = "Example MCP server demonstrating SQLite-based async operations storage" +readme = "README.md" +requires-python = ">=3.10" +dependencies = ["anyio>=4.5", "click>=8.2.0", "httpx>=0.27", "mcp"] + +[project.scripts] +mcp-sqlite-async-operations = "mcp_sqlite_async_operations.server:main" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["mcp_sqlite_async_operations"] + +[tool.pyright] +include = ["mcp_sqlite_async_operations"] +venvPath = "." 
+venv = ".venv" + +[tool.ruff.lint] +select = ["E", "F", "I"] +ignore = [] + +[tool.ruff] +line-length = 120 +target-version = "py310" + +[tool.uv] +dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index e6d2d45f2..dfb0eb508 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -469,6 +469,7 @@ async def _received_request(self, responder: RequestResponder[types.ServerReques operation_token=responder.operation.token if responder.operation is not None else None, meta=responder.request_meta, session=self, + supports_async=False, # No client tools right now lifespan_context=None, ) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 27b127dfd..38dda76ea 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -48,7 +48,6 @@ from mcp.shared.async_operations import ServerAsyncOperationManager from mcp.shared.context import LifespanContextT, RequestContext, RequestT from mcp.types import ( - NEXT_PROTOCOL_VERSION, AnyFunction, ContentBlock, GetOperationPayloadResult, @@ -306,7 +305,7 @@ def _setup_handlers(self) -> None: async def get_operation_status(self, token: str) -> GetOperationStatusResult: """Get the status of an async operation.""" try: - operation = self._async_operations.get_operation(token) + operation = await self._async_operations.get_operation(token) if not operation: raise ValueError(f"Operation not found: {token}") @@ -321,7 +320,7 @@ async def get_operation_status(self, token: str) -> GetOperationStatusResult: async def get_operation_result(self, token: str) -> GetOperationPayloadResult: """Get the result of a completed async operation.""" try: - operation = self._async_operations.get_operation(token) + operation = await self._async_operations.get_operation(token) if not operation: raise ValueError(f"Operation not found: {token}") @@ -337,13 +336,10 @@ async def 
get_operation_result(self, token: str) -> GetOperationPayloadResult: raise def _client_supports_async(self) -> bool: - """Check if the current client supports async tools based on protocol version.""" + """Check if the current client supports async tools.""" try: context = self.get_context() - if context.request_context and context.request_context.session.client_params: - client_version = str(context.request_context.session.client_params.protocolVersion) - # Only "next" version supports async tools for now - return client_version == NEXT_PROTOCOL_VERSION + return context.supports_async except ValueError: # Context not available (outside of request), assume no async support pass @@ -1387,6 +1383,11 @@ def session(self): """Access to the underlying session for advanced usage.""" return self.request_context.session + @property + def supports_async(self): + """If async tools are supported in the current context.""" + return self.request_context.supports_async + # Convenience methods for common log levels async def debug(self, message: str, **extra: Any) -> None: """Send a debug log message.""" diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index bfd156349..d4011aeb4 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -92,7 +92,7 @@ async def main(): from mcp.shared.exceptions import McpError from mcp.shared.message import ServerMessageMetadata, SessionMessage from mcp.shared.session import RequestResponder -from mcp.types import Operation, RequestId +from mcp.types import NEXT_PROTOCOL_VERSION, Operation, RequestId logger = logging.getLogger(__name__) @@ -154,8 +154,13 @@ def __init__( self.icons = icons self.lifespan = lifespan self.async_operations = async_operations or ServerAsyncOperationManager() + self.async_operations.set_handler(self._execute_tool_async) # Track request ID to operation token mapping for cancellation self._request_to_operation: dict[RequestId, str] = {} + # Store tool 
functions for async execution + self._tool_function: ( + Callable[..., Awaitable[UnstructuredContent | StructuredContent | CombinationContent]] | None + ) = None self.request_handlers: dict[type, Callable[..., Awaitable[types.ServerResult]]] = { types.PingRequest: _ping_handler, } @@ -494,6 +499,9 @@ def decorator( ): logger.debug("Registering handler for CallToolRequest") + # Store the tool function for async execution + self._tool_function = func + async def handler(req: types.CallToolRequest, server_scope: TaskGroup): try: tool_name = req.params.name @@ -529,7 +537,7 @@ async def handler(req: types.CallToolRequest, server_scope: TaskGroup): ) # Create async operation - operation = self.async_operations.create_operation( + operation = await self.async_operations.create_operation( tool_name=tool_name, arguments=arguments, keep_alive=keep_alive, @@ -542,32 +550,17 @@ async def handler(req: types.CallToolRequest, server_scope: TaskGroup): operation_token=self.request_context.operation_token, meta=self.request_context.meta, session=self.request_context.session, + supports_async=self._client_supports_async(self.request_context.session), lifespan_context=self.request_context.lifespan_context, request=self.request_context.request, ) ctx.operation_token = operation.token request_ctx.set(ctx) - # Start async execution in background - async def execute_async(): - try: - logger.debug(f"Starting async execution of {tool_name}") - self.async_operations.mark_working(operation.token) - results = await func(tool_name, arguments) - logger.debug(f"Async execution completed for {tool_name}") - - # Process results using shared logic - result = self._process_tool_result(results, tool) - self.async_operations.complete_operation(operation.token, result) - logger.debug(f"Completed async operation {operation.token}") - except Exception as e: - logger.exception(f"Async execution failed for {tool_name}") - self.async_operations.fail_operation(operation.token, str(e)) - - # Start task 
directly in independent task group + # Start task with tool name and arguments current_request_context = request_ctx.get() - self.async_operations.start_task( - operation.token, execute_async, current_request_context, request_ctx + await self.async_operations.start_task( + operation.token, tool_name, arguments, current_request_context ) # Return operation result with immediate content @@ -599,6 +592,14 @@ async def execute_async(): return decorator + def _client_supports_async(self, session: ServerSession) -> bool: + """Check if the provided session supports async tools based on protocol version.""" + if session.client_params: + client_version = str(session.client_params.protocolVersion) + # Only "next" version supports async tools for now + return client_version == NEXT_PROTOCOL_VERSION + return False + def _process_tool_result( self, results: UnstructuredContent | StructuredContent | CombinationContent, tool: types.Tool | None = None ) -> types.CallToolResult: @@ -688,6 +689,37 @@ async def _execute_immediate_result(self, tool: types.Tool, arguments: dict[str, types.ErrorData(code=types.INTERNAL_ERROR, message=f"Immediate result execution error: {str(e)}") ) + async def _execute_tool_async( + self, tool_name: str, arguments: dict[str, Any], request_context: Any + ) -> types.CallToolResult: + """Execute a tool asynchronously and return the result.""" + context_token = None + + try: + # Restore the request context for this task + if request_context: + context_token = request_ctx.set(request_context) + + logger.info(f"Starting async execution of tool '{tool_name}'") + + if not self._tool_function: + raise ValueError("No tool function registered") + + # Execute the tool function + results = await self._tool_function(tool_name, arguments) + + # Get tool definition for validation + tool = await self._get_cached_tool_definition(tool_name) + + # Process results using shared logic + result = self._process_tool_result(results, tool) + logger.info(f"Async execution of tool 
'{tool_name}' completed") + return result + + finally: + if context_token: + request_ctx.reset(context_token) + def progress_notification(self): def decorator( func: Callable[[str | int, float, float | None, str | None], Awaitable[None]], @@ -737,9 +769,9 @@ async def handler(req: types.CompleteRequest, _: Any = None): return decorator - def _validate_operation_token(self, token: str) -> ServerAsyncOperation: + async def _validate_operation_token(self, token: str) -> ServerAsyncOperation: """Validate operation token and return operation if valid.""" - operation = self.async_operations.get_operation(token) + operation = await self.async_operations.get_operation(token) if not operation: raise McpError(types.ErrorData(code=-32602, message="Invalid token")) @@ -760,7 +792,7 @@ def decorator(func: Callable[[str], Awaitable[types.GetOperationStatusResult]]): async def handler(req: types.GetOperationStatusRequest, _: Any = None): # Validate token and get operation - operation = self._validate_operation_token(req.params.token) + operation = await self._validate_operation_token(req.params.token) return types.ServerResult( types.GetOperationStatusResult( @@ -782,7 +814,7 @@ def decorator(func: Callable[[str], Awaitable[types.GetOperationPayloadResult]]) async def handler(req: types.GetOperationPayloadRequest, _: Any = None): # Validate token and get operation - operation = self._validate_operation_token(req.params.token) + operation = await self._validate_operation_token(req.params.token) if operation.status != "completed": raise McpError( @@ -799,13 +831,13 @@ async def handler(req: types.GetOperationPayloadRequest, _: Any = None): return decorator - def handle_cancelled_notification(self, request_id: RequestId) -> None: + async def handle_cancelled_notification(self, request_id: RequestId) -> None: """Handle cancellation notification for a request.""" # Check if this request ID corresponds to an async operation if request_id in self._request_to_operation: token = 
self._request_to_operation[request_id] # Cancel the operation - if self.async_operations.cancel_operation(token): + if await self.async_operations.cancel_operation(token): logger.debug(f"Cancelled async operation {token} for request {request_id}") # Clean up the mapping del self._request_to_operation[request_id] @@ -814,31 +846,31 @@ async def _handle_cancelled_notification(self, notification: types.CancelledNoti """Handle cancelled notification from client.""" request_id = notification.params.requestId logger.debug(f"Received cancellation notification for request {request_id}") - self.handle_cancelled_notification(request_id) + await self.handle_cancelled_notification(request_id) - def send_request_for_operation(self, token: str, request: types.ServerRequest) -> None: + async def send_request_for_operation(self, token: str, request: types.ServerRequest) -> None: """Send a request associated with an async operation.""" # Mark operation as requiring input - if self.async_operations.mark_input_required(token): + if await self.async_operations.mark_input_required(token): # Add operation token to request if hasattr(request.root, "params") and request.root.params is not None: if not hasattr(request.root.params, "operation") or request.root.params.operation is None: request.root.params.operation = Operation(token=token) logger.debug(f"Marked operation {token} as input_required and added to request") - def send_notification_for_operation(self, token: str, notification: types.ServerNotification) -> None: + async def send_notification_for_operation(self, token: str, notification: types.ServerNotification) -> None: """Send a notification associated with an async operation.""" # Mark operation as requiring input - if self.async_operations.mark_input_required(token): + if await self.async_operations.mark_input_required(token): # Add operation token to notification if hasattr(notification.root, "params") and notification.root.params is not None: if not 
hasattr(notification.root.params, "operation") or notification.root.params.operation is None: notification.root.params.operation = Operation(token=token) logger.debug(f"Marked operation {token} as input_required and added to notification") - def complete_request_for_operation(self, token: str) -> None: + async def complete_request_for_operation(self, token: str) -> None: """Mark that a request for an operation has been completed.""" - if self.async_operations.mark_input_completed(token): + if await self.async_operations.mark_input_completed(token): logger.debug(f"Marked operation {token} as no longer requiring input") async def run( @@ -931,6 +963,7 @@ async def _handle_request( operation_token=message.operation.token if message.operation else None, meta=message.request_meta, session=session, + supports_async=self._client_supports_async(session), lifespan_context=lifespan_context, request=request_data, ) diff --git a/src/mcp/shared/async_operations.py b/src/mcp/shared/async_operations.py index 1c79d11b3..d7029ac5e 100644 --- a/src/mcp/shared/async_operations.py +++ b/src/mcp/shared/async_operations.py @@ -6,9 +6,10 @@ import logging import secrets import time +from collections import deque from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import dataclass -from typing import Any, Generic, TypeVar +from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar import anyio from anyio.abc import TaskGroup @@ -16,9 +17,24 @@ import mcp.types as types from mcp.types import AsyncOperationStatus +if TYPE_CHECKING: + # Avoid circular import with mcp.server.lowlevel.Server + from mcp.server.session import ServerSession + from mcp.shared.context import RequestContext + logger = logging.getLogger(__name__) +@dataclass +class PendingAsyncTask: + """Represents a task waiting to be dispatched.""" + + token: str + tool_name: str + arguments: dict[str, Any] + request_context: Any # The RequestContext object to restore + + @dataclass class 
ClientAsyncOperation: """Minimal operation tracking for client-side use.""" @@ -127,6 +143,60 @@ async def cleanup_loop(self) -> None: logger.debug(f"Cleaned up {count} expired operations") +class AsyncOperationStore(Protocol): + """Protocol for async operation storage implementations.""" + + async def get_operation(self, token: str) -> ServerAsyncOperation | None: + """Get operation by token.""" + ... + + async def store_operation(self, operation: ServerAsyncOperation) -> None: + """Store an operation.""" + ... + + async def update_status(self, token: str, status: AsyncOperationStatus) -> bool: + """Update operation status.""" + ... + + async def complete_operation_with_result(self, token: str, result: types.CallToolResult) -> bool: + """Complete operation with result.""" + ... + + async def fail_operation_with_error(self, token: str, error: str) -> bool: + """Fail operation with error.""" + ... + + async def cleanup_expired(self) -> int: + """Remove expired operations and return count.""" + ... + + +class AsyncOperationBroker(Protocol): + """Protocol for async operation queueing and scheduling.""" + + async def enqueue_task( + self, + token: str, + tool_name: str, + arguments: dict[str, Any], + request_context: RequestContext[ServerSession, Any, Any], + ) -> None: + """Enqueue a task for execution.""" + ... + + async def get_pending_tasks(self) -> list[PendingAsyncTask]: + """Get all pending tasks.""" + ... + + async def acknowledge_task(self, token: str) -> None: + """Acknowledge that a task has been dispatched.""" + ... + + async def complete_task(self, token: str) -> None: + """Remove a completed task from persistent storage.""" + ... 
+ + class ClientAsyncOperationManager(BaseOperationManager[ClientAsyncOperation]): """Manages client-side operation tracking.""" @@ -146,15 +216,37 @@ def get_tool_name(self, token: str) -> str | None: return operation.tool_name if operation else None -class ServerAsyncOperationManager(BaseOperationManager[ServerAsyncOperation]): - """Manages async tool operations with token-based tracking.""" +class ServerAsyncOperationManager: + """Manages async tool operations using Store and Broker components.""" - def __init__(self, *, token_generator: Callable[[str | None], str] | None = None): - super().__init__(token_generator=token_generator) + def __init__( + self, + store: AsyncOperationStore | None = None, + broker: AsyncOperationBroker | None = None, + *, + token_generator: Callable[[str | None], str] | None = None, + ): + # Use provided implementations or default to InMemory + self.store = store or InMemoryAsyncOperationStore() + self.broker = broker or InMemoryAsyncOperationBroker() + self._token_generator = token_generator or self._default_token_generator + self._tool_executor: Callable[[str, dict[str, Any], Any], Awaitable[types.CallToolResult]] | None = None self._task_group: TaskGroup | None = None self._run_lock = anyio.Lock() self._running = False + def set_handler(self, tool_executor: Callable[[str, dict[str, Any], Any], Awaitable[types.CallToolResult]]) -> None: + """Set the tool executor handler for late binding.""" + self._tool_executor = tool_executor + + def _default_token_generator(self, session_id: str | None = None) -> str: + """Default token generation using random tokens.""" + return secrets.token_urlsafe(32) + + def generate_token(self, session_id: str | None = None) -> str: + """Generate a token.""" + return self._token_generator(session_id) + @contextlib.asynccontextmanager async def run(self) -> AsyncIterator[None]: """Run the async operations manager with its own task group.""" @@ -167,46 +259,69 @@ async def run(self) -> AsyncIterator[None]: 
async with anyio.create_task_group() as tg: self._task_group = tg logger.info("ServerAsyncOperationManager started") - # Start cleanup loop - tg.start_soon(self.cleanup_loop) + # Start cleanup loop and task dispatcher + tg.start_soon(self._cleanup_loop) + tg.start_soon(self._task_dispatcher) try: yield finally: logger.info("ServerAsyncOperationManager shutting down") # Stop cleanup loop gracefully - await self.stop_cleanup_loop() + await self._stop_cleanup_loop() # Cancel task group to stop all spawned tasks tg.cancel_scope.cancel() self._task_group = None self._running = False - def start_task( + async def _cleanup_loop(self) -> None: + """Background cleanup loop for expired operations.""" + while self._running: + await anyio.sleep(60) # Cleanup every 60 seconds + count = await self.store.cleanup_expired() + if count > 0: + logger.debug(f"Cleaned up {count} expired operations") + + async def _stop_cleanup_loop(self) -> None: + """Stop the cleanup loop.""" + self._running = False + + async def _task_dispatcher(self) -> None: + """Background task dispatcher that processes queued tasks.""" + while self._running: + await anyio.sleep(0.1) # Check for tasks frequently + pending_tasks = await self.broker.get_pending_tasks() + for task in pending_tasks: + if self._task_group and self._tool_executor: + logger.debug(f"Dispatching queued async task {task.token}") + self._task_group.start_soon(self._execute_tool_task, task, name=f"lro_{task.token}") + # Acknowledge that we've dispatched this task + await self.broker.acknowledge_task(task.token) + + async def _execute_tool_task(self, task: PendingAsyncTask) -> None: + """Execute a tool task.""" + try: + if not self._tool_executor: + raise ValueError("No tool executor configured") + + await self.mark_working(task.token) + result = await self._tool_executor(task.tool_name, task.arguments, task.request_context) + await self.complete_operation(task.token, result) + + except Exception as e: + logger.exception(f"Tool task 
{task.token} failed: {e}") + await self.fail_operation(task.token, str(e)) + + async def start_task( self, token: str, - task_func: Callable[[], Awaitable[None]], - request_context: Any = None, - request_ctx_var: Any = None, + tool_name: str, + arguments: dict[str, Any], + request_context: RequestContext[ServerSession, Any, Any], ) -> None: - """Start an async task immediately in the independent task group.""" - if self._task_group is None: - raise RuntimeError("Task group not started. Call run() first.") - - async def run_task_with_context(): - context_token = None - try: - if request_context and request_ctx_var: - context_token = request_ctx_var.set(request_context) - await task_func() - except Exception: - # Handle task failures gracefully - pass - finally: - if context_token and request_ctx_var: - request_ctx_var.reset(context_token) - - self._task_group.start_soon(run_task_with_context, name=f"lro_{token}") + """Enqueue an async task for execution.""" + await self.broker.enqueue_task(token, tool_name, arguments, request_context) - def create_operation( + async def create_operation( self, tool_name: str, arguments: dict[str, Any], @@ -224,123 +339,174 @@ def create_operation( keep_alive=keep_alive, session_id=session_id, ) - self._set_operation(token, operation) + await self.store.store_operation(operation) + logger.info(f"Created async operation {token} for tool '{tool_name}'") return operation - def mark_working(self, token: str) -> bool: - """Mark operation as working.""" - operation = self._get_operation(token) - if not operation: - return False - - # Can only transition to working from submitted - if operation.status != "submitted": - return False + async def get_operation(self, token: str) -> ServerAsyncOperation | None: + """Get operation by token.""" + return await self.store.get_operation(token) - operation.status = "working" - return True + async def mark_working(self, token: str) -> bool: + """Mark operation as working.""" + return await 
self.store.update_status(token, "working") - def complete_operation(self, token: str, result: types.CallToolResult) -> bool: + async def complete_operation(self, token: str, result: types.CallToolResult) -> bool: """Complete operation with result.""" - operation = self._get_operation(token) - if not operation: - return False + success = await self.store.complete_operation_with_result(token, result) + if success: + await self.broker.complete_task(token) + logger.info(f"Async operation {token} completed successfully") + return success + + async def fail_operation(self, token: str, error: str) -> bool: + """Fail operation with error.""" + success = await self.store.fail_operation_with_error(token, error) + if success: + await self.broker.complete_task(token) + logger.info(f"Async operation {token} failed: {error}") + return success - # Can only complete from submitted or working states - if operation.status not in ("submitted", "working"): + async def cancel_operation(self, token: str) -> bool: + """Cancel operation.""" + operation = await self.store.get_operation(token) + if not operation or operation.status in ("completed", "failed", "canceled"): return False - operation.status = "completed" - operation.result = result - operation.resolved_at = time.time() + # Create new operation with updated fields instead of mutating + cancelled_operation = ServerAsyncOperation( + token=operation.token, + tool_name=operation.tool_name, + arguments=operation.arguments, + status="canceled", + created_at=operation.created_at, + keep_alive=operation.keep_alive, + resolved_at=time.time(), + session_id=operation.session_id, + result=operation.result, + error=operation.error, + ) + await self.store.store_operation(cancelled_operation) + await self.broker.complete_task(token) # Clean up from broker + logger.info(f"Async operation {token} was cancelled") return True - def fail_operation(self, token: str, error: str) -> bool: - """Fail operation with error.""" - operation = 
self._get_operation(token) - if not operation: + async def mark_input_required(self, token: str) -> bool: + """Mark operation as requiring input.""" + operation = await self.store.get_operation(token) + if not operation or operation.status not in ("submitted", "working"): return False - # Can only fail from submitted or working states - if operation.status not in ("submitted", "working"): + await self.store.update_status(token, "input_required") + return True + + async def mark_input_completed(self, token: str) -> bool: + """Mark input as completed, transitioning back to working.""" + operation = await self.store.get_operation(token) + if not operation or operation.status != "input_required": return False - operation.status = "failed" - operation.error = error - operation.resolved_at = time.time() + await self.store.update_status(token, "working") return True - def get_operation_result(self, token: str) -> types.CallToolResult | None: + async def get_operation_result(self, token: str) -> types.CallToolResult | None: """Get result for completed operation.""" - operation = self._get_operation(token) + operation = await self.store.get_operation(token) if not operation or operation.status != "completed": return None return operation.result - def cancel_operation(self, token: str) -> bool: - """Cancel operation.""" - operation = self._get_operation(token) - if not operation: - return False - - # Can only cancel from submitted or working states - if operation.status not in ("submitted", "working"): - return False - - operation.status = "canceled" - return True - - def remove_operation(self, token: str) -> bool: - """Remove operation by token.""" - return self._operations.pop(token, None) is not None - - def cleanup_expired_operations(self) -> int: - """Remove expired operations and return count removed.""" - expired_tokens = [token for token, op in self._operations.items() if op.is_expired] - - for token in expired_tokens: - del self._operations[token] + async def 
cleanup_expired(self) -> int: + """Remove expired operations and return count.""" + return await self.store.cleanup_expired() - return len(expired_tokens) - def get_session_operations(self, session_id: str) -> list[ServerAsyncOperation]: - """Get all operations for a session.""" - return [op for op in self._operations.values() if op.session_id == session_id] +class InMemoryAsyncOperationStore(AsyncOperationStore): + """In-memory implementation of AsyncOperationStore.""" - def cancel_session_operations(self, session_id: str) -> int: - """Cancel all operations for a session.""" - session_ops = self.get_session_operations(session_id) - canceled_count = 0 + def __init__(self): + self._operations: dict[str, ServerAsyncOperation] = {} - for op in session_ops: - if not op.is_terminal: - op.status = "canceled" - canceled_count += 1 + async def get_operation(self, token: str) -> ServerAsyncOperation | None: + """Get operation by token.""" + return self._operations.get(token) - return canceled_count + async def store_operation(self, operation: ServerAsyncOperation) -> None: + """Store an operation.""" + self._operations[operation.token] = operation - def mark_input_required(self, token: str) -> bool: - """Mark operation as requiring input from client.""" - operation = self._get_operation(token) + async def update_status(self, token: str, status: AsyncOperationStatus) -> bool: + """Update operation status.""" + operation = self._operations.get(token) if not operation: return False - # Can only move to input_required from submitted or working states - if operation.status not in ("submitted", "working"): + # Don't allow transitions from terminal states + if operation.is_terminal: return False - operation.status = "input_required" + operation.status = status + if status in ("completed", "failed", "canceled"): + operation.resolved_at = time.time() return True - def mark_input_completed(self, token: str) -> bool: - """Mark operation as no longer requiring input, return to working 
state.""" - operation = self._get_operation(token) - if not operation: + async def complete_operation_with_result(self, token: str, result: types.CallToolResult) -> bool: + """Complete operation with result.""" + operation = self._operations.get(token) + if not operation or operation.is_terminal: return False - # Can only move from input_required back to working - if operation.status != "input_required": + operation.status = "completed" + operation.result = result + operation.resolved_at = time.time() + return True + + async def fail_operation_with_error(self, token: str, error: str) -> bool: + """Fail operation with error.""" + operation = self._operations.get(token) + if not operation or operation.is_terminal: return False - operation.status = "working" + operation.status = "failed" + operation.error = error + operation.resolved_at = time.time() return True + + async def cleanup_expired(self) -> int: + """Remove expired operations and return count.""" + expired_tokens = [token for token, op in self._operations.items() if op.is_expired] + for token in expired_tokens: + del self._operations[token] + return len(expired_tokens) + + +class InMemoryAsyncOperationBroker(AsyncOperationBroker): + """In-memory implementation of AsyncOperationBroker.""" + + def __init__(self): + self._task_queue: deque[PendingAsyncTask] = deque() + + async def enqueue_task( + self, + token: str, + tool_name: str, + arguments: dict[str, Any], + request_context: RequestContext[ServerSession, Any, Any], + ) -> None: + """Enqueue a task for execution.""" + task = PendingAsyncTask(token=token, tool_name=tool_name, arguments=arguments, request_context=request_context) + self._task_queue.append(task) + + async def get_pending_tasks(self) -> list[PendingAsyncTask]: + """Get all pending tasks without clearing them.""" + return list(self._task_queue) + + async def acknowledge_task(self, token: str) -> None: + """Acknowledge that a task has been dispatched.""" + # Remove the task from the queue + 
self._task_queue = deque(task for task in self._task_queue if task.token != token) + + async def complete_task(self, token: str) -> None: + """Remove a completed task from persistent storage.""" + # For in-memory broker, this is the same as acknowledge + self._task_queue = deque(task for task in self._task_queue if task.token != token) diff --git a/src/mcp/shared/context.py b/src/mcp/shared/context.py index f4e394990..d83c9f7bb 100644 --- a/src/mcp/shared/context.py +++ b/src/mcp/shared/context.py @@ -12,10 +12,26 @@ @dataclass -class RequestContext(Generic[SessionT, LifespanContextT, RequestT]): +class SerializableRequestContext: + """Serializable subset of RequestContext for persistent storage.""" + request_id: RequestId operation_token: str | None meta: RequestParams.Meta | None + supports_async: bool + + +@dataclass +class RequestContext(SerializableRequestContext, Generic[SessionT, LifespanContextT, RequestT]): session: SessionT lifespan_context: LifespanContextT request: RequestT | None = None + + def to_serializable(self) -> SerializableRequestContext: + """Extract serializable parts of this context.""" + return SerializableRequestContext( + request_id=self.request_id, + operation_token=self.operation_token, + meta=self.meta, + supports_async=self.supports_async, + ) diff --git a/tests/issues/test_176_progress_token.py b/tests/issues/test_176_progress_token.py index 230be8241..59fc30bc6 100644 --- a/tests/issues/test_176_progress_token.py +++ b/tests/issues/test_176_progress_token.py @@ -23,6 +23,7 @@ async def test_progress_token_zero_first_call(): request_id="test-request", operation_token=None, session=mock_session, + supports_async=False, meta=mock_meta, lifespan_context=None, ) diff --git a/tests/server/test_lowlevel_async_operations.py b/tests/server/test_lowlevel_async_operations.py index 7adaa2199..498ec737a 100644 --- a/tests/server/test_lowlevel_async_operations.py +++ b/tests/server/test_lowlevel_async_operations.py @@ -48,8 +48,8 @@ async def 
check_status_handler(token: str) -> types.GetOperationStatusResult: return types.GetOperationStatusResult(status="unknown") # Create and complete operation with short keepAlive - operation = manager.create_operation("test_tool", {}, keep_alive=1, session_id="session1") - manager.complete_operation(operation.token, types.CallToolResult(content=[])) + operation = await manager.create_operation("test_tool", {}, keep_alive=1, session_id="session1") + await manager.complete_operation(operation.token, types.CallToolResult(content=[])) # Make it expired operation.resolved_at = time.time() - 2 @@ -75,8 +75,8 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: return types.GetOperationStatusResult(status="unknown") # Create valid operation - operation = manager.create_operation("test_tool", {}, session_id="session1") - manager.mark_working(operation.token) + operation = await manager.create_operation("test_tool", {}, session_id="session1") + await manager.mark_working(operation.token) valid_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) @@ -100,8 +100,8 @@ async def check_status_handler(token: str) -> types.GetOperationStatusResult: return types.GetOperationStatusResult(status="unknown") # Create and fail operation - operation = manager.create_operation("test_tool", {}, session_id="session1") - manager.fail_operation(operation.token, "Something went wrong") + operation = await manager.create_operation("test_tool", {}, session_id="session1") + await manager.fail_operation(operation.token, "Something went wrong") failed_request = types.GetOperationStatusRequest(params=types.GetOperationStatusParams(token=operation.token)) @@ -147,8 +147,8 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create and complete operation with short keepAlive - operation = 
manager.create_operation("test_tool", {}, keep_alive=1, session_id="session1") - manager.complete_operation(operation.token, types.CallToolResult(content=[])) + operation = await manager.create_operation("test_tool", {}, keep_alive=1, session_id="session1") + await manager.complete_operation(operation.token, types.CallToolResult(content=[])) # Make it expired operation.resolved_at = time.time() - 2 @@ -176,8 +176,8 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create operation that's still working - operation = manager.create_operation("test_tool", {}, session_id="session1") - manager.mark_working(operation.token) + operation = await manager.create_operation("test_tool", {}, session_id="session1") + await manager.mark_working(operation.token) working_request = types.GetOperationPayloadRequest( params=types.GetOperationPayloadParams(token=operation.token) @@ -202,9 +202,9 @@ async def get_result_handler(token: str) -> types.GetOperationPayloadResult: return types.GetOperationPayloadResult(result=types.CallToolResult(content=[])) # Create and complete operation with result - operation = manager.create_operation("test_tool", {}, session_id="session1") + operation = await manager.create_operation("test_tool", {}, session_id="session1") result = types.CallToolResult(content=[types.TextContent(type="text", text="success")]) - manager.complete_operation(operation.token, result) + await manager.complete_operation(operation.token, result) completed_request = types.GetOperationPayloadRequest( params=types.GetOperationPayloadParams(token=operation.token) @@ -229,17 +229,17 @@ async def test_handle_cancelled_notification(self): server = Server("Test", async_operations=manager) # Create an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + operation = await manager.create_operation("test_tool", {"arg": 
"value"}, session_id="session1") # Track the operation with a request ID request_id = "req_123" server._request_to_operation[request_id] = operation.token # Handle cancellation - server.handle_cancelled_notification(request_id) + await server.handle_cancelled_notification(request_id) # Verify operation was cancelled - cancelled_op = manager.get_operation(operation.token) + cancelled_op = await manager.get_operation(operation.token) assert cancelled_op is not None assert cancelled_op.status == "canceled" @@ -253,7 +253,7 @@ async def test_cancelled_notification_handler(self): server = Server("Test", async_operations=manager) # Create an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") # Track the operation with a request ID request_id = "req_456" @@ -265,7 +265,7 @@ async def test_cancelled_notification_handler(self): await server._handle_cancelled_notification(notification) # Verify operation was cancelled - cancelled_op = manager.get_operation(operation.token) + cancelled_op = await manager.get_operation(operation.token) assert cancelled_op is not None assert cancelled_op.status == "canceled" @@ -276,12 +276,12 @@ async def test_validate_operation_token_cancelled(self): server = Server("Test", async_operations=manager) # Create and cancel an operation - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") - manager.cancel_operation(operation.token) + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + await manager.cancel_operation(operation.token) # Verify that accessing cancelled operation raises error with pytest.raises(McpError) as exc_info: - server._validate_operation_token(operation.token) + await server._validate_operation_token(operation.token) assert exc_info.value.error.code == -32602 assert "cancelled" in 
exc_info.value.error.message.lower() @@ -292,7 +292,7 @@ async def test_nonexistent_request_id_cancellation(self): server = Server("Test") # Should not raise error for non-existent request ID - server.handle_cancelled_notification("nonexistent_request") + await server.handle_cancelled_notification("nonexistent_request") # Verify no operations were affected assert len(server._request_to_operation) == 0 @@ -307,15 +307,15 @@ async def test_mark_input_required(self): manager = ServerAsyncOperationManager() # Create operation in submitted state - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") assert operation.status == "submitted" # Mark as input required - result = manager.mark_input_required(operation.token) + result = await manager.mark_input_required(operation.token) assert result is True # Verify status changed - updated_op = manager.get_operation(operation.token) + updated_op = await manager.get_operation(operation.token) assert updated_op is not None assert updated_op.status == "input_required" @@ -325,12 +325,12 @@ async def test_mark_input_required_from_working(self): manager = ServerAsyncOperationManager() # Create and mark as working - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") - manager.mark_working(operation.token) + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + await manager.mark_working(operation.token) assert operation.status == "working" # Mark as input required - result = manager.mark_input_required(operation.token) + result = await manager.mark_input_required(operation.token) assert result is True assert operation.status == "input_required" @@ -340,10 +340,10 @@ async def test_mark_input_required_invalid_states(self): manager = ServerAsyncOperationManager() # Test from completed state - operation = 
manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") - manager.complete_operation(operation.token, types.CallToolResult(content=[])) + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + await manager.complete_operation(operation.token, types.CallToolResult(content=[])) - result = manager.mark_input_required(operation.token) + result = await manager.mark_input_required(operation.token) assert result is False assert operation.status == "completed" @@ -353,12 +353,12 @@ async def test_mark_input_completed(self): manager = ServerAsyncOperationManager() # Create operation and mark as input required - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") - manager.mark_input_required(operation.token) + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + await manager.mark_input_required(operation.token) assert operation.status == "input_required" # Mark input as completed - result = manager.mark_input_completed(operation.token) + result = await manager.mark_input_completed(operation.token) assert result is True assert operation.status == "working" @@ -368,11 +368,11 @@ async def test_mark_input_completed_invalid_state(self): manager = ServerAsyncOperationManager() # Create operation in submitted state - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") assert operation.status == "submitted" # Try to mark input completed from wrong state - result = manager.mark_input_completed(operation.token) + result = await manager.mark_input_completed(operation.token) assert result is False assert operation.status == "submitted" @@ -382,8 +382,8 @@ async def test_nonexistent_token_operations(self): manager = ServerAsyncOperationManager() # Test with fake token - assert 
manager.mark_input_required("fake_token") is False - assert manager.mark_input_completed("fake_token") is False + assert await manager.mark_input_required("fake_token") is False + assert await manager.mark_input_completed("fake_token") is False @pytest.mark.anyio async def test_server_send_request_for_operation(self): @@ -392,8 +392,8 @@ async def test_server_send_request_for_operation(self): server = Server("Test", async_operations=manager) # Create operation - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") - manager.mark_working(operation.token) + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + await manager.mark_working(operation.token) # Create a mock request request = types.ServerRequest( @@ -406,10 +406,10 @@ async def test_server_send_request_for_operation(self): ) # Send request for operation - server.send_request_for_operation(operation.token, request) + await server.send_request_for_operation(operation.token, request) # Verify operation status changed - updated_op = manager.get_operation(operation.token) + updated_op = await manager.get_operation(operation.token) assert updated_op is not None assert updated_op.status == "input_required" @@ -420,14 +420,14 @@ async def test_server_complete_request_for_operation(self): server = Server("Test", async_operations=manager) # Create operation and mark as input required - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") - manager.mark_input_required(operation.token) + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + await manager.mark_input_required(operation.token) # Complete request for operation - server.complete_request_for_operation(operation.token) + await server.complete_request_for_operation(operation.token) # Verify operation status changed back to working - updated_op = manager.get_operation(operation.token) + updated_op 
= await manager.get_operation(operation.token) assert updated_op is not None assert updated_op.status == "working" @@ -437,8 +437,8 @@ async def test_input_required_is_terminal_check(self): manager = ServerAsyncOperationManager() # Create operation and mark as input required - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") - manager.mark_input_required(operation.token) + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id="session1") + await manager.mark_input_required(operation.token) # Verify it's not terminal assert not operation.is_terminal diff --git a/tests/shared/test_async_operations.py b/tests/shared/test_async_operations.py index 8c349b6aa..4a0707036 100644 --- a/tests/shared/test_async_operations.py +++ b/tests/shared/test_async_operations.py @@ -5,6 +5,8 @@ from typing import Any, cast from unittest.mock import Mock +import pytest + import mcp.types as types from mcp.shared.async_operations import ServerAsyncOperation, ServerAsyncOperationManager from mcp.types import AsyncOperationStatus @@ -13,12 +15,12 @@ class TestAsyncOperationManager: """Test AsyncOperationManager functionality.""" - def _create_manager_with_operation( + async def _create_manager_with_operation( self, session_id: str = "session1", **kwargs: Any ) -> tuple[ServerAsyncOperationManager, ServerAsyncOperation]: """Helper to create manager with a test operation.""" manager = ServerAsyncOperationManager() - operation = manager.create_operation("test_tool", {"arg": "value"}, session_id=session_id, **kwargs) + operation = await manager.create_operation("test_tool", {"arg": "value"}, session_id=session_id, **kwargs) return manager, operation def test_token_generation(self): @@ -38,93 +40,97 @@ def test_token_generation(self): token1, token2 = scoped_manager.generate_token("s1"), scoped_manager.generate_token("s2") assert token1.startswith("s1_") and token2.startswith("s2_") and token1 != token2 - def 
test_operation_lifecycle(self): + @pytest.mark.anyio + async def test_operation_lifecycle(self): """Test complete operation lifecycle including direct transitions.""" - manager, operation = self._create_manager_with_operation() + manager, operation = await self._create_manager_with_operation() token = operation.token # Test creation assert operation.status == "submitted" and operation.result is None # Test working transition - assert manager.mark_working(token) - working_op = manager.get_operation(token) + assert await manager.mark_working(token) + working_op = await manager.get_operation(token) assert working_op is not None and working_op.status == "working" # Test completion result = types.CallToolResult(content=[types.TextContent(type="text", text="success")]) - assert manager.complete_operation(token, result) - completed_op = manager.get_operation(token) + assert await manager.complete_operation(token, result) + completed_op = await manager.get_operation(token) assert completed_op is not None assert completed_op.status == "completed" and completed_op.result == result - assert manager.get_operation_result(token) == result + assert await manager.get_operation_result(token) == result # Test direct completion from submitted (new manager to avoid interference) - direct_manager, direct_op = self._create_manager_with_operation() - assert direct_manager.complete_operation(direct_op.token, result) - direct_completed = direct_manager.get_operation(direct_op.token) + direct_manager, direct_op = await self._create_manager_with_operation() + assert await direct_manager.complete_operation(direct_op.token, result) + direct_completed = await direct_manager.get_operation(direct_op.token) assert direct_completed is not None and direct_completed.status == "completed" # Test direct failure from submitted (new manager to avoid interference) - fail_manager, fail_op = self._create_manager_with_operation() - assert fail_manager.fail_operation(fail_op.token, "immediate error") - failed 
= fail_manager.get_operation(fail_op.token) + fail_manager, fail_op = await self._create_manager_with_operation() + assert await fail_manager.fail_operation(fail_op.token, "immediate error") + failed = await fail_manager.get_operation(fail_op.token) assert failed is not None assert failed.status == "failed" and failed.error == "immediate error" - def test_operation_failure_and_cancellation(self): + @pytest.mark.anyio + async def test_operation_failure_and_cancellation(self): """Test operation failure and cancellation.""" - manager, operation = self._create_manager_with_operation() + manager, operation = await self._create_manager_with_operation() # Test failure - manager.mark_working(operation.token) - assert manager.fail_operation(operation.token, "Something went wrong") - failed_op = manager.get_operation(operation.token) + await manager.mark_working(operation.token) + assert await manager.fail_operation(operation.token, "Something went wrong") + failed_op = await manager.get_operation(operation.token) assert failed_op is not None assert failed_op.status == "failed" and failed_op.error == "Something went wrong" - assert manager.get_operation_result(operation.token) is None + assert await manager.get_operation_result(operation.token) is None # Test cancellation (new manager to avoid interference) - cancel_manager, cancel_op = self._create_manager_with_operation() - assert cancel_manager.cancel_operation(cancel_op.token) - canceled_op = cancel_manager.get_operation(cancel_op.token) + cancel_manager, cancel_op = await self._create_manager_with_operation() + assert await cancel_manager.cancel_operation(cancel_op.token) + canceled_op = await cancel_manager.get_operation(cancel_op.token) assert canceled_op is not None and canceled_op.status == "canceled" - def test_state_transitions_and_terminal_states(self): + @pytest.mark.anyio + async def test_state_transitions_and_terminal_states(self): """Test state transition validation and terminal state immutability.""" - 
manager, operation = self._create_manager_with_operation() + manager, operation = await self._create_manager_with_operation() token = operation.token result = Mock() # Valid transitions - assert manager.mark_working(token) - assert manager.complete_operation(token, result) + assert await manager.mark_working(token) + assert await manager.complete_operation(token, result) # Invalid transitions from terminal state - assert not manager.mark_working(token) - assert not manager.fail_operation(token, "error") - assert not manager.cancel_operation(token) - completed_check = manager.get_operation(token) + assert not await manager.mark_working(token) + assert not await manager.fail_operation(token, "error") + assert not await manager.cancel_operation(token) + completed_check = await manager.get_operation(token) assert completed_check is not None and completed_check.status == "completed" # Test other terminal states (use separate managers since previous operation is already completed) - def fail_action(m: ServerAsyncOperationManager, t: str) -> bool: - return m.fail_operation(t, "err") + async def fail_action(m: ServerAsyncOperationManager, t: str) -> bool: + return await m.fail_operation(t, "err") - def cancel_action(m: ServerAsyncOperationManager, t: str) -> bool: - return m.cancel_operation(t) + async def cancel_action(m: ServerAsyncOperationManager, t: str) -> bool: + return await m.cancel_operation(t) for status, action in [ ("failed", fail_action), ("canceled", cancel_action), ]: - test_manager, test_op = self._create_manager_with_operation() - action(test_manager, test_op.token) - terminal_op = test_manager.get_operation(test_op.token) + test_manager, test_op = await self._create_manager_with_operation() + await action(test_manager, test_op.token) + terminal_op = await test_manager.get_operation(test_op.token) assert terminal_op is not None assert terminal_op.status == status and terminal_op.is_terminal - def test_nonexistent_token_operations(self): + 
@pytest.mark.anyio + async def test_nonexistent_token_operations(self): """Test operations on nonexistent tokens.""" manager = ServerAsyncOperationManager() fake_token = "fake_token" @@ -136,64 +142,42 @@ def test_nonexistent_token_operations(self): ("fail_operation", ("error",)), ("cancel_operation", ()), ("get_operation_result", ()), - ("remove_operation", ()), ]: - assert getattr(manager, method)(fake_token, *args) in (None, False) - - def test_session_management(self): - """Test session-based operation management and termination.""" - manager = ServerAsyncOperationManager() - - # Create operations for different sessions - ops = [manager.create_operation(f"tool{i}", {}, session_id=f"session{i % 2}") for i in range(4)] - - # Test session filtering - s0_ops = manager.get_session_operations("session0") - s1_ops = manager.get_session_operations("session1") - assert len(s0_ops) == 2 and len(s1_ops) == 2 - - # Test session termination - ops[0] and ops[2] are in session0 - manager.mark_working(ops[0].token) # session0 - should be canceled - manager.complete_operation(ops[2].token, Mock()) # session0 - should NOT be canceled (completed) - - canceled_count = manager.cancel_session_operations("session0") - assert canceled_count == 1 # Only working operation canceled, not completed - - s0_after = manager.get_session_operations("session0") - # Find the operations by status since order might vary - working_op = next(op for op in s0_after if op.token == ops[0].token) - completed_op = next(op for op in s0_after if op.token == ops[2].token) - assert working_op.status == "canceled" and completed_op.status == "completed" + result = await getattr(manager, method)(fake_token, *args) + assert result in (None, False) - def test_expiration_and_cleanup(self): + @pytest.mark.anyio + async def test_expiration_and_cleanup(self): """Test operation expiration and cleanup.""" manager = ServerAsyncOperationManager() # Create operations with different expiration times - short_op = 
manager.create_operation("tool1", {}, keep_alive=1, session_id="session1") - long_op = manager.create_operation("tool2", {}, keep_alive=10, session_id="session1") + short_op = await manager.create_operation("tool1", {}, keep_alive=1, session_id="session1") + long_op = await manager.create_operation("tool2", {}, keep_alive=10, session_id="session1") # Complete both and make first expired for op in [short_op, long_op]: - manager.complete_operation(op.token, Mock()) + await manager.complete_operation(op.token, Mock()) short_op.resolved_at = time.time() - 2 # Test expiration detection assert short_op.is_expired and not long_op.is_expired # Test cleanup - removed_count = manager.cleanup_expired_operations() + removed_count = await manager.cleanup_expired() assert removed_count == 1 - assert manager.get_operation(short_op.token) is None - assert manager.get_operation(long_op.token) is not None + assert await manager.get_operation(short_op.token) is None + assert await manager.get_operation(long_op.token) is not None - def test_concurrent_operations(self): + @pytest.mark.anyio + async def test_concurrent_operations(self): """Test concurrent operation handling and memory management.""" manager = ServerAsyncOperationManager() # Create many operations operations = [ - manager.create_operation(f"tool_{i}", {"data": "x" * 100}, session_id=f"session_{i % 3}") for i in range(50) + await manager.create_operation(f"tool_{i}", {"data": "x" * 100}, session_id=f"session_{i % 3}") + for i in range(50) ] # All should be created successfully with unique tokens @@ -203,13 +187,13 @@ def test_concurrent_operations(self): # Complete half with short keepAlive and make them expired for i in range(25): - manager.complete_operation(operations[i].token, Mock()) + await manager.complete_operation(operations[i].token, Mock()) operations[i].keep_alive = 1 operations[i].resolved_at = time.time() - 2 # Cleanup should remove expired operations - removed_count = manager.cleanup_expired_operations() - 
assert removed_count == 25 and len(manager._operations) == 25 + removed_count = await manager.cleanup_expired() + assert removed_count == 25 class TestAsyncOperation: diff --git a/tests/shared/test_progress_notifications.py b/tests/shared/test_progress_notifications.py index 0425048c0..abb9d49c1 100644 --- a/tests/shared/test_progress_notifications.py +++ b/tests/shared/test_progress_notifications.py @@ -279,6 +279,7 @@ async def handle_client_message( request_id="test-request", operation_token=None, session=client_session, + supports_async=False, meta=meta, lifespan_context=None, ) diff --git a/uv.lock b/uv.lock index 3ab753e87..91c417e49 100644 --- a/uv.lock +++ b/uv.lock @@ -14,6 +14,7 @@ members = [ "mcp-simple-tool", "mcp-simple-tool-async", "mcp-snippets", + "mcp-sqlite-async-operations", ] [[package]] @@ -982,6 +983,39 @@ dependencies = [ [package.metadata] requires-dist = [{ name = "mcp", editable = "." }] +[[package]] +name = "mcp-sqlite-async-operations" +version = "0.1.0" +source = { editable = "examples/servers/sqlite-async-operations" } +dependencies = [ + { name = "anyio" }, + { name = "click" }, + { name = "httpx" }, + { name = "mcp" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "anyio", specifier = ">=4.5" }, + { name = "click", specifier = ">=8.2.0" }, + { name = "httpx", specifier = ">=0.27" }, + { name = "mcp", editable = "." 
}, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.378" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "ruff", specifier = ">=0.6.9" }, +] + [[package]] name = "mdurl" version = "0.1.2" From 0d48861924d0f2313a849745214d8b2f99422ddb Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Thu, 9 Oct 2025 09:46:37 -0700 Subject: [PATCH 40/41] Add dedicated event queues to fix IO and be transport-agnostic --- .../mcp_async_reconnect_client/client.py | 35 +- .../async-reconnect-client/pyproject.toml | 20 +- .../clients/async-reconnect-client/uv.lock | 761 ------------------ .../clients/simple-auth-client/pyproject.toml | 11 +- examples/clients/simple-auth-client/uv.lock | 535 ------------ examples/clients/simple-chatbot/uv.lock | 555 ------------- .../mcp_sqlite_async_operations/server.py | 139 +++- pyproject.toml | 52 +- src/mcp/client/session.py | 2 +- src/mcp/server/fastmcp/server.py | 20 +- src/mcp/server/lowlevel/server.py | 125 ++- src/mcp/server/streamable_http.py | 70 +- src/mcp/shared/async_operations.py | 206 +++-- src/mcp/shared/async_operations_utils.py | 66 ++ src/mcp/shared/session.py | 35 +- tests/server/fastmcp/test_integration.py | 8 +- uv.lock | 96 +++ 17 files changed, 647 insertions(+), 2089 deletions(-) delete mode 100644 examples/clients/async-reconnect-client/uv.lock delete mode 100644 examples/clients/simple-auth-client/uv.lock delete mode 100644 examples/clients/simple-chatbot/uv.lock create mode 100644 src/mcp/shared/async_operations_utils.py diff --git a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py index 6fa3af9ce..16e3c5f8d 100644 --- a/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py +++ b/examples/clients/async-reconnect-client/mcp_async_reconnect_client/client.py @@ -1,34 +1,51 @@ +import logging + import anyio import click from mcp import ClientSession, 
types from mcp.client.streamable_http import streamablehttp_client +from mcp.shared.context import RequestContext + +logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(name)s - %(message)s") +logger = logging.getLogger(__name__) + + +async def elicitation_callback(context: RequestContext[ClientSession, None], params: types.ElicitRequestParams): + """Handle elicitation requests from the server.""" + logger.info(f"Server is asking: {params.message}") + return types.ElicitResult( + action="accept", + content={"continue_processing": True}, + ) async def call_async_tool(session: ClientSession, token: str | None): """Demonstrate calling an async tool.""" - print("Calling async tool...") - if not token: - result = await session.call_tool("fetch_website", arguments={"url": "https://modelcontextprotocol.io"}) + logger.info("Calling async tool...") + result = await session.call_tool( + "fetch_website", + arguments={"url": "https://modelcontextprotocol.io"}, + ) if result.isError: raise RuntimeError(f"Error calling tool: {result}") assert result.operation token = result.operation.token - print(f"Operation started with token: {token}") + logger.info(f"Operation started with token: {token}") # Poll for completion while True: status = await session.get_operation_status(token) - print(f"Status: {status.status}") + logger.info(f"Status: {status.status}") if status.status == "completed": final_result = await session.get_operation_result(token) for content in final_result.result.content: if isinstance(content, types.TextContent): - print(f"Result: {content.text}") + logger.info(f"Result: {content.text}") break elif status.status == "failed": - print(f"Operation failed: {status.error}") + logger.error(f"Operation failed: {status.error}") break await anyio.sleep(0.5) @@ -36,7 +53,9 @@ async def call_async_tool(session: ClientSession, token: str | None): async def run_session(endpoint: str, token: str | None): async with streamablehttp_client(endpoint) as 
(read, write, _): - async with ClientSession(read, write, protocol_version="next") as session: + async with ClientSession( + read, write, protocol_version="next", elicitation_callback=elicitation_callback + ) as session: await session.initialize() await call_async_tool(session, token) diff --git a/examples/clients/async-reconnect-client/pyproject.toml b/examples/clients/async-reconnect-client/pyproject.toml index 53c66ea28..251d441ed 100644 --- a/examples/clients/async-reconnect-client/pyproject.toml +++ b/examples/clients/async-reconnect-client/pyproject.toml @@ -4,17 +4,7 @@ version = "0.1.0" description = "A client for the MCP simple-tool-async server that supports reconnection" readme = "README.md" requires-python = ">=3.10" -authors = [{ name = "Anthropic" }] -keywords = ["mcp", "client", "async"] -license = { text = "MIT" } -classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.10", -] -dependencies = ["click>=8.2.0", "mcp>=1.0.0"] +dependencies = ["anyio>=4.5", "click>=8.2.0", "mcp"] [project.scripts] mcp-async-reconnect-client = "mcp_async_reconnect_client.client:main" @@ -40,10 +30,4 @@ line-length = 120 target-version = "py310" [tool.uv] -dev-dependencies = ["pyright>=1.1.379", "pytest>=8.3.3", "ruff>=0.6.9"] - -[tool.uv.sources] -mcp = { path = "../../../" } - -[[tool.uv.index]] -url = "https://pypi.org/simple" +dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] diff --git a/examples/clients/async-reconnect-client/uv.lock b/examples/clients/async-reconnect-client/uv.lock deleted file mode 100644 index 21173abdc..000000000 --- a/examples/clients/async-reconnect-client/uv.lock +++ /dev/null @@ -1,761 +0,0 @@ -version = 1 -revision = 2 -requires-python = ">=3.10" - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = 
{ url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, -] - -[[package]] -name = "anyio" -version = "4.11.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, -] - -[[package]] -name = "attrs" -version = "25.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, -] - -[[package]] -name = "certifi" -version = "2025.8.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, -] - -[[package]] -name = "click" -version = "8.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, -] - -[[package]] -name = "exceptiongroup" -version = "1.3.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, -] - -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = 
"sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, -] - -[[package]] -name = "httpx-sse" -version = "0.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6e/fa/66bd985dd0b7c109a3bcb89272ee0bfb7e2b4d06309ad7b38ff866734b2a/httpx_sse-0.4.1.tar.gz", hash = 
"sha256:8f44d34414bc7b21bf3602713005c5df4917884f76072479b21f68befa4ea26e", size = 12998, upload-time = "2025-06-24T13:21:05.71Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, -] - -[[package]] -name = "iniconfig" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, -] - -[[package]] -name = "jsonschema" -version = "4.25.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = 
"jsonschema-specifications" }, - { name = "referencing" }, - { name = "rpds-py" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, -] - -[[package]] -name = "jsonschema-specifications" -version = "2025.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "referencing" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, -] - -[[package]] -name = "mcp" -source = { directory = "../../../" } -dependencies = [ - { name = "anyio" }, - { name = "httpx" }, - { name = "httpx-sse" }, - { name = "jsonschema" }, - { name = "pydantic" }, - { name = "pydantic-settings" }, - { name = "python-multipart" }, - { name = "pywin32", marker = "sys_platform == 'win32'" }, - { name = "sse-starlette" }, - { name = "starlette" }, - { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, -] - -[package.metadata] -requires-dist = [ - { 
name = "anyio", specifier = ">=4.5" }, - { name = "httpx", specifier = ">=0.27.1" }, - { name = "httpx-sse", specifier = ">=0.4" }, - { name = "jsonschema", specifier = ">=4.20.0" }, - { name = "pydantic", specifier = ">=2.11.0,<3.0.0" }, - { name = "pydantic-settings", specifier = ">=2.5.2" }, - { name = "python-dotenv", marker = "extra == 'cli'", specifier = ">=1.0.0" }, - { name = "python-multipart", specifier = ">=0.0.9" }, - { name = "pywin32", marker = "sys_platform == 'win32'", specifier = ">=310" }, - { name = "rich", marker = "extra == 'rich'", specifier = ">=13.9.4" }, - { name = "sse-starlette", specifier = ">=1.6.1" }, - { name = "starlette", specifier = ">=0.27" }, - { name = "typer", marker = "extra == 'cli'", specifier = ">=0.16.0" }, - { name = "uvicorn", marker = "sys_platform != 'emscripten'", specifier = ">=0.31.1" }, - { name = "websockets", marker = "extra == 'ws'", specifier = ">=15.0.1" }, -] -provides-extras = ["cli", "rich", "ws"] - -[package.metadata.requires-dev] -dev = [ - { name = "dirty-equals", specifier = ">=0.9.0" }, - { name = "inline-snapshot", specifier = ">=0.23.0" }, - { name = "pyright", specifier = ">=1.1.400" }, - { name = "pytest", specifier = ">=8.3.4" }, - { name = "pytest-examples", specifier = ">=0.0.14" }, - { name = "pytest-flakefinder", specifier = ">=1.1.0" }, - { name = "pytest-pretty", specifier = ">=1.2.0" }, - { name = "pytest-xdist", specifier = ">=3.6.1" }, - { name = "ruff", specifier = ">=0.8.5" }, - { name = "trio", specifier = ">=0.26.2" }, -] -docs = [ - { name = "mkdocs", specifier = ">=1.6.1" }, - { name = "mkdocs-glightbox", specifier = ">=0.4.0" }, - { name = "mkdocs-material", extras = ["imaging"], specifier = ">=9.5.45" }, - { name = "mkdocstrings-python", specifier = ">=1.12.2" }, -] - -[[package]] -name = "mcp-async-reconnect-client" -version = "0.1.0" -source = { editable = "." 
} -dependencies = [ - { name = "click" }, - { name = "mcp" }, -] - -[package.dev-dependencies] -dev = [ - { name = "pyright" }, - { name = "pytest" }, - { name = "ruff" }, -] - -[package.metadata] -requires-dist = [ - { name = "click", specifier = ">=8.2.0" }, - { name = "mcp", directory = "../../../" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "pyright", specifier = ">=1.1.379" }, - { name = "pytest", specifier = ">=8.3.3" }, - { name = "ruff", specifier = ">=0.6.9" }, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, -] - -[[package]] -name = "packaging" -version = "25.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, -] - -[[package]] -name = "pluggy" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } 
-sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, -] - -[[package]] -name = "pydantic" -version = "2.11.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" }, -] - -[[package]] -name = "pydantic-core" -version = "2.33.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817, upload-time = "2025-04-23T18:30:43.919Z" }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357, upload-time = "2025-04-23T18:30:46.372Z" }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011, upload-time = "2025-04-23T18:30:47.591Z" }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730, upload-time = "2025-04-23T18:30:49.328Z" }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178, upload-time = "2025-04-23T18:30:50.907Z" }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462, upload-time = "2025-04-23T18:30:52.083Z" }, - { 
url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652, upload-time = "2025-04-23T18:30:53.389Z" }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306, upload-time = "2025-04-23T18:30:54.661Z" }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720, upload-time = "2025-04-23T18:30:56.11Z" }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915, upload-time = "2025-04-23T18:30:57.501Z" }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884, upload-time = "2025-04-23T18:30:58.867Z" }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496, upload-time = "2025-04-23T18:31:00.078Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019, upload-time = "2025-04-23T18:31:01.335Z" }, - { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584, upload-time = "2025-04-23T18:31:03.106Z" }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071, upload-time = "2025-04-23T18:31:04.621Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823, upload-time = "2025-04-23T18:31:06.377Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792, upload-time = "2025-04-23T18:31:07.93Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338, upload-time = "2025-04-23T18:31:09.283Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998, upload-time = "2025-04-23T18:31:11.7Z" }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200, upload-time = "2025-04-23T18:31:13.536Z" }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890, upload-time = "2025-04-23T18:31:15.011Z" }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359, upload-time = "2025-04-23T18:31:16.393Z" }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883, upload-time = "2025-04-23T18:31:17.892Z" }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074, upload-time = "2025-04-23T18:31:19.205Z" }, - { url = 
"https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538, upload-time = "2025-04-23T18:31:20.541Z" }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909, upload-time = "2025-04-23T18:31:22.371Z" }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786, upload-time = "2025-04-23T18:31:24.161Z" }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, - { url = 
"https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = 
"2025-04-23T18:31:42.757Z" }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = 
"https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = 
"2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982, upload-time = "2025-04-23T18:32:53.14Z" }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412, upload-time = "2025-04-23T18:32:55.52Z" }, - { url = 
"https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749, upload-time = "2025-04-23T18:32:57.546Z" }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527, upload-time = "2025-04-23T18:32:59.771Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225, upload-time = "2025-04-23T18:33:04.51Z" }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490, upload-time = "2025-04-23T18:33:06.391Z" }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525, upload-time = "2025-04-23T18:33:08.44Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446, upload-time = 
"2025-04-23T18:33:10.313Z" }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678, upload-time = "2025-04-23T18:33:12.224Z" }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200, upload-time = "2025-04-23T18:33:14.199Z" }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123, upload-time = "2025-04-23T18:33:16.555Z" }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852, upload-time = "2025-04-23T18:33:18.513Z" }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484, upload-time = "2025-04-23T18:33:20.475Z" }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896, 
upload-time = "2025-04-23T18:33:22.501Z" }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475, upload-time = "2025-04-23T18:33:24.528Z" }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013, upload-time = "2025-04-23T18:33:26.621Z" }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715, upload-time = "2025-04-23T18:33:28.656Z" }, - { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757, upload-time = "2025-04-23T18:33:30.645Z" }, -] - -[[package]] -name = "pydantic-settings" -version = "2.11.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, -] - -[[package]] -name = "pygments" -version = "2.19.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, -] - -[[package]] -name = "pyright" -version = "1.1.405" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nodeenv" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" }, -] - -[[package]] -name = "pytest" -version = "8.4.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", 
marker = "python_full_version < '3.11'" }, - { name = "iniconfig" }, - { name = "packaging" }, - { name = "pluggy" }, - { name = "pygments" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, -] - -[[package]] -name = "python-dotenv" -version = "1.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, -] - -[[package]] -name = "python-multipart" -version = "0.0.20" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, -] - -[[package]] -name = "pywin32" -version = "311" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, - { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, - { url = "https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, - { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, - { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, - { url = 
"https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, - { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, - { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, - { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, - { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, - { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = 
"sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, - { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, - { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, - { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, -] - -[[package]] -name = "referencing" -version = "0.36.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "attrs" }, - { name = "rpds-py" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/2f/db/98b5c277be99dd18bfd91dd04e1b759cad18d1a338188c936e92f921c7e2/referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa", size = 74744, upload-time = "2025-01-25T08:48:16.138Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, -] - -[[package]] -name = "rpds-py" -version = "0.27.1" -source = { registry = "https://pypi.org/simple" } -sdist = 
{ url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/ed/3aef893e2dd30e77e35d20d4ddb45ca459db59cead748cad9796ad479411/rpds_py-0.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:68afeec26d42ab3b47e541b272166a0b4400313946871cba3ed3a4fc0cab1cef", size = 371606, upload-time = "2025-08-27T12:12:25.189Z" }, - { url = "https://files.pythonhosted.org/packages/6d/82/9818b443e5d3eb4c83c3994561387f116aae9833b35c484474769c4a8faf/rpds_py-0.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74e5b2f7bb6fa38b1b10546d27acbacf2a022a8b5543efb06cfebc72a59c85be", size = 353452, upload-time = "2025-08-27T12:12:27.433Z" }, - { url = "https://files.pythonhosted.org/packages/99/c7/d2a110ffaaa397fc6793a83c7bd3545d9ab22658b7cdff05a24a4535cc45/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9024de74731df54546fab0bfbcdb49fae19159ecaecfc8f37c18d2c7e2c0bd61", size = 381519, upload-time = "2025-08-27T12:12:28.719Z" }, - { url = "https://files.pythonhosted.org/packages/5a/bc/e89581d1f9d1be7d0247eaef602566869fdc0d084008ba139e27e775366c/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:31d3ebadefcd73b73928ed0b2fd696f7fefda8629229f81929ac9c1854d0cffb", size = 394424, upload-time = "2025-08-27T12:12:30.207Z" }, - { url = "https://files.pythonhosted.org/packages/ac/2e/36a6861f797530e74bb6ed53495f8741f1ef95939eed01d761e73d559067/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2e7f8f169d775dd9092a1743768d771f1d1300453ddfe6325ae3ab5332b4657", size = 523467, upload-time = "2025-08-27T12:12:31.808Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/59/c1bc2be32564fa499f988f0a5c6505c2f4746ef96e58e4d7de5cf923d77e/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d905d16f77eb6ab2e324e09bfa277b4c8e5e6b8a78a3e7ff8f3cdf773b4c013", size = 402660, upload-time = "2025-08-27T12:12:33.444Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ec/ef8bf895f0628dd0a59e54d81caed6891663cb9c54a0f4bb7da918cb88cf/rpds_py-0.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50c946f048209e6362e22576baea09193809f87687a95a8db24e5fbdb307b93a", size = 384062, upload-time = "2025-08-27T12:12:34.857Z" }, - { url = "https://files.pythonhosted.org/packages/69/f7/f47ff154be8d9a5e691c083a920bba89cef88d5247c241c10b9898f595a1/rpds_py-0.27.1-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:3deab27804d65cd8289eb814c2c0e807c4b9d9916c9225e363cb0cf875eb67c1", size = 401289, upload-time = "2025-08-27T12:12:36.085Z" }, - { url = "https://files.pythonhosted.org/packages/3b/d9/ca410363efd0615814ae579f6829cafb39225cd63e5ea5ed1404cb345293/rpds_py-0.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8b61097f7488de4be8244c89915da8ed212832ccf1e7c7753a25a394bf9b1f10", size = 417718, upload-time = "2025-08-27T12:12:37.401Z" }, - { url = "https://files.pythonhosted.org/packages/e3/a0/8cb5c2ff38340f221cc067cc093d1270e10658ba4e8d263df923daa18e86/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8a3f29aba6e2d7d90528d3c792555a93497fe6538aa65eb675b44505be747808", size = 558333, upload-time = "2025-08-27T12:12:38.672Z" }, - { url = "https://files.pythonhosted.org/packages/6f/8c/1b0de79177c5d5103843774ce12b84caa7164dfc6cd66378768d37db11bf/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6cd0485b7d347304067153a6dc1d73f7d4fd995a396ef32a24d24b8ac63ac8", size = 589127, upload-time = "2025-08-27T12:12:41.48Z" }, - { url = 
"https://files.pythonhosted.org/packages/c8/5e/26abb098d5e01266b0f3a2488d299d19ccc26849735d9d2b95c39397e945/rpds_py-0.27.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f4461bf931108c9fa226ffb0e257c1b18dc2d44cd72b125bec50ee0ab1248a9", size = 554899, upload-time = "2025-08-27T12:12:42.925Z" }, - { url = "https://files.pythonhosted.org/packages/de/41/905cc90ced13550db017f8f20c6d8e8470066c5738ba480d7ba63e3d136b/rpds_py-0.27.1-cp310-cp310-win32.whl", hash = "sha256:ee5422d7fb21f6a00c1901bf6559c49fee13a5159d0288320737bbf6585bd3e4", size = 217450, upload-time = "2025-08-27T12:12:44.813Z" }, - { url = "https://files.pythonhosted.org/packages/75/3d/6bef47b0e253616ccdf67c283e25f2d16e18ccddd38f92af81d5a3420206/rpds_py-0.27.1-cp310-cp310-win_amd64.whl", hash = "sha256:3e039aabf6d5f83c745d5f9a0a381d031e9ed871967c0a5c38d201aca41f3ba1", size = 228447, upload-time = "2025-08-27T12:12:46.204Z" }, - { url = "https://files.pythonhosted.org/packages/b5/c1/7907329fbef97cbd49db6f7303893bd1dd5a4a3eae415839ffdfb0762cae/rpds_py-0.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:be898f271f851f68b318872ce6ebebbc62f303b654e43bf72683dbdc25b7c881", size = 371063, upload-time = "2025-08-27T12:12:47.856Z" }, - { url = "https://files.pythonhosted.org/packages/11/94/2aab4bc86228bcf7c48760990273653a4900de89c7537ffe1b0d6097ed39/rpds_py-0.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:62ac3d4e3e07b58ee0ddecd71d6ce3b1637de2d373501412df395a0ec5f9beb5", size = 353210, upload-time = "2025-08-27T12:12:49.187Z" }, - { url = "https://files.pythonhosted.org/packages/3a/57/f5eb3ecf434342f4f1a46009530e93fd201a0b5b83379034ebdb1d7c1a58/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4708c5c0ceb2d034f9991623631d3d23cb16e65c83736ea020cdbe28d57c0a0e", size = 381636, upload-time = "2025-08-27T12:12:50.492Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/f4/ef95c5945e2ceb5119571b184dd5a1cc4b8541bbdf67461998cfeac9cb1e/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:abfa1171a9952d2e0002aba2ad3780820b00cc3d9c98c6630f2e93271501f66c", size = 394341, upload-time = "2025-08-27T12:12:52.024Z" }, - { url = "https://files.pythonhosted.org/packages/5a/7e/4bd610754bf492d398b61725eb9598ddd5eb86b07d7d9483dbcd810e20bc/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b507d19f817ebaca79574b16eb2ae412e5c0835542c93fe9983f1e432aca195", size = 523428, upload-time = "2025-08-27T12:12:53.779Z" }, - { url = "https://files.pythonhosted.org/packages/9f/e5/059b9f65a8c9149361a8b75094864ab83b94718344db511fd6117936ed2a/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168b025f8fd8d8d10957405f3fdcef3dc20f5982d398f90851f4abc58c566c52", size = 402923, upload-time = "2025-08-27T12:12:55.15Z" }, - { url = "https://files.pythonhosted.org/packages/f5/48/64cabb7daced2968dd08e8a1b7988bf358d7bd5bcd5dc89a652f4668543c/rpds_py-0.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb56c6210ef77caa58e16e8c17d35c63fe3f5b60fd9ba9d424470c3400bcf9ed", size = 384094, upload-time = "2025-08-27T12:12:57.194Z" }, - { url = "https://files.pythonhosted.org/packages/ae/e1/dc9094d6ff566bff87add8a510c89b9e158ad2ecd97ee26e677da29a9e1b/rpds_py-0.27.1-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:d252f2d8ca0195faa707f8eb9368955760880b2b42a8ee16d382bf5dd807f89a", size = 401093, upload-time = "2025-08-27T12:12:58.985Z" }, - { url = "https://files.pythonhosted.org/packages/37/8e/ac8577e3ecdd5593e283d46907d7011618994e1d7ab992711ae0f78b9937/rpds_py-0.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6e5e54da1e74b91dbc7996b56640f79b195d5925c2b78efaa8c5d53e1d88edde", size = 417969, upload-time = "2025-08-27T12:13:00.367Z" }, - { url = 
"https://files.pythonhosted.org/packages/66/6d/87507430a8f74a93556fe55c6485ba9c259949a853ce407b1e23fea5ba31/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ffce0481cc6e95e5b3f0a47ee17ffbd234399e6d532f394c8dce320c3b089c21", size = 558302, upload-time = "2025-08-27T12:13:01.737Z" }, - { url = "https://files.pythonhosted.org/packages/3a/bb/1db4781ce1dda3eecc735e3152659a27b90a02ca62bfeea17aee45cc0fbc/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a205fdfe55c90c2cd8e540ca9ceba65cbe6629b443bc05db1f590a3db8189ff9", size = 589259, upload-time = "2025-08-27T12:13:03.127Z" }, - { url = "https://files.pythonhosted.org/packages/7b/0e/ae1c8943d11a814d01b482e1f8da903f88047a962dff9bbdadf3bd6e6fd1/rpds_py-0.27.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:689fb5200a749db0415b092972e8eba85847c23885c8543a8b0f5c009b1a5948", size = 554983, upload-time = "2025-08-27T12:13:04.516Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d5/0b2a55415931db4f112bdab072443ff76131b5ac4f4dc98d10d2d357eb03/rpds_py-0.27.1-cp311-cp311-win32.whl", hash = "sha256:3182af66048c00a075010bc7f4860f33913528a4b6fc09094a6e7598e462fe39", size = 217154, upload-time = "2025-08-27T12:13:06.278Z" }, - { url = "https://files.pythonhosted.org/packages/24/75/3b7ffe0d50dc86a6a964af0d1cc3a4a2cdf437cb7b099a4747bbb96d1819/rpds_py-0.27.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4938466c6b257b2f5c4ff98acd8128ec36b5059e5c8f8372d79316b1c36bb15", size = 228627, upload-time = "2025-08-27T12:13:07.625Z" }, - { url = "https://files.pythonhosted.org/packages/8d/3f/4fd04c32abc02c710f09a72a30c9a55ea3cc154ef8099078fd50a0596f8e/rpds_py-0.27.1-cp311-cp311-win_arm64.whl", hash = "sha256:2f57af9b4d0793e53266ee4325535a31ba48e2f875da81a9177c9926dfa60746", size = 220998, upload-time = "2025-08-27T12:13:08.972Z" }, - { url = 
"https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, - { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, - { url = "https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976, upload-time = "2025-08-27T12:13:14.368Z" }, - { url = "https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953, upload-time = "2025-08-27T12:13:15.774Z" }, - { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915, upload-time = "2025-08-27T12:13:17.379Z" }, - { url = 
"https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883, upload-time = "2025-08-27T12:13:18.704Z" }, - { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699, upload-time = "2025-08-27T12:13:20.089Z" }, - { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713, upload-time = "2025-08-27T12:13:21.436Z" }, - { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324, upload-time = "2025-08-27T12:13:22.789Z" }, - { url = "https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646, upload-time = "2025-08-27T12:13:24.122Z" }, - { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137, upload-time = "2025-08-27T12:13:25.557Z" }, - { url = 
"https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343, upload-time = "2025-08-27T12:13:26.967Z" }, - { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497, upload-time = "2025-08-27T12:13:28.326Z" }, - { url = "https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790, upload-time = "2025-08-27T12:13:29.71Z" }, - { url = "https://files.pythonhosted.org/packages/cc/77/610aeee8d41e39080c7e14afa5387138e3c9fa9756ab893d09d99e7d8e98/rpds_py-0.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e4b9fcfbc021633863a37e92571d6f91851fa656f0180246e84cbd8b3f6b329b", size = 361741, upload-time = "2025-08-27T12:13:31.039Z" }, - { url = "https://files.pythonhosted.org/packages/3a/fc/c43765f201c6a1c60be2043cbdb664013def52460a4c7adace89d6682bf4/rpds_py-0.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1441811a96eadca93c517d08df75de45e5ffe68aa3089924f963c782c4b898cf", size = 345574, upload-time = "2025-08-27T12:13:32.902Z" }, - { url = "https://files.pythonhosted.org/packages/20/42/ee2b2ca114294cd9847d0ef9c26d2b0851b2e7e00bf14cc4c0b581df0fc3/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55266dafa22e672f5a4f65019015f90336ed31c6383bd53f5e7826d21a0e0b83", size = 385051, upload-time = "2025-08-27T12:13:34.228Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/e8/1e430fe311e4799e02e2d1af7c765f024e95e17d651612425b226705f910/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d78827d7ac08627ea2c8e02c9e5b41180ea5ea1f747e9db0915e3adf36b62dcf", size = 398395, upload-time = "2025-08-27T12:13:36.132Z" }, - { url = "https://files.pythonhosted.org/packages/82/95/9dc227d441ff2670651c27a739acb2535ccaf8b351a88d78c088965e5996/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae92443798a40a92dc5f0b01d8a7c93adde0c4dc965310a29ae7c64d72b9fad2", size = 524334, upload-time = "2025-08-27T12:13:37.562Z" }, - { url = "https://files.pythonhosted.org/packages/87/01/a670c232f401d9ad461d9a332aa4080cd3cb1d1df18213dbd0d2a6a7ab51/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c46c9dd2403b66a2a3b9720ec4b74d4ab49d4fabf9f03dfdce2d42af913fe8d0", size = 407691, upload-time = "2025-08-27T12:13:38.94Z" }, - { url = "https://files.pythonhosted.org/packages/03/36/0a14aebbaa26fe7fab4780c76f2239e76cc95a0090bdb25e31d95c492fcd/rpds_py-0.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2efe4eb1d01b7f5f1939f4ef30ecea6c6b3521eec451fb93191bf84b2a522418", size = 386868, upload-time = "2025-08-27T12:13:40.192Z" }, - { url = "https://files.pythonhosted.org/packages/3b/03/8c897fb8b5347ff6c1cc31239b9611c5bf79d78c984430887a353e1409a1/rpds_py-0.27.1-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:15d3b4d83582d10c601f481eca29c3f138d44c92187d197aff663a269197c02d", size = 405469, upload-time = "2025-08-27T12:13:41.496Z" }, - { url = "https://files.pythonhosted.org/packages/da/07/88c60edc2df74850d496d78a1fdcdc7b54360a7f610a4d50008309d41b94/rpds_py-0.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4ed2e16abbc982a169d30d1a420274a709949e2cbdef119fe2ec9d870b42f274", size = 422125, upload-time = "2025-08-27T12:13:42.802Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/86/5f4c707603e41b05f191a749984f390dabcbc467cf833769b47bf14ba04f/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a75f305c9b013289121ec0f1181931975df78738cdf650093e6b86d74aa7d8dd", size = 562341, upload-time = "2025-08-27T12:13:44.472Z" }, - { url = "https://files.pythonhosted.org/packages/b2/92/3c0cb2492094e3cd9baf9e49bbb7befeceb584ea0c1a8b5939dca4da12e5/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:67ce7620704745881a3d4b0ada80ab4d99df390838839921f99e63c474f82cf2", size = 592511, upload-time = "2025-08-27T12:13:45.898Z" }, - { url = "https://files.pythonhosted.org/packages/10/bb/82e64fbb0047c46a168faa28d0d45a7851cd0582f850b966811d30f67ad8/rpds_py-0.27.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d992ac10eb86d9b6f369647b6a3f412fc0075cfd5d799530e84d335e440a002", size = 557736, upload-time = "2025-08-27T12:13:47.408Z" }, - { url = "https://files.pythonhosted.org/packages/00/95/3c863973d409210da7fb41958172c6b7dbe7fc34e04d3cc1f10bb85e979f/rpds_py-0.27.1-cp313-cp313-win32.whl", hash = "sha256:4f75e4bd8ab8db624e02c8e2fc4063021b58becdbe6df793a8111d9343aec1e3", size = 221462, upload-time = "2025-08-27T12:13:48.742Z" }, - { url = "https://files.pythonhosted.org/packages/ce/2c/5867b14a81dc217b56d95a9f2a40fdbc56a1ab0181b80132beeecbd4b2d6/rpds_py-0.27.1-cp313-cp313-win_amd64.whl", hash = "sha256:f9025faafc62ed0b75a53e541895ca272815bec18abe2249ff6501c8f2e12b83", size = 232034, upload-time = "2025-08-27T12:13:50.11Z" }, - { url = "https://files.pythonhosted.org/packages/c7/78/3958f3f018c01923823f1e47f1cc338e398814b92d83cd278364446fac66/rpds_py-0.27.1-cp313-cp313-win_arm64.whl", hash = "sha256:ed10dc32829e7d222b7d3b93136d25a406ba9788f6a7ebf6809092da1f4d279d", size = 222392, upload-time = "2025-08-27T12:13:52.587Z" }, - { url = 
"https://files.pythonhosted.org/packages/01/76/1cdf1f91aed5c3a7bf2eba1f1c4e4d6f57832d73003919a20118870ea659/rpds_py-0.27.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:92022bbbad0d4426e616815b16bc4127f83c9a74940e1ccf3cfe0b387aba0228", size = 358355, upload-time = "2025-08-27T12:13:54.012Z" }, - { url = "https://files.pythonhosted.org/packages/c3/6f/bf142541229374287604caf3bb2a4ae17f0a580798fd72d3b009b532db4e/rpds_py-0.27.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:47162fdab9407ec3f160805ac3e154df042e577dd53341745fc7fb3f625e6d92", size = 342138, upload-time = "2025-08-27T12:13:55.791Z" }, - { url = "https://files.pythonhosted.org/packages/1a/77/355b1c041d6be40886c44ff5e798b4e2769e497b790f0f7fd1e78d17e9a8/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb89bec23fddc489e5d78b550a7b773557c9ab58b7946154a10a6f7a214a48b2", size = 380247, upload-time = "2025-08-27T12:13:57.683Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a4/d9cef5c3946ea271ce2243c51481971cd6e34f21925af2783dd17b26e815/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e48af21883ded2b3e9eb48cb7880ad8598b31ab752ff3be6457001d78f416723", size = 390699, upload-time = "2025-08-27T12:13:59.137Z" }, - { url = "https://files.pythonhosted.org/packages/3a/06/005106a7b8c6c1a7e91b73169e49870f4af5256119d34a361ae5240a0c1d/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f5b7bd8e219ed50299e58551a410b64daafb5017d54bbe822e003856f06a802", size = 521852, upload-time = "2025-08-27T12:14:00.583Z" }, - { url = "https://files.pythonhosted.org/packages/e5/3e/50fb1dac0948e17a02eb05c24510a8fe12d5ce8561c6b7b7d1339ab7ab9c/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08f1e20bccf73b08d12d804d6e1c22ca5530e71659e6673bce31a6bb71c1e73f", size = 402582, upload-time = "2025-08-27T12:14:02.034Z" }, - { url = 
"https://files.pythonhosted.org/packages/cb/b0/f4e224090dc5b0ec15f31a02d746ab24101dd430847c4d99123798661bfc/rpds_py-0.27.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dc5dceeaefcc96dc192e3a80bbe1d6c410c469e97bdd47494a7d930987f18b2", size = 384126, upload-time = "2025-08-27T12:14:03.437Z" }, - { url = "https://files.pythonhosted.org/packages/54/77/ac339d5f82b6afff1df8f0fe0d2145cc827992cb5f8eeb90fc9f31ef7a63/rpds_py-0.27.1-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d76f9cc8665acdc0c9177043746775aa7babbf479b5520b78ae4002d889f5c21", size = 399486, upload-time = "2025-08-27T12:14:05.443Z" }, - { url = "https://files.pythonhosted.org/packages/d6/29/3e1c255eee6ac358c056a57d6d6869baa00a62fa32eea5ee0632039c50a3/rpds_py-0.27.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:134fae0e36022edad8290a6661edf40c023562964efea0cc0ec7f5d392d2aaef", size = 414832, upload-time = "2025-08-27T12:14:06.902Z" }, - { url = "https://files.pythonhosted.org/packages/3f/db/6d498b844342deb3fa1d030598db93937a9964fcf5cb4da4feb5f17be34b/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb11a4f1b2b63337cfd3b4d110af778a59aae51c81d195768e353d8b52f88081", size = 557249, upload-time = "2025-08-27T12:14:08.37Z" }, - { url = "https://files.pythonhosted.org/packages/60/f3/690dd38e2310b6f68858a331399b4d6dbb9132c3e8ef8b4333b96caf403d/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:13e608ac9f50a0ed4faec0e90ece76ae33b34c0e8656e3dceb9a7db994c692cd", size = 587356, upload-time = "2025-08-27T12:14:10.034Z" }, - { url = "https://files.pythonhosted.org/packages/86/e3/84507781cccd0145f35b1dc32c72675200c5ce8d5b30f813e49424ef68fc/rpds_py-0.27.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dd2135527aa40f061350c3f8f89da2644de26cd73e4de458e79606384f4f68e7", size = 555300, upload-time = "2025-08-27T12:14:11.783Z" }, - { url = 
"https://files.pythonhosted.org/packages/e5/ee/375469849e6b429b3516206b4580a79e9ef3eb12920ddbd4492b56eaacbe/rpds_py-0.27.1-cp313-cp313t-win32.whl", hash = "sha256:3020724ade63fe320a972e2ffd93b5623227e684315adce194941167fee02688", size = 216714, upload-time = "2025-08-27T12:14:13.629Z" }, - { url = "https://files.pythonhosted.org/packages/21/87/3fc94e47c9bd0742660e84706c311a860dcae4374cf4a03c477e23ce605a/rpds_py-0.27.1-cp313-cp313t-win_amd64.whl", hash = "sha256:8ee50c3e41739886606388ba3ab3ee2aae9f35fb23f833091833255a31740797", size = 228943, upload-time = "2025-08-27T12:14:14.937Z" }, - { url = "https://files.pythonhosted.org/packages/70/36/b6e6066520a07cf029d385de869729a895917b411e777ab1cde878100a1d/rpds_py-0.27.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:acb9aafccaae278f449d9c713b64a9e68662e7799dbd5859e2c6b3c67b56d334", size = 362472, upload-time = "2025-08-27T12:14:16.333Z" }, - { url = "https://files.pythonhosted.org/packages/af/07/b4646032e0dcec0df9c73a3bd52f63bc6c5f9cda992f06bd0e73fe3fbebd/rpds_py-0.27.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b7fb801aa7f845ddf601c49630deeeccde7ce10065561d92729bfe81bd21fb33", size = 345676, upload-time = "2025-08-27T12:14:17.764Z" }, - { url = "https://files.pythonhosted.org/packages/b0/16/2f1003ee5d0af4bcb13c0cf894957984c32a6751ed7206db2aee7379a55e/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe0dd05afb46597b9a2e11c351e5e4283c741237e7f617ffb3252780cca9336a", size = 385313, upload-time = "2025-08-27T12:14:19.829Z" }, - { url = "https://files.pythonhosted.org/packages/05/cd/7eb6dd7b232e7f2654d03fa07f1414d7dfc980e82ba71e40a7c46fd95484/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b6dfb0e058adb12d8b1d1b25f686e94ffa65d9995a5157afe99743bf7369d62b", size = 399080, upload-time = "2025-08-27T12:14:21.531Z" }, - { url = 
"https://files.pythonhosted.org/packages/20/51/5829afd5000ec1cb60f304711f02572d619040aa3ec033d8226817d1e571/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed090ccd235f6fa8bb5861684567f0a83e04f52dfc2e5c05f2e4b1309fcf85e7", size = 523868, upload-time = "2025-08-27T12:14:23.485Z" }, - { url = "https://files.pythonhosted.org/packages/05/2c/30eebca20d5db95720ab4d2faec1b5e4c1025c473f703738c371241476a2/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf876e79763eecf3e7356f157540d6a093cef395b65514f17a356f62af6cc136", size = 408750, upload-time = "2025-08-27T12:14:24.924Z" }, - { url = "https://files.pythonhosted.org/packages/90/1a/cdb5083f043597c4d4276eae4e4c70c55ab5accec078da8611f24575a367/rpds_py-0.27.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12ed005216a51b1d6e2b02a7bd31885fe317e45897de81d86dcce7d74618ffff", size = 387688, upload-time = "2025-08-27T12:14:27.537Z" }, - { url = "https://files.pythonhosted.org/packages/7c/92/cf786a15320e173f945d205ab31585cc43969743bb1a48b6888f7a2b0a2d/rpds_py-0.27.1-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:ee4308f409a40e50593c7e3bb8cbe0b4d4c66d1674a316324f0c2f5383b486f9", size = 407225, upload-time = "2025-08-27T12:14:28.981Z" }, - { url = "https://files.pythonhosted.org/packages/33/5c/85ee16df5b65063ef26017bef33096557a4c83fbe56218ac7cd8c235f16d/rpds_py-0.27.1-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b08d152555acf1f455154d498ca855618c1378ec810646fcd7c76416ac6dc60", size = 423361, upload-time = "2025-08-27T12:14:30.469Z" }, - { url = "https://files.pythonhosted.org/packages/4b/8e/1c2741307fcabd1a334ecf008e92c4f47bb6f848712cf15c923becfe82bb/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dce51c828941973a5684d458214d3a36fcd28da3e1875d659388f4f9f12cc33e", size = 562493, upload-time = "2025-08-27T12:14:31.987Z" }, - { url = 
"https://files.pythonhosted.org/packages/04/03/5159321baae9b2222442a70c1f988cbbd66b9be0675dd3936461269be360/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c1476d6f29eb81aa4151c9a31219b03f1f798dc43d8af1250a870735516a1212", size = 592623, upload-time = "2025-08-27T12:14:33.543Z" }, - { url = "https://files.pythonhosted.org/packages/ff/39/c09fd1ad28b85bc1d4554a8710233c9f4cefd03d7717a1b8fbfd171d1167/rpds_py-0.27.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3ce0cac322b0d69b63c9cdb895ee1b65805ec9ffad37639f291dd79467bee675", size = 558800, upload-time = "2025-08-27T12:14:35.436Z" }, - { url = "https://files.pythonhosted.org/packages/c5/d6/99228e6bbcf4baa764b18258f519a9035131d91b538d4e0e294313462a98/rpds_py-0.27.1-cp314-cp314-win32.whl", hash = "sha256:dfbfac137d2a3d0725758cd141f878bf4329ba25e34979797c89474a89a8a3a3", size = 221943, upload-time = "2025-08-27T12:14:36.898Z" }, - { url = "https://files.pythonhosted.org/packages/be/07/c802bc6b8e95be83b79bdf23d1aa61d68324cb1006e245d6c58e959e314d/rpds_py-0.27.1-cp314-cp314-win_amd64.whl", hash = "sha256:a6e57b0abfe7cc513450fcf529eb486b6e4d3f8aee83e92eb5f1ef848218d456", size = 233739, upload-time = "2025-08-27T12:14:38.386Z" }, - { url = "https://files.pythonhosted.org/packages/c8/89/3e1b1c16d4c2d547c5717377a8df99aee8099ff050f87c45cb4d5fa70891/rpds_py-0.27.1-cp314-cp314-win_arm64.whl", hash = "sha256:faf8d146f3d476abfee026c4ae3bdd9ca14236ae4e4c310cbd1cf75ba33d24a3", size = 223120, upload-time = "2025-08-27T12:14:39.82Z" }, - { url = "https://files.pythonhosted.org/packages/62/7e/dc7931dc2fa4a6e46b2a4fa744a9fe5c548efd70e0ba74f40b39fa4a8c10/rpds_py-0.27.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:ba81d2b56b6d4911ce735aad0a1d4495e808b8ee4dc58715998741a26874e7c2", size = 358944, upload-time = "2025-08-27T12:14:41.199Z" }, - { url = "https://files.pythonhosted.org/packages/e6/22/4af76ac4e9f336bfb1a5f240d18a33c6b2fcaadb7472ac7680576512b49a/rpds_py-0.27.1-cp314-cp314t-macosx_11_0_arm64.whl", 
hash = "sha256:84f7d509870098de0e864cad0102711c1e24e9b1a50ee713b65928adb22269e4", size = 342283, upload-time = "2025-08-27T12:14:42.699Z" }, - { url = "https://files.pythonhosted.org/packages/1c/15/2a7c619b3c2272ea9feb9ade67a45c40b3eeb500d503ad4c28c395dc51b4/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e960fc78fecd1100539f14132425e1d5fe44ecb9239f8f27f079962021523e", size = 380320, upload-time = "2025-08-27T12:14:44.157Z" }, - { url = "https://files.pythonhosted.org/packages/a2/7d/4c6d243ba4a3057e994bb5bedd01b5c963c12fe38dde707a52acdb3849e7/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:62f85b665cedab1a503747617393573995dac4600ff51869d69ad2f39eb5e817", size = 391760, upload-time = "2025-08-27T12:14:45.845Z" }, - { url = "https://files.pythonhosted.org/packages/b4/71/b19401a909b83bcd67f90221330bc1ef11bc486fe4e04c24388d28a618ae/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fed467af29776f6556250c9ed85ea5a4dd121ab56a5f8b206e3e7a4c551e48ec", size = 522476, upload-time = "2025-08-27T12:14:47.364Z" }, - { url = "https://files.pythonhosted.org/packages/e4/44/1a3b9715c0455d2e2f0f6df5ee6d6f5afdc423d0773a8a682ed2b43c566c/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2729615f9d430af0ae6b36cf042cb55c0936408d543fb691e1a9e36648fd35a", size = 403418, upload-time = "2025-08-27T12:14:49.991Z" }, - { url = "https://files.pythonhosted.org/packages/1c/4b/fb6c4f14984eb56673bc868a66536f53417ddb13ed44b391998100a06a96/rpds_py-0.27.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b207d881a9aef7ba753d69c123a35d96ca7cb808056998f6b9e8747321f03b8", size = 384771, upload-time = "2025-08-27T12:14:52.159Z" }, - { url = "https://files.pythonhosted.org/packages/c0/56/d5265d2d28b7420d7b4d4d85cad8ef891760f5135102e60d5c970b976e41/rpds_py-0.27.1-cp314-cp314t-manylinux_2_31_riscv64.whl", 
hash = "sha256:639fd5efec029f99b79ae47e5d7e00ad8a773da899b6309f6786ecaf22948c48", size = 400022, upload-time = "2025-08-27T12:14:53.859Z" }, - { url = "https://files.pythonhosted.org/packages/8f/e9/9f5fc70164a569bdd6ed9046486c3568d6926e3a49bdefeeccfb18655875/rpds_py-0.27.1-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fecc80cb2a90e28af8a9b366edacf33d7a91cbfe4c2c4544ea1246e949cfebeb", size = 416787, upload-time = "2025-08-27T12:14:55.673Z" }, - { url = "https://files.pythonhosted.org/packages/d4/64/56dd03430ba491db943a81dcdef115a985aac5f44f565cd39a00c766d45c/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:42a89282d711711d0a62d6f57d81aa43a1368686c45bc1c46b7f079d55692734", size = 557538, upload-time = "2025-08-27T12:14:57.245Z" }, - { url = "https://files.pythonhosted.org/packages/3f/36/92cc885a3129993b1d963a2a42ecf64e6a8e129d2c7cc980dbeba84e55fb/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:cf9931f14223de59551ab9d38ed18d92f14f055a5f78c1d8ad6493f735021bbb", size = 588512, upload-time = "2025-08-27T12:14:58.728Z" }, - { url = "https://files.pythonhosted.org/packages/dd/10/6b283707780a81919f71625351182b4f98932ac89a09023cb61865136244/rpds_py-0.27.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f39f58a27cc6e59f432b568ed8429c7e1641324fbe38131de852cd77b2d534b0", size = 555813, upload-time = "2025-08-27T12:15:00.334Z" }, - { url = "https://files.pythonhosted.org/packages/04/2e/30b5ea18c01379da6272a92825dd7e53dc9d15c88a19e97932d35d430ef7/rpds_py-0.27.1-cp314-cp314t-win32.whl", hash = "sha256:d5fa0ee122dc09e23607a28e6d7b150da16c662e66409bbe85230e4c85bb528a", size = 217385, upload-time = "2025-08-27T12:15:01.937Z" }, - { url = "https://files.pythonhosted.org/packages/32/7d/97119da51cb1dd3f2f3c0805f155a3aa4a95fa44fe7d78ae15e69edf4f34/rpds_py-0.27.1-cp314-cp314t-win_amd64.whl", hash = "sha256:6567d2bb951e21232c2f660c24cf3470bb96de56cdcb3f071a83feeaff8a2772", size = 230097, upload-time = 
"2025-08-27T12:15:03.961Z" }, - { url = "https://files.pythonhosted.org/packages/d5/63/b7cc415c345625d5e62f694ea356c58fb964861409008118f1245f8c3347/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7ba22cb9693df986033b91ae1d7a979bc399237d45fccf875b76f62bb9e52ddf", size = 371360, upload-time = "2025-08-27T12:15:29.218Z" }, - { url = "https://files.pythonhosted.org/packages/e5/8c/12e1b24b560cf378b8ffbdb9dc73abd529e1adcfcf82727dfd29c4a7b88d/rpds_py-0.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b640501be9288c77738b5492b3fd3abc4ba95c50c2e41273c8a1459f08298d3", size = 353933, upload-time = "2025-08-27T12:15:30.837Z" }, - { url = "https://files.pythonhosted.org/packages/9b/85/1bb2210c1f7a1b99e91fea486b9f0f894aa5da3a5ec7097cbad7dec6d40f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb08b65b93e0c6dd70aac7f7890a9c0938d5ec71d5cb32d45cf844fb8ae47636", size = 382962, upload-time = "2025-08-27T12:15:32.348Z" }, - { url = "https://files.pythonhosted.org/packages/cc/c9/a839b9f219cf80ed65f27a7f5ddbb2809c1b85c966020ae2dff490e0b18e/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d7ff07d696a7a38152ebdb8212ca9e5baab56656749f3d6004b34ab726b550b8", size = 394412, upload-time = "2025-08-27T12:15:33.839Z" }, - { url = "https://files.pythonhosted.org/packages/02/2d/b1d7f928b0b1f4fc2e0133e8051d199b01d7384875adc63b6ddadf3de7e5/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fb7c72262deae25366e3b6c0c0ba46007967aea15d1eea746e44ddba8ec58dcc", size = 523972, upload-time = "2025-08-27T12:15:35.377Z" }, - { url = "https://files.pythonhosted.org/packages/a9/af/2cbf56edd2d07716df1aec8a726b3159deb47cb5c27e1e42b71d705a7c2f/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b002cab05d6339716b03a4a3a2ce26737f6231d7b523f339fa061d53368c9d8", size = 403273, 
upload-time = "2025-08-27T12:15:37.051Z" }, - { url = "https://files.pythonhosted.org/packages/c0/93/425e32200158d44ff01da5d9612c3b6711fe69f606f06e3895511f17473b/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f6b69d1c26c4704fec01311963a41d7de3ee0570a84ebde4d544e5a1859ffc", size = 385278, upload-time = "2025-08-27T12:15:38.571Z" }, - { url = "https://files.pythonhosted.org/packages/eb/1a/1a04a915ecd0551bfa9e77b7672d1937b4b72a0fc204a17deef76001cfb2/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:530064db9146b247351f2a0250b8f00b289accea4596a033e94be2389977de71", size = 402084, upload-time = "2025-08-27T12:15:40.529Z" }, - { url = "https://files.pythonhosted.org/packages/51/f7/66585c0fe5714368b62951d2513b684e5215beaceab2c6629549ddb15036/rpds_py-0.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7b90b0496570bd6b0321724a330d8b545827c4df2034b6ddfc5f5275f55da2ad", size = 419041, upload-time = "2025-08-27T12:15:42.191Z" }, - { url = "https://files.pythonhosted.org/packages/8e/7e/83a508f6b8e219bba2d4af077c35ba0e0cdd35a751a3be6a7cba5a55ad71/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879b0e14a2da6a1102a3fc8af580fc1ead37e6d6692a781bd8c83da37429b5ab", size = 560084, upload-time = "2025-08-27T12:15:43.839Z" }, - { url = "https://files.pythonhosted.org/packages/66/66/bb945683b958a1b19eb0fe715594630d0f36396ebdef4d9b89c2fa09aa56/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:0d807710df3b5faa66c731afa162ea29717ab3be17bdc15f90f2d9f183da4059", size = 590115, upload-time = "2025-08-27T12:15:46.647Z" }, - { url = "https://files.pythonhosted.org/packages/12/00/ccfaafaf7db7e7adace915e5c2f2c2410e16402561801e9c7f96683002d3/rpds_py-0.27.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3adc388fc3afb6540aec081fa59e6e0d3908722771aa1e37ffe22b220a436f0b", size = 556561, upload-time = "2025-08-27T12:15:48.219Z" 
}, - { url = "https://files.pythonhosted.org/packages/e1/b7/92b6ed9aad103bfe1c45df98453dfae40969eef2cb6c6239c58d7e96f1b3/rpds_py-0.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c796c0c1cc68cb08b0284db4229f5af76168172670c74908fdbd4b7d7f515819", size = 229125, upload-time = "2025-08-27T12:15:49.956Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ed/e1fba02de17f4f76318b834425257c8ea297e415e12c68b4361f63e8ae92/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdfe4bb2f9fe7458b7453ad3c33e726d6d1c7c0a72960bcc23800d77384e42df", size = 371402, upload-time = "2025-08-27T12:15:51.561Z" }, - { url = "https://files.pythonhosted.org/packages/af/7c/e16b959b316048b55585a697e94add55a4ae0d984434d279ea83442e460d/rpds_py-0.27.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:8fabb8fd848a5f75a2324e4a84501ee3a5e3c78d8603f83475441866e60b94a3", size = 354084, upload-time = "2025-08-27T12:15:53.219Z" }, - { url = "https://files.pythonhosted.org/packages/de/c1/ade645f55de76799fdd08682d51ae6724cb46f318573f18be49b1e040428/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda8719d598f2f7f3e0f885cba8646644b55a187762bec091fa14a2b819746a9", size = 383090, upload-time = "2025-08-27T12:15:55.158Z" }, - { url = "https://files.pythonhosted.org/packages/1f/27/89070ca9b856e52960da1472efcb6c20ba27cfe902f4f23ed095b9cfc61d/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c64d07e95606ec402a0a1c511fe003873fa6af630bda59bac77fac8b4318ebc", size = 394519, upload-time = "2025-08-27T12:15:57.238Z" }, - { url = "https://files.pythonhosted.org/packages/b3/28/be120586874ef906aa5aeeae95ae8df4184bc757e5b6bd1c729ccff45ed5/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93a2ed40de81bcff59aabebb626562d48332f3d028ca2036f1d23cbb52750be4", size = 523817, upload-time = "2025-08-27T12:15:59.237Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/ef/70cc197bc11cfcde02a86f36ac1eed15c56667c2ebddbdb76a47e90306da/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:387ce8c44ae94e0ec50532d9cb0edce17311024c9794eb196b90e1058aadeb66", size = 403240, upload-time = "2025-08-27T12:16:00.923Z" }, - { url = "https://files.pythonhosted.org/packages/cf/35/46936cca449f7f518f2f4996e0e8344db4b57e2081e752441154089d2a5f/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aaf94f812c95b5e60ebaf8bfb1898a7d7cb9c1af5744d4a67fa47796e0465d4e", size = 385194, upload-time = "2025-08-27T12:16:02.802Z" }, - { url = "https://files.pythonhosted.org/packages/e1/62/29c0d3e5125c3270b51415af7cbff1ec587379c84f55a5761cc9efa8cd06/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4848ca84d6ded9b58e474dfdbad4b8bfb450344c0551ddc8d958bf4b36aa837c", size = 402086, upload-time = "2025-08-27T12:16:04.806Z" }, - { url = "https://files.pythonhosted.org/packages/8f/66/03e1087679227785474466fdd04157fb793b3b76e3fcf01cbf4c693c1949/rpds_py-0.27.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2bde09cbcf2248b73c7c323be49b280180ff39fadcfe04e7b6f54a678d02a7cf", size = 419272, upload-time = "2025-08-27T12:16:06.471Z" }, - { url = "https://files.pythonhosted.org/packages/6a/24/e3e72d265121e00b063aef3e3501e5b2473cf1b23511d56e529531acf01e/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:94c44ee01fd21c9058f124d2d4f0c9dc7634bec93cd4b38eefc385dabe71acbf", size = 560003, upload-time = "2025-08-27T12:16:08.06Z" }, - { url = "https://files.pythonhosted.org/packages/26/ca/f5a344c534214cc2d41118c0699fffbdc2c1bc7046f2a2b9609765ab9c92/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:df8b74962e35c9249425d90144e721eed198e6555a0e22a563d29fe4486b51f6", size = 590482, upload-time = "2025-08-27T12:16:10.137Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/08/4349bdd5c64d9d193c360aa9db89adeee6f6682ab8825dca0a3f535f434f/rpds_py-0.27.1-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:dc23e6820e3b40847e2f4a7726462ba0cf53089512abe9ee16318c366494c17a", size = 556523, upload-time = "2025-08-27T12:16:12.188Z" }, -] - -[[package]] -name = "ruff" -version = "0.13.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/df/8d7d8c515d33adfc540e2edf6c6021ea1c5a58a678d8cfce9fae59aabcab/ruff-0.13.2.tar.gz", hash = "sha256:cb12fffd32fb16d32cef4ed16d8c7cdc27ed7c944eaa98d99d01ab7ab0b710ff", size = 5416417, upload-time = "2025-09-25T14:54:09.936Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6e/84/5716a7fa4758e41bf70e603e13637c42cfb9dbf7ceb07180211b9bbf75ef/ruff-0.13.2-py3-none-linux_armv6l.whl", hash = "sha256:3796345842b55f033a78285e4f1641078f902020d8450cade03aad01bffd81c3", size = 12343254, upload-time = "2025-09-25T14:53:27.784Z" }, - { url = "https://files.pythonhosted.org/packages/9b/77/c7042582401bb9ac8eff25360e9335e901d7a1c0749a2b28ba4ecb239991/ruff-0.13.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ff7e4dda12e683e9709ac89e2dd436abf31a4d8a8fc3d89656231ed808e231d2", size = 13040891, upload-time = "2025-09-25T14:53:31.38Z" }, - { url = "https://files.pythonhosted.org/packages/c6/15/125a7f76eb295cb34d19c6778e3a82ace33730ad4e6f28d3427e134a02e0/ruff-0.13.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c75e9d2a2fafd1fdd895d0e7e24b44355984affdde1c412a6f6d3f6e16b22d46", size = 12243588, upload-time = "2025-09-25T14:53:33.543Z" }, - { url = "https://files.pythonhosted.org/packages/9e/eb/0093ae04a70f81f8be7fd7ed6456e926b65d238fc122311293d033fdf91e/ruff-0.13.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cceac74e7bbc53ed7d15d1042ffe7b6577bf294611ad90393bf9b2a0f0ec7cb6", size = 12491359, upload-time = "2025-09-25T14:53:35.892Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/fe/72b525948a6956f07dad4a6f122336b6a05f2e3fd27471cea612349fedb9/ruff-0.13.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae3f469b5465ba6d9721383ae9d49310c19b452a161b57507764d7ef15f4b07", size = 12162486, upload-time = "2025-09-25T14:53:38.171Z" }, - { url = "https://files.pythonhosted.org/packages/6a/e3/0fac422bbbfb2ea838023e0d9fcf1f30183d83ab2482800e2cb892d02dfe/ruff-0.13.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f8f9e3cd6714358238cd6626b9d43026ed19c0c018376ac1ef3c3a04ffb42d8", size = 13871203, upload-time = "2025-09-25T14:53:41.943Z" }, - { url = "https://files.pythonhosted.org/packages/6b/82/b721c8e3ec5df6d83ba0e45dcf00892c4f98b325256c42c38ef136496cbf/ruff-0.13.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c6ed79584a8f6cbe2e5d7dbacf7cc1ee29cbdb5df1172e77fbdadc8bb85a1f89", size = 14929635, upload-time = "2025-09-25T14:53:43.953Z" }, - { url = "https://files.pythonhosted.org/packages/c4/a0/ad56faf6daa507b83079a1ad7a11694b87d61e6bf01c66bd82b466f21821/ruff-0.13.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aed130b2fde049cea2019f55deb939103123cdd191105f97a0599a3e753d61b0", size = 14338783, upload-time = "2025-09-25T14:53:46.205Z" }, - { url = "https://files.pythonhosted.org/packages/47/77/ad1d9156db8f99cd01ee7e29d74b34050e8075a8438e589121fcd25c4b08/ruff-0.13.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1887c230c2c9d65ed1b4e4cfe4d255577ea28b718ae226c348ae68df958191aa", size = 13355322, upload-time = "2025-09-25T14:53:48.164Z" }, - { url = "https://files.pythonhosted.org/packages/64/8b/e87cfca2be6f8b9f41f0bb12dc48c6455e2d66df46fe61bb441a226f1089/ruff-0.13.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5bcb10276b69b3cfea3a102ca119ffe5c6ba3901e20e60cf9efb53fa417633c3", size = 13354427, upload-time = "2025-09-25T14:53:50.486Z" }, - { url = 
"https://files.pythonhosted.org/packages/7f/df/bf382f3fbead082a575edb860897287f42b1b3c694bafa16bc9904c11ed3/ruff-0.13.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:afa721017aa55a555b2ff7944816587f1cb813c2c0a882d158f59b832da1660d", size = 13537637, upload-time = "2025-09-25T14:53:52.887Z" }, - { url = "https://files.pythonhosted.org/packages/51/70/1fb7a7c8a6fc8bd15636288a46e209e81913b87988f26e1913d0851e54f4/ruff-0.13.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1dbc875cf3720c64b3990fef8939334e74cb0ca65b8dbc61d1f439201a38101b", size = 12340025, upload-time = "2025-09-25T14:53:54.88Z" }, - { url = "https://files.pythonhosted.org/packages/4c/27/1e5b3f1c23ca5dd4106d9d580e5c13d9acb70288bff614b3d7b638378cc9/ruff-0.13.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:5b939a1b2a960e9742e9a347e5bbc9b3c3d2c716f86c6ae273d9cbd64f193f22", size = 12133449, upload-time = "2025-09-25T14:53:57.089Z" }, - { url = "https://files.pythonhosted.org/packages/2d/09/b92a5ccee289f11ab128df57d5911224197d8d55ef3bd2043534ff72ca54/ruff-0.13.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:50e2d52acb8de3804fc5f6e2fa3ae9bdc6812410a9e46837e673ad1f90a18736", size = 13051369, upload-time = "2025-09-25T14:53:59.124Z" }, - { url = "https://files.pythonhosted.org/packages/89/99/26c9d1c7d8150f45e346dc045cc49f23e961efceb4a70c47dea0960dea9a/ruff-0.13.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3196bc13ab2110c176b9a4ae5ff7ab676faaa1964b330a1383ba20e1e19645f2", size = 13523644, upload-time = "2025-09-25T14:54:01.622Z" }, - { url = "https://files.pythonhosted.org/packages/f7/00/e7f1501e81e8ec290e79527827af1d88f541d8d26151751b46108978dade/ruff-0.13.2-py3-none-win32.whl", hash = "sha256:7c2a0b7c1e87795fec3404a485096bcd790216c7c146a922d121d8b9c8f1aaac", size = 12245990, upload-time = "2025-09-25T14:54:03.647Z" }, - { url = "https://files.pythonhosted.org/packages/ee/bd/d9f33a73de84fafd0146c6fba4f497c4565fe8fa8b46874b8e438869abc2/ruff-0.13.2-py3-none-win_amd64.whl", hash = 
"sha256:17d95fb32218357c89355f6f6f9a804133e404fc1f65694372e02a557edf8585", size = 13324004, upload-time = "2025-09-25T14:54:06.05Z" }, - { url = "https://files.pythonhosted.org/packages/c3/12/28fa2f597a605884deb0f65c1b1ae05111051b2a7030f5d8a4ff7f4599ba/ruff-0.13.2-py3-none-win_arm64.whl", hash = "sha256:da711b14c530412c827219312b7d7fbb4877fb31150083add7e8c5336549cea7", size = 12484437, upload-time = "2025-09-25T14:54:08.022Z" }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, -] - -[[package]] -name = "sse-starlette" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, -] - -[[package]] -name = "starlette" -version = "0.48.0" -source = { registry = "https://pypi.org/simple" } 
-dependencies = [ - { name = "anyio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, -] - -[[package]] -name = "tomli" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 
98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { 
url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, - { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, -] - -[[package]] -name = "typing-extensions" -version = "4.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, -] - -[[package]] -name = "typing-inspection" -version = "0.4.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, -] - -[[package]] -name = "uvicorn" -version = "0.37.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "h11" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/57/1616c8274c3442d802621abf5deb230771c7a0fec9414cb6763900eb3868/uvicorn-0.37.0.tar.gz", hash = "sha256:4115c8add6d3fd536c8ee77f0e14a7fd2ebba939fed9b02583a97f80648f9e13", size = 80367, upload-time = "2025-09-23T13:33:47.486Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/85/cd/584a2ceb5532af99dd09e50919e3615ba99aa127e9850eafe5f31ddfdb9a/uvicorn-0.37.0-py3-none-any.whl", hash = "sha256:913b2b88672343739927ce381ff9e2ad62541f9f8289664fa1d1d3803fa2ce6c", size = 67976, upload-time = "2025-09-23T13:33:45.842Z" }, -] diff --git a/examples/clients/simple-auth-client/pyproject.toml b/examples/clients/simple-auth-client/pyproject.toml index 0c1021072..97da928ca 100644 --- a/examples/clients/simple-auth-client/pyproject.toml +++ b/examples/clients/simple-auth-client/pyproject.toml @@ -14,10 +14,7 @@ classifiers = [ "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", ] -dependencies = [ - "click>=8.2.0", - "mcp>=1.0.0", -] +dependencies = ["click>=8.2.0", "mcp>=1.0.0"] [project.scripts] mcp-simple-auth-client = "mcp_simple_auth_client.main:cli" @@ 
-44,9 +41,3 @@ target-version = "py310" [tool.uv] dev-dependencies = ["pyright>=1.1.379", "pytest>=8.3.3", "ruff>=0.6.9"] - -[tool.uv.sources] -mcp = { path = "../../../" } - -[[tool.uv.index]] -url = "https://pypi.org/simple" diff --git a/examples/clients/simple-auth-client/uv.lock b/examples/clients/simple-auth-client/uv.lock deleted file mode 100644 index a62447fcb..000000000 --- a/examples/clients/simple-auth-client/uv.lock +++ /dev/null @@ -1,535 +0,0 @@ -version = 1 -requires-python = ">=3.10" - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, -] - -[[package]] -name = "anyio" -version = "4.9.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, -] - -[[package]] -name = "certifi" 
-version = "2025.4.26" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618 }, -] - -[[package]] -name = "click" -version = "8.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/cd/0f/62ca20172d4f87d93cf89665fbaedcd560ac48b465bd1d92bfc7ea6b0a41/click-8.2.0.tar.gz", hash = "sha256:f5452aeddd9988eefa20f90f05ab66f17fce1ee2a36907fd30b05bbb5953814d", size = 235857 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/58/1f37bf81e3c689cc74ffa42102fa8915b59085f54a6e4a80bc6265c0f6bf/click-8.2.0-py3-none-any.whl", hash = "sha256:6b303f0b2aa85f1cb4e5303078fadcbcd4e476f114fab9b5007005711839325c", size = 102156 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "exceptiongroup" -version = "1.3.0" -source = { registry = 
"https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674 }, -] - -[[package]] -name = "h11" -version = "0.16.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, -] - -[[package]] -name = "httpcore" -version = "1.0.9" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { 
registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, -] - -[[package]] -name = "httpx-sse" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, -] - -[[package]] -name = "iniconfig" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, -] - -[[package]] -name = "mcp" -source = { directory = "../../../" } -dependencies = [ - { name = "anyio" }, - { name = "httpx" }, - { name = "httpx-sse" }, - { name = "pydantic" }, - { name = "pydantic-settings" }, - { name = "python-multipart" }, - { name = "sse-starlette" }, - { name = "starlette" }, - { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, -] - -[package.metadata] -requires-dist = [ - { name = "anyio", specifier = ">=4.5" }, - { name = "httpx", specifier = ">=0.27" }, - { name = "httpx-sse", specifier = ">=0.4" }, - { name = "pydantic", specifier = ">=2.7.2,<3.0.0" }, - { name = "pydantic-settings", specifier = ">=2.5.2" }, - { name = "python-dotenv", marker = "extra == 'cli'", specifier = ">=1.0.0" }, - { name = "python-multipart", specifier = ">=0.0.9" }, - { name = "rich", marker = "extra == 'rich'", specifier = ">=13.9.4" }, - { name = "sse-starlette", specifier = ">=1.6.1" }, - { name = "starlette", specifier = ">=0.27" }, - { name = "typer", marker = "extra == 'cli'", specifier = ">=0.12.4" }, - { name = "uvicorn", marker = "sys_platform != 'emscripten'", specifier = ">=0.23.1" }, - { name = "websockets", marker = "extra == 'ws'", specifier = ">=15.0.1" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "pyright", specifier = ">=1.1.391" }, - { name = "pytest", specifier = ">=8.3.4" }, - { name = "pytest-examples", specifier = ">=0.0.14" }, - { name = "pytest-flakefinder", specifier = ">=1.1.0" }, - { name = "pytest-pretty", specifier = 
">=1.2.0" }, - { name = "pytest-xdist", specifier = ">=3.6.1" }, - { name = "ruff", specifier = ">=0.8.5" }, - { name = "trio", specifier = ">=0.26.2" }, -] -docs = [ - { name = "mkdocs", specifier = ">=1.6.1" }, - { name = "mkdocs-glightbox", specifier = ">=0.4.0" }, - { name = "mkdocs-material", extras = ["imaging"], specifier = ">=9.5.45" }, - { name = "mkdocstrings-python", specifier = ">=1.12.2" }, -] - -[[package]] -name = "mcp-simple-auth-client" -version = "0.1.0" -source = { editable = "." } -dependencies = [ - { name = "click" }, - { name = "mcp" }, -] - -[package.dev-dependencies] -dev = [ - { name = "pyright" }, - { name = "pytest" }, - { name = "ruff" }, -] - -[package.metadata] -requires-dist = [ - { name = "click", specifier = ">=8.0.0" }, - { name = "mcp", directory = "../../../" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "pyright", specifier = ">=1.1.379" }, - { name = "pytest", specifier = ">=8.3.3" }, - { name = "ruff", specifier = ">=0.6.9" }, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, -] - -[[package]] -name = "packaging" -version = "25.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, -] - -[[package]] -name = "pluggy" -version = "1.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538 }, -] - -[[package]] -name = "pydantic" -version = "2.11.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/77/ab/5250d56ad03884ab5efd07f734203943c8a8ab40d551e208af81d0257bf2/pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", size = 786540 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/12/46b65f3534d099349e38ef6ec98b1a5a81f42536d17e0ba382c28c67ba67/pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb", size = 443900 }, -] - -[[package]] -name = "pydantic-core" -version = "2.33.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = 
"sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817 }, - { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357 }, - { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011 }, - { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730 }, - { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178 }, - { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462 }, - { url = 
"https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652 }, - { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306 }, - { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720 }, - { url = "https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915 }, - { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884 }, - { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496 }, - { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019 }, - { url = 
"https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584 }, - { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071 }, - { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823 }, - { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792 }, - { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338 }, - { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998 }, - { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200 }, - { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890 }, - { url = "https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359 }, - { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883 }, - { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074 }, - { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538 }, - { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909 }, - { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", 
size = 1897786 }, - { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000 }, - { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996 }, - { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957 }, - { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199 }, - { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296 }, - { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109 }, - { url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028 }, - { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044 }, - { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881 }, - { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034 }, - { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187 }, - { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628 }, - { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866 }, - { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", 
size = 1888894 }, - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688 }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808 }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580 }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859 }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810 }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498 }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611 }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924 }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196 }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389 }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223 }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473 }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269 }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", 
size = 1893921 }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 }, - { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982 }, - { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412 }, - { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749 }, - { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527 }, - { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225 }, - { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490 }, - { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525 }, - { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446 }, - { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678 }, - { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200 }, - { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123 }, - { url = "https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852 }, - { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484 }, - { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896 }, - { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475 }, - { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013 }, - { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715 }, - { url = 
"https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757 }, -] - -[[package]] -name = "pydantic-settings" -version = "2.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, - { name = "python-dotenv" }, - { name = "typing-inspection" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356 }, -] - -[[package]] -name = "pyright" -version = "1.1.400" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nodeenv" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6c/cb/c306618a02d0ee8aed5fb8d0fe0ecfed0dbf075f71468f03a30b5f4e1fe0/pyright-1.1.400.tar.gz", hash = "sha256:b8a3ba40481aa47ba08ffb3228e821d22f7d391f83609211335858bf05686bdb", size = 3846546 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/a5/5d285e4932cf149c90e3c425610c5efaea005475d5f96f1bfdb452956c62/pyright-1.1.400-py3-none-any.whl", hash = "sha256:c80d04f98b5a4358ad3a35e241dbf2a408eee33a40779df365644f8054d2517e", size = 5563460 }, -] - -[[package]] -name = "pytest" -version = "8.3.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "iniconfig" }, 
- { name = "packaging" }, - { name = "pluggy" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 }, -] - -[[package]] -name = "python-dotenv" -version = "1.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, -] - -[[package]] -name = "python-multipart" -version = "0.0.20" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, -] - -[[package]] -name = "ruff" -version = "0.11.10" -source = { registry = "https://pypi.org/simple" } 
-sdist = { url = "https://files.pythonhosted.org/packages/e8/4c/4a3c5a97faaae6b428b336dcca81d03ad04779f8072c267ad2bd860126bf/ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6", size = 4165632 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2f/9f/596c628f8824a2ce4cd12b0f0b4c0629a62dfffc5d0f742c19a1d71be108/ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58", size = 10316243 }, - { url = "https://files.pythonhosted.org/packages/3c/38/c1e0b77ab58b426f8c332c1d1d3432d9fc9a9ea622806e208220cb133c9e/ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed", size = 11083636 }, - { url = "https://files.pythonhosted.org/packages/23/41/b75e15961d6047d7fe1b13886e56e8413be8467a4e1be0a07f3b303cd65a/ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca", size = 10441624 }, - { url = "https://files.pythonhosted.org/packages/b6/2c/e396b6703f131406db1811ea3d746f29d91b41bbd43ad572fea30da1435d/ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2", size = 10624358 }, - { url = "https://files.pythonhosted.org/packages/bd/8c/ee6cca8bdaf0f9a3704796022851a33cd37d1340bceaf4f6e991eb164e2e/ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5", size = 10176850 }, - { url = "https://files.pythonhosted.org/packages/e9/ce/4e27e131a434321b3b7c66512c3ee7505b446eb1c8a80777c023f7e876e6/ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641", size = 11759787 }, - { url = 
"https://files.pythonhosted.org/packages/58/de/1e2e77fc72adc7cf5b5123fd04a59ed329651d3eab9825674a9e640b100b/ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947", size = 12430479 }, - { url = "https://files.pythonhosted.org/packages/07/ed/af0f2340f33b70d50121628ef175523cc4c37619e98d98748c85764c8d88/ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4", size = 11919760 }, - { url = "https://files.pythonhosted.org/packages/24/09/d7b3d3226d535cb89234390f418d10e00a157b6c4a06dfbe723e9322cb7d/ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f", size = 14041747 }, - { url = "https://files.pythonhosted.org/packages/62/b3/a63b4e91850e3f47f78795e6630ee9266cb6963de8f0191600289c2bb8f4/ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b", size = 11550657 }, - { url = "https://files.pythonhosted.org/packages/46/63/a4f95c241d79402ccdbdb1d823d156c89fbb36ebfc4289dce092e6c0aa8f/ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2", size = 10489671 }, - { url = "https://files.pythonhosted.org/packages/6a/9b/c2238bfebf1e473495659c523d50b1685258b6345d5ab0b418ca3f010cd7/ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523", size = 10160135 }, - { url = "https://files.pythonhosted.org/packages/ba/ef/ba7251dd15206688dbfba7d413c0312e94df3b31b08f5d695580b755a899/ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125", size = 11170179 }, - { url = 
"https://files.pythonhosted.org/packages/73/9f/5c336717293203ba275dbfa2ea16e49b29a9fd9a0ea8b6febfc17e133577/ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad", size = 11626021 }, - { url = "https://files.pythonhosted.org/packages/d9/2b/162fa86d2639076667c9aa59196c020dc6d7023ac8f342416c2f5ec4bda0/ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19", size = 10494958 }, - { url = "https://files.pythonhosted.org/packages/24/f3/66643d8f32f50a4b0d09a4832b7d919145ee2b944d43e604fbd7c144d175/ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224", size = 11650285 }, - { url = "https://files.pythonhosted.org/packages/95/3a/2e8704d19f376c799748ff9cb041225c1d59f3e7711bc5596c8cfdc24925/ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1", size = 10765278 }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, -] - -[[package]] -name = "sse-starlette" -version = "2.3.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "starlette" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/10/5f/28f45b1ff14bee871bacafd0a97213f7ec70e389939a80c60c0fb72a9fc9/sse_starlette-2.3.5.tar.gz", hash = 
"sha256:228357b6e42dcc73a427990e2b4a03c023e2495ecee82e14f07ba15077e334b2", size = 17511 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/48/3e49cf0f64961656402c0023edbc51844fe17afe53ab50e958a6dbbbd499/sse_starlette-2.3.5-py3-none-any.whl", hash = "sha256:251708539a335570f10eaaa21d1848a10c42ee6dc3a9cf37ef42266cdb1c52a8", size = 10233 }, -] - -[[package]] -name = "starlette" -version = "0.46.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037 }, -] - -[[package]] -name = "tomli" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, - { url = 
"https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, - { url = 
"https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, - { url = 
"https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, - { url = 
"https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, - { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, -] - -[[package]] -name = "typing-extensions" -version = "4.13.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, -] - -[[package]] -name = "typing-inspection" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 }, -] - -[[package]] -name = "uvicorn" -version = "0.34.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "h11" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = 
"sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483 }, -] diff --git a/examples/clients/simple-chatbot/uv.lock b/examples/clients/simple-chatbot/uv.lock deleted file mode 100644 index ee7cb2fab..000000000 --- a/examples/clients/simple-chatbot/uv.lock +++ /dev/null @@ -1,555 +0,0 @@ -version = 1 -requires-python = ">=3.10" - -[[package]] -name = "annotated-types" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, -] - -[[package]] -name = "anyio" -version = "4.8.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "idna" }, - { name = "sniffio" }, - { name = "typing-extensions", marker = "python_full_version < '3.13'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = 
"sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 }, -] - -[[package]] -name = "certifi" -version = "2024.12.14" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0d/58/5580c1716040bc89206c77d8f74418caf82ce519aae06450393ca73475d1/charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", size = 198013 }, - { url = "https://files.pythonhosted.org/packages/d0/11/00341177ae71c6f5159a08168bcb98c6e6d196d372c94511f9f6c9afe0c6/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", size = 141285 }, - { url = "https://files.pythonhosted.org/packages/01/09/11d684ea5819e5a8f5100fb0b38cf8d02b514746607934134d31233e02c8/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", size = 151449 }, 
- { url = "https://files.pythonhosted.org/packages/08/06/9f5a12939db324d905dc1f70591ae7d7898d030d7662f0d426e2286f68c9/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", size = 143892 }, - { url = "https://files.pythonhosted.org/packages/93/62/5e89cdfe04584cb7f4d36003ffa2936681b03ecc0754f8e969c2becb7e24/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", size = 146123 }, - { url = "https://files.pythonhosted.org/packages/a9/ac/ab729a15c516da2ab70a05f8722ecfccc3f04ed7a18e45c75bbbaa347d61/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", size = 147943 }, - { url = "https://files.pythonhosted.org/packages/03/d2/3f392f23f042615689456e9a274640c1d2e5dd1d52de36ab8f7955f8f050/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", size = 142063 }, - { url = "https://files.pythonhosted.org/packages/f2/e3/e20aae5e1039a2cd9b08d9205f52142329f887f8cf70da3650326670bddf/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", size = 150578 }, - { url = "https://files.pythonhosted.org/packages/8d/af/779ad72a4da0aed925e1139d458adc486e61076d7ecdcc09e610ea8678db/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", size = 153629 }, - { url = "https://files.pythonhosted.org/packages/c2/b6/7aa450b278e7aa92cf7732140bfd8be21f5f29d5bf334ae987c945276639/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", size = 150778 }, - { url = "https://files.pythonhosted.org/packages/39/f4/d9f4f712d0951dcbfd42920d3db81b00dd23b6ab520419626f4023334056/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", size = 146453 }, - { url = "https://files.pythonhosted.org/packages/49/2b/999d0314e4ee0cff3cb83e6bc9aeddd397eeed693edb4facb901eb8fbb69/charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", size = 95479 }, - { url = "https://files.pythonhosted.org/packages/2d/ce/3cbed41cff67e455a386fb5e5dd8906cdda2ed92fbc6297921f2e4419309/charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", size = 102790 }, - { url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 }, - { url = "https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 }, - { url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 }, - { url = 
"https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 }, - { url = "https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 }, - { url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 }, - { url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 }, - { url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 }, - { url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 }, - { url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 }, - { url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 }, - { url = "https://files.pythonhosted.org/packages/b5/b6/9674a4b7d4d99a0d2df9b215da766ee682718f88055751e1e5e753c82db0/charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", size = 95205 }, - { url = "https://files.pythonhosted.org/packages/1e/ab/45b180e175de4402dcf7547e4fb617283bae54ce35c27930a6f35b6bef15/charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", size = 102441 }, - { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, - { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, - { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, - { url = 
"https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, - { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, - { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, - { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, - { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, - { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, - { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, - { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, - { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, - { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, - { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, - { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, - { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, - { url = 
"https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, - { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, - { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, - { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, - { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, - { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, - { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, - { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, - { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, - { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, - { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, -] - -[[package]] -name = "click" -version = "8.1.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, -] - -[[package]] -name = "colorama" -version = "0.4.6" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, -] - -[[package]] -name = "h11" -version = "0.14.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "h11" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, -] - -[[package]] -name = "httpx" -version = "0.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "certifi" }, - { name = "httpcore" }, - { name = "idna" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, -] - -[[package]] -name = "httpx-sse" -version = "0.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, -] - -[[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, -] - -[[package]] -name = "iniconfig" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, -] - -[[package]] -name = "mcp" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "httpx" }, - { name = "httpx-sse" }, - { name = "pydantic" }, - { name = "pydantic-settings" }, - { name = "sse-starlette" }, - { name = "starlette" }, - { name = "uvicorn" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ab/a5/b08dc846ebedae9f17ced878e6975826e90e448cd4592f532f6a88a925a7/mcp-1.2.0.tar.gz", hash = "sha256:2b06c7ece98d6ea9e6379caa38d74b432385c338fb530cb82e2c70ea7add94f5", size = 102973 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/84/fca78f19ac8ce6c53ba416247c71baa53a9e791e98d3c81edbc20a77d6d1/mcp-1.2.0-py3-none-any.whl", hash = "sha256:1d0e77d8c14955a5aea1f5aa1f444c8e531c09355c829b20e42f7a142bc0755f", size = 66468 }, -] - -[[package]] -name = "mcp-simple-chatbot" -version = "0.1.0" 
-source = { editable = "." } -dependencies = [ - { name = "mcp" }, - { name = "python-dotenv" }, - { name = "requests" }, - { name = "uvicorn" }, -] - -[package.dev-dependencies] -dev = [ - { name = "pyright" }, - { name = "pytest" }, - { name = "ruff" }, -] - -[package.metadata] -requires-dist = [ - { name = "mcp", specifier = ">=1.0.0" }, - { name = "python-dotenv", specifier = ">=1.0.0" }, - { name = "requests", specifier = ">=2.31.0" }, - { name = "uvicorn", specifier = ">=0.32.1" }, -] - -[package.metadata.requires-dev] -dev = [ - { name = "pyright", specifier = ">=1.1.379" }, - { name = "pytest", specifier = ">=8.3.3" }, - { name = "ruff", specifier = ">=0.6.9" }, -] - -[[package]] -name = "nodeenv" -version = "1.9.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, -] - -[[package]] -name = "packaging" -version = "24.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, -] - -[[package]] -name = "pluggy" -version = "1.5.0" -source = { registry = 
"https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, -] - -[[package]] -name = "pydantic" -version = "2.10.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "annotated-types" }, - { name = "pydantic-core" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/6a/c7/ca334c2ef6f2e046b1144fe4bb2a5da8a4c574e7f2ebf7e16b34a6a2fa92/pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff", size = 761287 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/58/26/82663c79010b28eddf29dcdd0ea723439535fa917fce5905885c0e9ba562/pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53", size = 431426 }, -] - -[[package]] -name = "pydantic-core" -version = "2.27.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 }, - { url = 
"https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 }, - { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 }, - { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 }, - { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 }, - { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 }, - { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 }, - { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 }, - { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 }, - { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 }, - { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301 }, - { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 }, - { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 }, - { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 }, - { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 
1814998 }, - { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 }, - { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 }, - { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 }, - { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 }, - { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 }, - { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 }, - { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 }, - { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 }, - { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 }, - { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 }, - { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 }, - { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 }, - { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 }, - { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 }, - 
{ url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 }, - { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 }, - { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 }, - { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 }, - { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 }, - { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 }, - { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 }, - { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 }, - { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 }, - { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 }, - { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 }, - { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 }, - { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 }, - { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 }, - 
{ url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 }, - { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 }, - { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 }, - { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 }, - { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 }, - { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 }, - { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = 
"sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 }, - { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 }, - { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 }, - { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 }, - { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 }, - { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 }, - { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 }, - { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size 
= 1768331 }, - { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 }, - { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 }, - { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 }, - { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861 }, - { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 }, - { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 }, - { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 }, -] - -[[package]] -name = "pydantic-settings" -version = "2.7.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pydantic" }, - { name = "python-dotenv" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/73/7b/c58a586cd7d9ac66d2ee4ba60ca2d241fa837c02bca9bea80a9a8c3d22a9/pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93", size = 79920 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/46/93416fdae86d40879714f72956ac14df9c7b76f7d41a4d68aa9f71a0028b/pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd", size = 29718 }, -] - -[[package]] -name = "pyright" -version = "1.1.392.post0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "nodeenv" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/66/df/3c6f6b08fba7ccf49b114dfc4bb33e25c299883fd763f93fad47ef8bc58d/pyright-1.1.392.post0.tar.gz", hash = "sha256:3b7f88de74a28dcfa90c7d90c782b6569a48c2be5f9d4add38472bdaac247ebd", size = 3789911 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/b1/a18de17f40e4f61ca58856b9ef9b0febf74ff88978c3f7776f910071f567/pyright-1.1.392.post0-py3-none-any.whl", hash = "sha256:252f84458a46fa2f0fd4e2f91fc74f50b9ca52c757062e93f6c250c0d8329eb2", size = 5595487 }, -] - -[[package]] -name = "pytest" -version = "8.3.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, - { name = "iniconfig" }, - { name = "packaging" }, - { name = "pluggy" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, -] - -[[package]] -name = "python-dotenv" -version = "1.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, -] - -[[package]] -name = "requests" -version = "2.32.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "certifi" }, - { name = "charset-normalizer" }, - { name = "idna" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, -] - -[[package]] -name = "ruff" -version = "0.9.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/80/63/77ecca9d21177600f551d1c58ab0e5a0b260940ea7312195bd2a4798f8a8/ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0", size = 3553799 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/af/b9/0e168e4e7fb3af851f739e8f07889b91d1a33a30fca8c29fa3149d6b03ec/ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347", size = 11652408 }, - { url = "https://files.pythonhosted.org/packages/2c/22/08ede5db17cf701372a461d1cb8fdde037da1d4fa622b69ac21960e6237e/ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00", size = 11587553 }, - { url = "https://files.pythonhosted.org/packages/42/05/dedfc70f0bf010230229e33dec6e7b2235b2a1b8cbb2a991c710743e343f/ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4", size = 11020755 }, - { url = "https://files.pythonhosted.org/packages/df/9b/65d87ad9b2e3def67342830bd1af98803af731243da1255537ddb8f22209/ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d", size = 11826502 }, - { url = "https://files.pythonhosted.org/packages/93/02/f2239f56786479e1a89c3da9bc9391120057fc6f4a8266a5b091314e72ce/ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c", size = 11390562 }, - { url = "https://files.pythonhosted.org/packages/c9/37/d3a854dba9931f8cb1b2a19509bfe59e00875f48ade632e95aefcb7a0aee/ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f", size = 12548968 }, - { url = 
"https://files.pythonhosted.org/packages/fa/c3/c7b812bb256c7a1d5553433e95980934ffa85396d332401f6b391d3c4569/ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684", size = 13187155 }, - { url = "https://files.pythonhosted.org/packages/bd/5a/3c7f9696a7875522b66aa9bba9e326e4e5894b4366bd1dc32aa6791cb1ff/ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d", size = 12704674 }, - { url = "https://files.pythonhosted.org/packages/be/d6/d908762257a96ce5912187ae9ae86792e677ca4f3dc973b71e7508ff6282/ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df", size = 14529328 }, - { url = "https://files.pythonhosted.org/packages/2d/c2/049f1e6755d12d9cd8823242fa105968f34ee4c669d04cac8cea51a50407/ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247", size = 12385955 }, - { url = "https://files.pythonhosted.org/packages/91/5a/a9bdb50e39810bd9627074e42743b00e6dc4009d42ae9f9351bc3dbc28e7/ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e", size = 11810149 }, - { url = "https://files.pythonhosted.org/packages/e5/fd/57df1a0543182f79a1236e82a79c68ce210efb00e97c30657d5bdb12b478/ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe", size = 11479141 }, - { url = "https://files.pythonhosted.org/packages/dc/16/bc3fd1d38974f6775fc152a0554f8c210ff80f2764b43777163c3c45d61b/ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb", size = 12014073 }, - { url = 
"https://files.pythonhosted.org/packages/47/6b/e4ca048a8f2047eb652e1e8c755f384d1b7944f69ed69066a37acd4118b0/ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a", size = 12435758 }, - { url = "https://files.pythonhosted.org/packages/c2/40/4d3d6c979c67ba24cf183d29f706051a53c36d78358036a9cd21421582ab/ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145", size = 9796916 }, - { url = "https://files.pythonhosted.org/packages/c3/ef/7f548752bdb6867e6939489c87fe4da489ab36191525fadc5cede2a6e8e2/ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5", size = 10773080 }, - { url = "https://files.pythonhosted.org/packages/0e/4e/33df635528292bd2d18404e4daabcd74ca8a9853b2e1df85ed3d32d24362/ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6", size = 10001738 }, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, -] - -[[package]] -name = "sse-starlette" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "starlette" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = 
"sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120 }, -] - -[[package]] -name = "starlette" -version = "0.45.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/90/4f/e1c9f4ec3dae67a94c9285ed275355d5f7cf0f3a5c34538c8ae5412af550/starlette-0.45.2.tar.gz", hash = "sha256:bba1831d15ae5212b22feab2f218bab6ed3cd0fc2dc1d4442443bb1ee52260e0", size = 2574026 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/ab/fe4f57c83620b39dfc9e7687ebad59129ff05170b99422105019d9a65eec/starlette-0.45.2-py3-none-any.whl", hash = "sha256:4daec3356fb0cb1e723a5235e5beaf375d2259af27532958e2d79df549dad9da", size = 71505 }, -] - -[[package]] -name = "tomli" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, - { url = 
"https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, - { url = 
"https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, - { url = 
"https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, - { url = 
"https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, - { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, -] - -[[package]] -name = "urllib3" -version = "2.3.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, -] - -[[package]] -name = "uvicorn" -version = "0.34.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "h11" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } -wheels = [ 
- { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, -] diff --git a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py index 033b4abcb..2ea6d6bc7 100644 --- a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py +++ b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py @@ -3,6 +3,7 @@ from __future__ import annotations import json +import logging import sqlite3 import time from collections import deque @@ -13,17 +14,22 @@ import uvicorn from mcp import types from mcp.server.fastmcp import FastMCP +from mcp.server.fastmcp.server import Context from mcp.server.session import ServerSession from mcp.shared._httpx_utils import create_mcp_http_client from mcp.shared.async_operations import ( AsyncOperationBroker, AsyncOperationStore, + OperationEventQueue, PendingAsyncTask, ServerAsyncOperation, ServerAsyncOperationManager, ) -from mcp.shared.context import RequestContext +from mcp.shared.context import RequestContext, SerializableRequestContext from mcp.types import AsyncOperationStatus, CallToolResult +from pydantic import BaseModel, Field + +logger = logging.getLogger(__name__) class SQLiteAsyncOperationStore(AsyncOperationStore): @@ -207,6 +213,78 @@ async def cleanup_expired(self) -> int: return cursor.rowcount +class SQLiteOperationEventQueue(OperationEventQueue): + """SQLite-based implementation of OperationEventQueue for operation-specific event delivery.""" + + def __init__(self, db_path: str = "async_operations.db"): + self.db_path = db_path + self._init_db() + + def _init_db(self): + """Initialize the SQLite database for operation event queuing.""" + with sqlite3.connect(self.db_path) as conn: + 
conn.execute(""" + CREATE TABLE IF NOT EXISTS operation_events ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + operation_token TEXT NOT NULL, + message TEXT NOT NULL, + created_at REAL NOT NULL + ) + """) + conn.execute(""" + CREATE INDEX IF NOT EXISTS idx_operation_events_token_created + ON operation_events(operation_token, created_at) + """) + conn.commit() + + async def enqueue_event(self, operation_token: str, message: types.JSONRPCMessage) -> None: + """Enqueue an event for a specific operation token.""" + message_json = json.dumps(message.model_dump()) + created_at = time.time() + + with sqlite3.connect(self.db_path) as conn: + conn.execute( + """ + INSERT INTO operation_events (operation_token, message, created_at) + VALUES (?, ?, ?) + """, + (operation_token, message_json, created_at), + ) + conn.commit() + + async def dequeue_events(self, operation_token: str) -> list[types.JSONRPCMessage]: + """Dequeue all pending events for a specific operation token.""" + with sqlite3.connect(self.db_path) as conn: + conn.row_factory = sqlite3.Row + + # Get all events for this operation token + cursor = conn.execute( + """ + SELECT id, message FROM operation_events + WHERE operation_token = ? + ORDER BY created_at + """, + (operation_token,), + ) + + events: list[types.JSONRPCMessage] = [] + event_ids: list[int] = [] + + for row in cursor: + event_ids.append(row["id"]) + message_data = json.loads(row["message"]) + message = types.JSONRPCMessage.model_validate(message_data) + events.append(message) + + # Delete the dequeued events + if event_ids: + placeholders = ",".join("?" 
* len(event_ids)) + conn.execute(f"DELETE FROM operation_events WHERE id IN ({placeholders})", event_ids) + conn.commit() + + return events + + class SQLiteAsyncOperationBroker(AsyncOperationBroker): """SQLite-based implementation of AsyncOperationBroker for persistent task queuing.""" @@ -234,23 +312,19 @@ def _load_persisted_tasks_sync(self): if op_row and op_row["status"] in ("completed", "failed", "canceled"): continue - # Reconstruct serializable parts of RequestContext - from mcp.shared.context import SerializableRequestContext - - serializable_context = None - if row["request_id"]: - serializable_context = SerializableRequestContext( - request_id=row["request_id"], - operation_token=row["operation_token"], - meta=json.loads(row["meta"]) if row["meta"] else None, - supports_async=bool(row["supports_async"]), - ) + # Reconstruct context - the server will hydrate the session + request_context = SerializableRequestContext( + request_id=row["request_id"], + operation_token=row["operation_token"], + meta=json.loads(row["meta"]) if row["meta"] else None, + supports_async=bool(row["supports_async"]), + ) task = PendingAsyncTask( token=row["token"], tool_name=row["tool_name"], arguments=json.loads(row["arguments"]), - request_context=serializable_context, + request_context=request_context, ) self._task_queue.append(task) @@ -329,6 +403,10 @@ async def complete_task(self, token: str) -> None: conn.commit() +class UserPreferences(BaseModel): + continue_processing: bool = Field(description="Should we continue with the operation?") + + @click.command() @click.option("--port", default=8000, help="Port to listen on for HTTP") @click.option( @@ -341,31 +419,54 @@ async def complete_task(self, token: str) -> None: def main(port: int, transport: str, db_path: str): """Run the SQLite async operations example server.""" # Create components with specified database path + operation_event_queue = SQLiteOperationEventQueue(db_path) broker = SQLiteAsyncOperationBroker(db_path) - 
store = SQLiteAsyncOperationStore(db_path) # No broker reference needed - manager = ServerAsyncOperationManager(store=store, broker=broker) - mcp = FastMCP("SQLite Async Operations Demo", async_operations=manager) + store = SQLiteAsyncOperationStore(db_path) + manager = ServerAsyncOperationManager(store=store, broker=broker, operation_request_queue=operation_event_queue) + mcp = FastMCP( + "SQLite Async Operations Demo", + operation_event_queue=operation_event_queue, + async_operations=manager, + ) @mcp.tool(invocation_modes=["async"]) async def fetch_website( url: str, + ctx: Context[ServerSession, None], ) -> list[types.ContentBlock]: headers = {"User-Agent": "MCP Test Server (github.com/modelcontextprotocol/python-sdk)"} async with create_mcp_http_client(headers=headers) as client: + logger.info("Entered fetch_website") + + # Simulate delay await anyio.sleep(10) + + # Request approval from user + logger.info("Sending elicitation to confirm") + result = await ctx.elicit( + message=f"Please confirm that you would like to fetch from {url}.", + schema=UserPreferences, + ) + logger.info(f"Elicitation result: {result}") + + if result.action != "accept" or not result.data.continue_processing: + return [types.TextContent(type="text", text="Operation cancelled by user")] + + logger.info(f"Fetching {url}") response = await client.get(url) response.raise_for_status() + logger.info("Returning fetch result") return [types.TextContent(type="text", text=response.text)] - print(f"Starting server with SQLite database: {db_path}") - print("Pending tasks will be automatically restarted on server restart!") + logger.info(f"Starting server with SQLite database: {db_path}") + logger.info("Pending tasks will be automatically restarted on server restart!") if transport == "stdio": mcp.run(transport="stdio") elif transport == "streamable-http": app = mcp.streamable_http_app() server = uvicorn.Server(config=uvicorn.Config(app=app, host="127.0.0.1", port=port, log_level="error")) - 
print(f"Starting {transport} server on port {port}") + logger.info(f"Starting {transport} server on port {port}") server.run() else: raise ValueError(f"Invalid transport for test server: {transport}") diff --git a/pyproject.toml b/pyproject.toml index b8bdb95db..29b9087df 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -98,8 +98,8 @@ venv = ".venv" # those private functions instead of testing the private functions directly. It makes it easier to maintain the code source # and refactor code that is not public. executionEnvironments = [ - { root = "tests", reportUnusedFunction = false, reportPrivateUsage = false }, - { root = "examples/servers", reportUnusedFunction = false }, + { root = "tests", reportUnusedFunction = false, reportPrivateUsage = false }, + { root = "examples/servers", reportUnusedFunction = false }, ] [tool.ruff] @@ -109,17 +109,17 @@ extend-exclude = ["README.md"] [tool.ruff.lint] select = [ - "C4", # flake8-comprehensions - "C90", # mccabe - "E", # pycodestyle - "F", # pyflakes - "I", # isort - "PERF", # Perflint - "PL", # Pylint - "UP", # pyupgrade + "C4", # flake8-comprehensions + "C90", # mccabe + "E", # pycodestyle + "F", # pyflakes + "I", # isort + "PERF", # Perflint + "PL", # Pylint + "UP", # pyupgrade ] ignore = ["PERF203", "PLC0415", "PLR0402"] -mccabe.max-complexity = 24 # Default is 10 +mccabe.max-complexity = 24 # Default is 10 [tool.ruff.lint.per-file-ignores] "__init__.py" = ["F401"] @@ -128,13 +128,13 @@ mccabe.max-complexity = 24 # Default is 10 [tool.ruff.lint.pylint] allow-magic-value-types = ["bytes", "float", "int", "str"] -max-args = 24 # Default is 5 -max-branches = 23 # Default is 12 -max-returns = 13 # Default is 6 -max-statements = 102 # Default is 50 +max-args = 24 # Default is 5 +max-branches = 23 # Default is 12 +max-returns = 13 # Default is 6 +max-statements = 102 # Default is 50 [tool.uv.workspace] -members = ["examples/servers/*", "examples/snippets"] +members = ["examples/clients/*", "examples/servers/*", 
"examples/snippets"] [tool.uv.sources] mcp = { workspace = true } @@ -154,16 +154,16 @@ filterwarnings = [ "ignore:websockets.server.WebSocketServerProtocol is deprecated:DeprecationWarning", "ignore:Returning str or bytes.*:DeprecationWarning:mcp.server.lowlevel", # pywin32 internal deprecation warning - "ignore:getargs.*The 'u' format is deprecated:DeprecationWarning" + "ignore:getargs.*The 'u' format is deprecated:DeprecationWarning", ] [tool.markdown.lint] -default=true -MD004=false # ul-style - Unordered list style -MD007.indent=2 # ul-indent - Unordered list indentation -MD013=false # line-length - Line length -MD029=false # ol-prefix - Ordered list item prefix -MD033=false # no-inline-html Inline HTML -MD041=false # first-line-heading/first-line-h1 -MD046=false # indented-code-blocks -MD059=false # descriptive-link-text +default = true +MD004 = false # ul-style - Unordered list style +MD007.indent = 2 # ul-indent - Unordered list indentation +MD013 = false # line-length - Line length +MD029 = false # ol-prefix - Ordered list item prefix +MD033 = false # no-inline-html Inline HTML +MD041 = false # first-line-heading/first-line-h1 +MD046 = false # indented-code-blocks +MD059 = false # descriptive-link-text diff --git a/src/mcp/client/session.py b/src/mcp/client/session.py index dfb0eb508..cfbf67ef9 100644 --- a/src/mcp/client/session.py +++ b/src/mcp/client/session.py @@ -364,7 +364,7 @@ async def get_operation_result(self, token: str) -> types.GetOperationPayloadRes # Validate using the stored tool name if hasattr(result, "result") and result.result: # Clean up expired operations first - self._operation_manager.cleanup_expired() + await self._operation_manager.cleanup_expired() tool_name = self._operation_manager.get_tool_name(token) await self._validate_tool_result(tool_name, result.result) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 38dda76ea..9f2fde6b5 100644 --- a/src/mcp/server/fastmcp/server.py +++ 
b/src/mcp/server/fastmcp/server.py @@ -7,7 +7,7 @@ import re from collections.abc import AsyncIterator, Awaitable, Callable, Collection, Iterable, Sequence from contextlib import AbstractAsyncContextManager, asynccontextmanager -from typing import Any, Generic, Literal +from typing import TYPE_CHECKING, Any, Generic, Literal import anyio import pydantic_core @@ -45,7 +45,7 @@ from mcp.server.streamable_http import EventStore from mcp.server.streamable_http_manager import StreamableHTTPSessionManager from mcp.server.transport_security import TransportSecuritySettings -from mcp.shared.async_operations import ServerAsyncOperationManager +from mcp.shared.async_operations import OperationEventQueue from mcp.shared.context import LifespanContextT, RequestContext, RequestT from mcp.types import ( AnyFunction, @@ -62,6 +62,9 @@ from mcp.types import ResourceTemplate as MCPResourceTemplate from mcp.types import Tool as MCPTool +if TYPE_CHECKING: + from mcp.shared.async_operations import ServerAsyncOperationManager + logger = get_logger(__name__) @@ -145,6 +148,7 @@ def __init__( # noqa: PLR0913 event_store: EventStore | None = None, *, async_operations: ServerAsyncOperationManager | None = None, + operation_event_queue: OperationEventQueue | None = None, tools: list[Tool] | None = None, debug: bool = False, log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", @@ -164,6 +168,8 @@ def __init__( # noqa: PLR0913 auth: AuthSettings | None = None, transport_security: TransportSecuritySettings | None = None, ): + from mcp.shared.async_operations import InMemoryOperationEventQueue, ServerAsyncOperationManager + self.settings = Settings( debug=debug, log_level=log_level, @@ -184,7 +190,12 @@ def __init__( # noqa: PLR0913 transport_security=transport_security, ) - self._async_operations = async_operations or ServerAsyncOperationManager() + self._operation_event_queue = operation_event_queue or InMemoryOperationEventQueue() + self._operation_response_queue = 
InMemoryOperationEventQueue() + self._async_operations = async_operations or ServerAsyncOperationManager( + operation_request_queue=self._operation_event_queue, + operation_response_queue=self._operation_response_queue, + ) self._mcp_server = MCPServer( name=name or "FastMCP", @@ -192,6 +203,8 @@ def __init__( # noqa: PLR0913 website_url=website_url, icons=icons, async_operations=self._async_operations, + operation_request_queue=self._operation_event_queue, + operation_response_queue=self._operation_response_queue, # TODO(Marcelo): It seems there's a type mismatch between the lifespan type from an FastMCP and Server. # We need to create a Lifespan type that is a generic on the server type, like Starlette does. lifespan=(lifespan_wrapper(self, self.settings.lifespan) if self.settings.lifespan else default_lifespan), # type: ignore @@ -215,6 +228,7 @@ def __init__( # noqa: PLR0913 if auth_server_provider and not token_verifier: self._token_verifier = ProviderTokenVerifier(auth_server_provider) self._event_store = event_store + self._operation_event_queue = operation_event_queue self._custom_starlette_routes: list[Route] = [] self.dependencies = self.settings.dependencies self._session_manager: StreamableHTTPSessionManager | None = None diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index d4011aeb4..83d6aa39d 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -73,7 +73,7 @@ async def main(): import warnings from collections.abc import AsyncIterator, Awaitable, Callable, Iterable from contextlib import AbstractAsyncContextManager, AsyncExitStack, asynccontextmanager -from typing import Any, Generic, TypeAlias, cast +from typing import TYPE_CHECKING, Any, Generic, TypeAlias, cast import anyio import jsonschema @@ -87,13 +87,16 @@ async def main(): from mcp.server.lowlevel.helper_types import ReadResourceContents from mcp.server.models import InitializationOptions from mcp.server.session import 
ServerSession -from mcp.shared.async_operations import ServerAsyncOperation, ServerAsyncOperationManager +from mcp.shared.async_operations_utils import ServerAsyncOperation, ToolExecutorParameters from mcp.shared.context import RequestContext from mcp.shared.exceptions import McpError from mcp.shared.message import ServerMessageMetadata, SessionMessage from mcp.shared.session import RequestResponder from mcp.types import NEXT_PROTOCOL_VERSION, Operation, RequestId +if TYPE_CHECKING: + from mcp.shared.async_operations import OperationEventQueue, ServerAsyncOperationManager + logger = logging.getLogger(__name__) LifespanResultT = TypeVar("LifespanResultT", default=Any) @@ -142,18 +145,25 @@ def __init__( website_url: str | None = None, icons: list[types.Icon] | None = None, async_operations: ServerAsyncOperationManager | None = None, + operation_request_queue: OperationEventQueue | None = None, + operation_response_queue: OperationEventQueue | None = None, lifespan: Callable[ [Server[LifespanResultT, RequestT]], AbstractAsyncContextManager[LifespanResultT], ] = lifespan, ): + from mcp.shared.async_operations import ServerAsyncOperationManager + self.name = name self.version = version self.instructions = instructions self.website_url = website_url self.icons = icons self.lifespan = lifespan - self.async_operations = async_operations or ServerAsyncOperationManager() + self.async_operations = async_operations or ServerAsyncOperationManager( + operation_request_queue=operation_request_queue, + operation_response_queue=operation_response_queue, + ) self.async_operations.set_handler(self._execute_tool_async) # Track request ID to operation token mapping for cancellation self._request_to_operation: dict[RequestId, str] = {} @@ -689,36 +699,53 @@ async def _execute_immediate_result(self, tool: types.Tool, arguments: dict[str, types.ErrorData(code=types.INTERNAL_ERROR, message=f"Immediate result execution error: {str(e)}") ) - async def _execute_tool_async( - self, tool_name: 
str, arguments: dict[str, Any], request_context: Any - ) -> types.CallToolResult: + async def _execute_tool_async(self, params: ToolExecutorParameters) -> types.CallToolResult: """Execute a tool asynchronously and return the result.""" - context_token = None + async with AsyncExitStack() as stack: + lifespan_context = await stack.enter_async_context(self.lifespan(self)) + session = await stack.enter_async_context( + ServerSession( + params.server_read, + params.server_write, + self.create_initialization_options(), + stateless=True, # Treat as initialized + ) + ) - try: - # Restore the request context for this task - if request_context: - context_token = request_ctx.set(request_context) + # Hydrate the request context + context_token = None + request_context = RequestContext( + request_id=params.request_context.request_id, + operation_token=params.request_context.operation_token, + meta=params.request_context.meta, + supports_async=params.request_context.supports_async, + lifespan_context=lifespan_context, + session=session, + ) - logger.info(f"Starting async execution of tool '{tool_name}'") + try: + # Restore the request context for this task + if request_context: + context_token = request_ctx.set(request_context) - if not self._tool_function: - raise ValueError("No tool function registered") + logger.info(f"Starting async execution of tool '{params.tool_name}'") - # Execute the tool function - results = await self._tool_function(tool_name, arguments) + if not self._tool_function: + raise ValueError("No tool function registered") - # Get tool definition for validation - tool = await self._get_cached_tool_definition(tool_name) + # Execute the tool function + results = await self._tool_function(params.tool_name, params.arguments) - # Process results using shared logic - result = self._process_tool_result(results, tool) - logger.info(f"Async execution of tool '{tool_name}' completed") - return result + # Get tool definition for validation + tool = await 
self._get_cached_tool_definition(params.tool_name) - finally: - if context_token: - request_ctx.reset(context_token) + # Process results using shared logic + result = self._process_tool_result(results, tool) + logger.info(f"Async execution of tool '{params.tool_name}' completed") + return result + finally: + if context_token: + request_ctx.reset(context_token) def progress_notification(self): def decorator( @@ -794,6 +821,54 @@ async def handler(req: types.GetOperationStatusRequest, _: Any = None): # Validate token and get operation operation = await self._validate_operation_token(req.params.token) + # Dequeue and send any pending events for this operation + operation_request_queue = self.async_operations.operation_request_queue + operation_response_queue = self.async_operations.operation_response_queue + queued_messages = await operation_request_queue.dequeue_events(req.params.token) + if queued_messages: + logger.debug(f"Dequeued {len(queued_messages)} events for operation {req.params.token}") + # Send queued messages to client using session methods + current_context = request_ctx.get() + if current_context and current_context.session: + for message in queued_messages: + try: + if isinstance(message.root, types.JSONRPCRequest): + logger.debug(f"Received detached request: {message}") + request_id = message.root.id + validated_request = types.ServerRequest.model_validate( + message.root.model_dump(by_alias=True, mode="json", exclude_none=True) + ) + response = await current_context.session.send_request( + validated_request, types.ClientResult + ) + + # Enqueue response back to response queue for detached session + await operation_response_queue.enqueue_event( + req.params.token, + types.JSONRPCMessage( + types.JSONRPCResponse( + jsonrpc="2.0", + id=request_id, + result=response.model_dump( + by_alias=True, mode="json", exclude_none=True + ), + ) + ), + ) + elif isinstance(message.root, types.JSONRPCNotification): + logger.debug(f"Received detached notification: 
{message}") + validated_notification = types.ServerNotification.model_validate( + message.root.model_dump(by_alias=True, mode="json", exclude_none=True) + ) + await current_context.session.send_notification(validated_notification) + else: + logger.debug(f"Invalid message in request queue: {message}") + raise McpError( + types.ErrorData(code=-32600, message="Invalid message type in event queue") + ) + except Exception: + logger.exception(f"Failed to process message: {message}") + return types.ServerResult( types.GetOperationStatusResult( status=operation.status, diff --git a/src/mcp/server/streamable_http.py b/src/mcp/server/streamable_http.py index da448794e..29567f5ca 100644 --- a/src/mcp/server/streamable_http.py +++ b/src/mcp/server/streamable_http.py @@ -172,8 +172,6 @@ def __init__( ], ] = {} self._terminated = False - # Track operation tokens to original request IDs for stream resumption - self._operation_to_request_id: dict[str, str] = {} @property def is_terminated(self) -> bool: @@ -308,27 +306,6 @@ def _check_content_type(self, request: Request) -> bool: return any(part == CONTENT_TYPE_JSON for part in content_type_parts) - def _is_async_operation_response(self, response_message: JSONRPCMessage) -> bool: - """Check if response is for an async operation that should keep stream open.""" - try: - if not isinstance(response_message.root, JSONRPCResponse): - return False - - result = response_message.root.result - if not result: - return False - - # Check if result has _operation with token - if hasattr(result, "__getitem__") and "_operation" in result: - operation = result["_operation"] # type: ignore - if hasattr(operation, "__getitem__") and "token" in operation: - return bool(operation["token"]) # type: ignore - - return False - except (TypeError, KeyError, AttributeError) as exc: - logger.exception("Exception in _is_async_operation_response: %s", exc) - return False - async def _handle_sse_mode( self, message: JSONRPCMessage, @@ -489,12 +466,11 @@ async 
def _handle_post_request(self, scope: Scope, request: Request, receive: Re metadata = ServerMessageMetadata(request_context=request) session_message = SessionMessage(message, metadata=metadata) await writer.send(session_message) - should_pop_stream = True # Default to cleaning up stream + try: # Process messages from the request-specific stream # We need to collect all messages until we get a response response_message = None - # Use similar approach to SSE writer for consistency async for event_message in request_stream_reader: # If it's a response, this is what we're waiting for @@ -507,11 +483,6 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re # At this point we should have a response if response_message: - # Check if this is an async operation response - keep stream open - if self._is_async_operation_response(response_message): - # This is an async operation - keep the stream open for elicitation/sampling - should_pop_stream = False - # Create JSON response response = self._create_json_response(response_message) await response(scope, receive, send) @@ -532,8 +503,7 @@ async def _handle_post_request(self, scope: Scope, request: Request, receive: Re ) await response(scope, receive, send) finally: - if should_pop_stream: - await self._clean_up_memory_streams(request_id) + await self._clean_up_memory_streams(request_id) else: await self._handle_sse_mode( message, request, writer, request_id, request_stream_reader, scope, receive, send @@ -813,7 +783,6 @@ async def send_event(event_message: EventMessage) -> None: async with msg_reader: async for event_message in msg_reader: event_data = self._create_event_data(event_message) - await sse_stream_writer.send(event_data) except Exception: logger.exception("Error in replay sender") @@ -884,38 +853,6 @@ async def message_router(): # If this response is for an existing request stream, # send it there target_request_id = response_id - - # Track operation tokens for stream resumption - if ( 
- isinstance(message.root, JSONRPCResponse) - and message.root.result - and "_operation" in message.root.result - and ( - ("token" in message.root.result["_operation"]) - and message.root.result["_operation"]["token"] - ) - ): - operation_token = message.root.result["_operation"]["token"] - self._operation_to_request_id[operation_token] = response_id - logger.info(f"Tracking operation token {operation_token} -> request {response_id}") - elif ( - message.root.params - and "_operation" in message.root.params - and ( - ("token" in message.root.params["_operation"]) - and message.root.params["_operation"]["token"] - ) - ): - # Route operation-related messages back to the original request stream - operation_token = message.root.params["_operation"]["token"] - if operation_token in self._operation_to_request_id: - target_request_id = self._operation_to_request_id[operation_token] - logging.info(operation_token) - else: - logger.warning( - f"Operation token {operation_token} not found in mapping, using GET_STREAM_KEY" - ) - target_request_id = GET_STREAM_KEY # Extract related_request_id from meta if it exists elif ( session_message.metadata is not None @@ -940,7 +877,8 @@ async def message_router(): if request_stream_id in self._request_streams: try: # Send both the message and the event ID - await self._request_streams[request_stream_id][0].send(EventMessage(message, event_id)) + event_data = EventMessage(message, event_id) + await self._request_streams[request_stream_id][0].send(event_data) except ( anyio.BrokenResourceError, anyio.ClosedResourceError, diff --git a/src/mcp/shared/async_operations.py b/src/mcp/shared/async_operations.py index d7029ac5e..9bb8b6526 100644 --- a/src/mcp/shared/async_operations.py +++ b/src/mcp/shared/async_operations.py @@ -6,6 +6,7 @@ import logging import secrets import time +from abc import abstractmethod from collections import deque from collections.abc import AsyncIterator, Awaitable, Callable from dataclasses import dataclass @@ 
-13,71 +14,59 @@ import anyio from anyio.abc import TaskGroup +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream import mcp.types as types +from mcp.shared.async_operations_utils import ClientAsyncOperation, ServerAsyncOperation, ToolExecutorParameters +from mcp.shared.message import SessionMessage from mcp.types import AsyncOperationStatus if TYPE_CHECKING: # Avoid circular import with mcp.server.lowlevel.Server from mcp.server.session import ServerSession - from mcp.shared.context import RequestContext + from mcp.shared.context import RequestContext, SerializableRequestContext logger = logging.getLogger(__name__) -@dataclass -class PendingAsyncTask: - """Represents a task waiting to be dispatched.""" +class OperationEventQueue(Protocol): + """ + Interface for queuing events by operation token for async operation delivery. + """ - token: str - tool_name: str - arguments: dict[str, Any] - request_context: Any # The RequestContext object to restore + @abstractmethod + async def enqueue_event(self, operation_token: str, message: types.JSONRPCMessage) -> None: + """ + Enqueue an event for a specific operation token. + Args: + operation_token: The operation token to queue the event for + message: The server request or notification to queue + """ + ... -@dataclass -class ClientAsyncOperation: - """Minimal operation tracking for client-side use.""" + @abstractmethod + async def dequeue_events(self, operation_token: str) -> list[types.JSONRPCMessage]: + """ + Dequeue all pending events for a specific operation token. - token: str - tool_name: str - created_at: float - keep_alive: int + Args: + operation_token: The operation token to dequeue events for - @property - def is_expired(self) -> bool: - """Check if operation has expired based on keepAlive.""" - return time.time() > (self.created_at + self.keep_alive * 2) # Give some buffer before expiration + Returns: + List of queued server requests/notifications for the operation + """ + ... 
@dataclass -class ServerAsyncOperation: - """Represents an async tool operation.""" +class PendingAsyncTask: + """Represents a task waiting to be dispatched.""" token: str tool_name: str arguments: dict[str, Any] - status: AsyncOperationStatus - created_at: float - keep_alive: int - resolved_at: float | None = None - session_id: str | None = None - result: types.CallToolResult | None = None - error: str | None = None - - @property - def is_expired(self) -> bool: - """Check if operation has expired based on keepAlive.""" - if not self.resolved_at: - return False - if self.status in ("completed", "failed", "canceled"): - return time.time() > (self.resolved_at + self.keep_alive) - return False - - @property - def is_terminal(self) -> bool: - """Check if operation is in a terminal state.""" - return self.status in ("completed", "failed", "canceled", "unknown") + request_context: SerializableRequestContext OperationT = TypeVar("OperationT", ClientAsyncOperation, ServerAsyncOperation) @@ -112,7 +101,7 @@ def _remove_operation(self, token: str) -> OperationT | None: """Internal method to remove and return an operation.""" return self._operations.pop(token, None) - def get_operation(self, token: str) -> OperationT | None: + async def get_operation(self, token: str) -> OperationT | None: """Get operation by token.""" return self._get_operation(token) @@ -120,7 +109,7 @@ def remove_operation(self, token: str) -> bool: """Remove an operation by token.""" return self._remove_operation(token) is not None - def cleanup_expired(self) -> int: + async def cleanup_expired(self) -> int: """Remove expired operations and return count of removed operations.""" expired_tokens = [token for token, operation in self._operations.items() if operation.is_expired] for token in expired_tokens: @@ -138,7 +127,7 @@ async def cleanup_loop(self) -> None: while self._running: await anyio.sleep(self._cleanup_interval) - count = self.cleanup_expired() + count = await self.cleanup_expired() if count > 
0: logger.debug(f"Cleaned up {count} expired operations") @@ -216,27 +205,34 @@ def get_tool_name(self, token: str) -> str | None: return operation.tool_name if operation else None -class ServerAsyncOperationManager: +class ServerAsyncOperationManager(BaseOperationManager[ServerAsyncOperation]): """Manages async tool operations using Store and Broker components.""" + operation_request_queue: OperationEventQueue + operation_response_queue: OperationEventQueue + def __init__( self, + *, store: AsyncOperationStore | None = None, broker: AsyncOperationBroker | None = None, - *, + operation_request_queue: OperationEventQueue | None = None, + operation_response_queue: OperationEventQueue | None = None, token_generator: Callable[[str | None], str] | None = None, ): # Use provided implementations or default to InMemory self.store = store or InMemoryAsyncOperationStore() self.broker = broker or InMemoryAsyncOperationBroker() + self.operation_request_queue = operation_request_queue or InMemoryOperationEventQueue() + self.operation_response_queue = operation_response_queue or InMemoryOperationEventQueue() self._token_generator = token_generator or self._default_token_generator - self._tool_executor: Callable[[str, dict[str, Any], Any], Awaitable[types.CallToolResult]] | None = None + self._tool_executor: Callable[[ToolExecutorParameters], Awaitable[types.CallToolResult]] | None = None self._task_group: TaskGroup | None = None self._run_lock = anyio.Lock() self._running = False - def set_handler(self, tool_executor: Callable[[str, dict[str, Any], Any], Awaitable[types.CallToolResult]]) -> None: - """Set the tool executor handler for late binding.""" + def set_handler(self, tool_executor: Callable[[ToolExecutorParameters], Awaitable[types.CallToolResult]]) -> None: + """Set the tool executor handler via late binding.""" self._tool_executor = tool_executor def _default_token_generator(self, session_id: str | None = None) -> str: @@ -299,18 +295,96 @@ async def 
_task_dispatcher(self) -> None: async def _execute_tool_task(self, task: PendingAsyncTask) -> None: """Execute a tool task.""" - try: - if not self._tool_executor: - raise ValueError("No tool executor configured") + if not self._tool_executor: + raise ValueError("No tool executor configured") + + logger.debug(f"Starting async tool task {task.token} for tool '{task.tool_name}'") + logger.debug(f"Operation event queue configured: {type(self.operation_request_queue)}") + logger.debug( + f"Event store configured: {hasattr(self, 'event_store') and getattr(self, 'event_store', None) is not None}" + ) - await self.mark_working(task.token) - result = await self._tool_executor(task.tool_name, task.arguments, task.request_context) - await self.complete_operation(task.token, result) + # Create dummy streams to simulate a client + server_write, client_read = anyio.create_memory_object_stream[SessionMessage](1) + client_write, server_read = anyio.create_memory_object_stream[SessionMessage](1) + try: + async with anyio.create_task_group() as tg: + tg.start_soon(self._execute_tool_task_client_loop, client_read, client_write, task.request_context) + + await self.mark_working(task.token) + result = await self._tool_executor( + ToolExecutorParameters( + tool_name=task.tool_name, + arguments=task.arguments, + request_context=task.request_context, + server_read=server_read, + server_write=server_write, + ) + ) + await self.complete_operation(task.token, result) except Exception as e: logger.exception(f"Tool task {task.token} failed: {e}") await self.fail_operation(task.token, str(e)) + async def _execute_tool_task_client_loop( + self, + read_stream: MemoryObjectReceiveStream[SessionMessage], + write_stream: MemoryObjectSendStream[SessionMessage], + request_context: SerializableRequestContext, + ): + """Simulated client loop that enqueues messages for operation event delivery.""" + async with ( + read_stream, + write_stream, + ): + try: + async with anyio.create_task_group() as tg: + # 
Handle incoming messages from server + tg.start_soon(self._handle_incoming_messages, read_stream, request_context) + # Handle outgoing responses to server + tg.start_soon(self._handle_outgoing_responses, write_stream, request_context) + except Exception as e: + logger.exception(f"Unhandled exception in client loop: {e}") + + async def _handle_incoming_messages( + self, + read_stream: MemoryObjectReceiveStream[SessionMessage], + request_context: SerializableRequestContext, + ): + """Handle incoming messages from server and enqueue them as events.""" + try: + async for session_message in read_stream: + message = session_message.message + + if request_context.operation_token: + await self.operation_request_queue.enqueue_event(request_context.operation_token, message) + else: + logger.warning("No operation token in request context!") + except Exception as e: + logger.exception(f"Unhandled exception in incoming message handler: {e}") + + async def _handle_outgoing_responses( + self, + write_stream: MemoryObjectSendStream[SessionMessage], + request_context: SerializableRequestContext, + ): + """Handle outgoing responses by dequeueing from response queue and sending to server.""" + if not request_context.operation_token: + return + + try: + while True: + # Poll for responses from the response queue + responses = await self.operation_response_queue.dequeue_events(request_context.operation_token) + for response in responses: + await write_stream.send(SessionMessage(message=response)) + + # Small delay to avoid busy waiting + await anyio.sleep(0.1) + except Exception as e: + logger.exception(f"Unhandled exception in outgoing response handler: {e}") + async def start_task( self, token: str, @@ -480,6 +554,26 @@ async def cleanup_expired(self) -> int: return len(expired_tokens) +class InMemoryOperationEventQueue(OperationEventQueue): + """In-memory implementation of OperationEventQueue.""" + + def __init__(self): + self._queued_events: dict[str, list[types.JSONRPCMessage]] = 
{} + + async def enqueue_event(self, operation_token: str, message: types.JSONRPCMessage) -> None: + """Enqueue an event for a specific operation token.""" + if operation_token not in self._queued_events: + self._queued_events[operation_token] = [] + self._queued_events[operation_token].append(message) + + async def dequeue_events(self, operation_token: str) -> list[types.JSONRPCMessage]: + """Dequeue all pending events for a specific operation token.""" + events = self._queued_events.get(operation_token, []) + if operation_token in self._queued_events: + del self._queued_events[operation_token] + return events + + class InMemoryAsyncOperationBroker(AsyncOperationBroker): """In-memory implementation of AsyncOperationBroker.""" diff --git a/src/mcp/shared/async_operations_utils.py b/src/mcp/shared/async_operations_utils.py new file mode 100644 index 000000000..24cd51583 --- /dev/null +++ b/src/mcp/shared/async_operations_utils.py @@ -0,0 +1,66 @@ +import time +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any + +from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +from mcp import types +from mcp.shared.message import SessionMessage + +if TYPE_CHECKING: + # Avoid circular import with mcp.server.lowlevel.Server + from mcp.shared.context import SerializableRequestContext + + +@dataclass +class ClientAsyncOperation: + """Minimal operation tracking for client-side use.""" + + token: str + tool_name: str + created_at: float + keep_alive: int + + @property + def is_expired(self) -> bool: + """Check if operation has expired based on keepAlive.""" + return time.time() > (self.created_at + self.keep_alive * 2) # Give some buffer before expiration + + +@dataclass +class ServerAsyncOperation: + """Represents an async tool operation.""" + + token: str + tool_name: str + arguments: dict[str, Any] + status: types.AsyncOperationStatus + created_at: float + keep_alive: int + resolved_at: float | None = None + session_id: str | 
None = None + result: types.CallToolResult | None = None + error: str | None = None + + @property + def is_expired(self) -> bool: + """Check if operation has expired based on keepAlive.""" + if not self.resolved_at: + return False + if self.status in ("completed", "failed", "canceled"): + return time.time() > (self.resolved_at + self.keep_alive) + return False + + @property + def is_terminal(self) -> bool: + """Check if operation is in a terminal state.""" + return self.status in ("completed", "failed", "canceled", "unknown") + + +@dataclass +class ToolExecutorParameters: + tool_name: str + arguments: dict[str, Any] + request_context: "SerializableRequestContext" + server_read: MemoryObjectReceiveStream[SessionMessage | Exception] + server_write: MemoryObjectSendStream[SessionMessage] diff --git a/src/mcp/shared/session.py b/src/mcp/shared/session.py index 6d11469c8..8a13aa0de 100644 --- a/src/mcp/shared/session.py +++ b/src/mcp/shared/session.py @@ -46,6 +46,8 @@ RequestId = str | int +logger = logging.getLogger(__name__) + class ProgressFnT(Protocol): """Protocol for progress notification callbacks.""" @@ -246,6 +248,10 @@ async def send_request( response_stream, response_stream_reader = anyio.create_memory_object_stream[JSONRPCResponse | JSONRPCError](1) self._response_streams[request_id] = response_stream + logging.debug( + f"Created response stream for request ID {request_id}. " + f"Active streams: {list(self._response_streams.keys())}" + ) # Set up progress token if progress callback is provided request_data = request.model_dump(by_alias=True, mode="json", exclude_none=True) @@ -259,6 +265,14 @@ async def send_request( # Store the callback for this request self._progress_callbacks[request_id] = progress_callback + # Remove jsonrpc and id properties if present since we're adding them ourselves. + # For detached sessions in lowlevel.Server, the detached session has its own request ID + # which will be remapped later. 
+ if "jsonrpc" in request_data: + del request_data["jsonrpc"] + if "id" in request_data: + del request_data["id"] + pop_progress: RequestId | None = request_id try: jsonrpc_request = JSONRPCRequest( @@ -313,6 +327,10 @@ async def send_request( return result finally: + logging.debug( + f"Cleaning up response stream for request ID {request_id}. " + f"Remaining streams: {list(self._response_streams.keys())}" + ) self._response_streams.pop(request_id, None) if pop_progress: self._progress_callbacks.pop(pop_progress, None) @@ -328,11 +346,17 @@ async def send_notification( Emits a notification, which is a one-way message that does not expect a response. """ + + # Remove jsonrpc property if present since we're adding it ourselves. + notification_data = notification.model_dump(by_alias=True, mode="json", exclude_none=True) + if "jsonrpc" in notification_data: + del notification_data["jsonrpc"] + # Some transport implementations may need to set the related_request_id # to attribute to the notifications to the request that triggered them. 
jsonrpc_notification = JSONRPCNotification( jsonrpc="2.0", - **notification.model_dump(by_alias=True, mode="json", exclude_none=True), + **notification_data, ) session_message = SessionMessage( message=JSONRPCMessage(jsonrpc_notification), @@ -362,8 +386,10 @@ async def _receive_loop(self) -> None: try: async for message in self._read_stream: if isinstance(message, Exception): + logger.debug(f"Received exception: {message}") await self._handle_incoming(message) elif isinstance(message.message.root, JSONRPCRequest): + logger.debug(f"Received request: {message}") try: validated_request = self._receive_request_type.model_validate( message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True) @@ -404,6 +430,7 @@ async def _receive_loop(self) -> None: await self._write_stream.send(session_message) elif isinstance(message.message.root, JSONRPCNotification): + logger.debug(f"Received notification: {message}") try: notification = self._receive_notification_type.model_validate( message.message.root.model_dump(by_alias=True, mode="json", exclude_none=True) @@ -440,10 +467,16 @@ async def _receive_loop(self) -> None: f"Failed to validate notification: {e}. Message was: {message.message.root}" ) else: # Response or error + logger.debug(f"Received response or error: {message}") stream = self._response_streams.pop(message.message.root.id, None) if stream: + logging.debug(f"Routing response with ID {message.message.root.id} to waiting stream") await stream.send(message.message.root) else: + logging.warning( + f"Received response with unknown request ID {message.message.root.id}. 
" + f"Available streams: {list(self._response_streams.keys())}" + ) await self._handle_incoming( RuntimeError(f"Received response with an unknown request ID: {message}") ) diff --git a/tests/server/fastmcp/test_integration.py b/tests/server/fastmcp/test_integration.py index e7b2e670e..f918ac973 100644 --- a/tests/server/fastmcp/test_integration.py +++ b/tests/server/fastmcp/test_integration.py @@ -693,9 +693,7 @@ async def test_fastmcp_quickstart(server_transport: str, server_url: str) -> Non @pytest.mark.parametrize( "server_transport", [ - # Skip SSE for async tools - SSE client has issues with long polling in test environment - # causing BrokenResourceError during async operation status polling - # ("async_tool_basic", "sse"), + ("async_tool_basic", "sse"), ("async_tool_basic", "streamable-http"), ], indirect=True, @@ -776,7 +774,7 @@ async def test_async_tool_basic(server_transport: str, server_url: str) -> None: @pytest.mark.parametrize( "server_transport", [ - # ("async_tool_basic", "sse"), + ("async_tool_basic", "sse"), ("async_tool_basic", "streamable-http"), ], indirect=True, @@ -819,7 +817,7 @@ async def test_async_tool_basic_legacy_protocol(server_transport: str, server_ur @pytest.mark.parametrize( "server_transport", [ - # ("async_tool_basic", "sse"), + ("async_tool_basic", "sse"), ("async_tool_basic", "streamable-http"), ], indirect=True, diff --git a/uv.lock b/uv.lock index 91c417e49..4feed3ce1 100644 --- a/uv.lock +++ b/uv.lock @@ -5,7 +5,10 @@ requires-python = ">=3.10" [manifest] members = [ "mcp", + "mcp-async-reconnect-client", "mcp-simple-auth", + "mcp-simple-auth-client", + "mcp-simple-chatbot", "mcp-simple-pagination", "mcp-simple-prompt", "mcp-simple-resource", @@ -692,6 +695,37 @@ docs = [ { name = "mkdocstrings-python", specifier = ">=1.12.2" }, ] +[[package]] +name = "mcp-async-reconnect-client" +version = "0.1.0" +source = { editable = "examples/clients/async-reconnect-client" } +dependencies = [ + { name = "anyio" }, + { name = "click" 
}, + { name = "mcp" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "anyio", specifier = ">=4.5" }, + { name = "click", specifier = ">=8.2.0" }, + { name = "mcp", editable = "." }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.378" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "ruff", specifier = ">=0.6.9" }, +] + [[package]] name = "mcp-simple-auth" version = "0.1.0" @@ -733,6 +767,68 @@ dev = [ { name = "ruff", specifier = ">=0.8.5" }, ] +[[package]] +name = "mcp-simple-auth-client" +version = "0.1.0" +source = { editable = "examples/clients/simple-auth-client" } +dependencies = [ + { name = "click" }, + { name = "mcp" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "click", specifier = ">=8.2.0" }, + { name = "mcp", editable = "." }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.379" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "ruff", specifier = ">=0.6.9" }, +] + +[[package]] +name = "mcp-simple-chatbot" +version = "0.1.0" +source = { editable = "examples/clients/simple-chatbot" } +dependencies = [ + { name = "mcp" }, + { name = "python-dotenv" }, + { name = "requests" }, + { name = "uvicorn" }, +] + +[package.dev-dependencies] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "mcp", editable = "." 
}, + { name = "python-dotenv", specifier = ">=1.0.0" }, + { name = "requests", specifier = ">=2.31.0" }, + { name = "uvicorn", specifier = ">=0.32.1" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "pyright", specifier = ">=1.1.379" }, + { name = "pytest", specifier = ">=8.3.3" }, + { name = "ruff", specifier = ">=0.6.9" }, +] + [[package]] name = "mcp-simple-pagination" version = "0.1.0" From 84c6e4b0b7904a8f98d9de4edad0e9e1e500d3d4 Mon Sep 17 00:00:00 2001 From: Luca Chang Date: Fri, 10 Oct 2025 18:32:26 -0700 Subject: [PATCH 41/41] Clean up LRO code and update docs --- README.md | 10 ++++-- .../servers/sqlite-async-operations/README.md | 3 +- .../mcp_sqlite_async_operations/server.py | 32 +++++++++++-------- src/mcp/server/fastmcp/server.py | 14 ++------ src/mcp/server/lowlevel/server.py | 9 ++---- 5 files changed, 33 insertions(+), 35 deletions(-) diff --git a/README.md b/README.md index 39769e4bf..94b422b32 100644 --- a/README.md +++ b/README.md @@ -1663,7 +1663,7 @@ For more information on mounting applications in Starlette, see the [Starlette d ### Persistent Async Operations -For production deployments, you may want async operations to survive server restarts. The `ServerAsyncOperationManager` uses pluggable `AsyncOperationStore` and `AsyncOperationBroker` components to handle operation persistence and task queuing. +For production deployments, you may want async operations to survive server restarts. The `ServerAsyncOperationManager` uses pluggable `OperationEventQueue`, `AsyncOperationStore`, and `AsyncOperationBroker` components to handle operation persistence and task queuing. 
#### Operation Lifecycle @@ -1679,6 +1679,10 @@ Async operations follow this lifecycle: from mcp.server.fastmcp import FastMCP from mcp.shared.async_operations import ServerAsyncOperationManager +# Create custom event queues +custom_request_queue = MyAsyncOperationEventQueue() +custom_response_queue = MyAsyncOperationEventQueue() + # Create custom store and broker implementations custom_store = MyAsyncOperationStore() custom_broker = MyAsyncOperationBroker() @@ -1686,7 +1690,9 @@ custom_broker = MyAsyncOperationBroker() # Create operation manager with custom components operation_manager = ServerAsyncOperationManager( store=custom_store, - broker=custom_broker + broker=custom_broker, + operation_request_queue=custom_request_queue, + operation_response_queue=custom_response_queue, ) # Use with FastMCP diff --git a/examples/servers/sqlite-async-operations/README.md b/examples/servers/sqlite-async-operations/README.md index efbcd7775..643ad6b90 100644 --- a/examples/servers/sqlite-async-operations/README.md +++ b/examples/servers/sqlite-async-operations/README.md @@ -6,6 +6,7 @@ This example demonstrates how to implement custom async operations storage and t The example showcases the pluggable architecture of the async operations system: +- `SQLiteOperationEventQueue`: Custom event queue that manages operation messages for disconnected clients - `SQLiteAsyncOperationStore`: Custom implementation that persists operations to SQLite - `SQLiteAsyncOperationBroker`: Custom implementation that persists pending tasks to SQLite - `ServerAsyncOperationManager`: Uses both custom store and broker for full persistence @@ -30,7 +31,7 @@ uv run mcp-sqlite-async-operations --transport streamable-http --port 8000 ## Testing Persistent Async Operations 1. Start the server -2. Call one of the async tools (`long_computation` or `fetch_data`) +2. Call the async tool (`fetch_data`) 3. **Restart the server while the operation is running** 4. 
The operation will automatically resume and complete 5. Use the operation token to check status and retrieve results diff --git a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py index 2ea6d6bc7..24f4390e7 100644 --- a/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py +++ b/examples/servers/sqlite-async-operations/mcp_sqlite_async_operations/server.py @@ -216,24 +216,25 @@ async def cleanup_expired(self) -> int: class SQLiteOperationEventQueue(OperationEventQueue): """SQLite-based implementation of OperationEventQueue for operation-specific event delivery.""" - def __init__(self, db_path: str = "async_operations.db"): + def __init__(self, db_path: str = "async_operations.db", table_name: str = "operation_events"): self.db_path = db_path + self.table_name = table_name self._init_db() def _init_db(self): """Initialize the SQLite database for operation event queuing.""" with sqlite3.connect(self.db_path) as conn: - conn.execute(""" - CREATE TABLE IF NOT EXISTS operation_events ( + conn.execute(f""" + CREATE TABLE IF NOT EXISTS {self.table_name} ( id INTEGER PRIMARY KEY AUTOINCREMENT, operation_token TEXT NOT NULL, message TEXT NOT NULL, created_at REAL NOT NULL ) """) - conn.execute(""" + conn.execute(f""" CREATE INDEX IF NOT EXISTS idx_operation_events_token_created - ON operation_events(operation_token, created_at) + ON {self.table_name}(operation_token, created_at) """) conn.commit() @@ -244,8 +245,8 @@ async def enqueue_event(self, operation_token: str, message: types.JSONRPCMessag with sqlite3.connect(self.db_path) as conn: conn.execute( - """ - INSERT INTO operation_events (operation_token, message, created_at) + f""" + INSERT INTO {self.table_name} (operation_token, message, created_at) VALUES (?, ?, ?) 
""", (operation_token, message_json, created_at), @@ -259,8 +260,8 @@ async def dequeue_events(self, operation_token: str) -> list[types.JSONRPCMessag # Get all events for this operation token cursor = conn.execute( - """ - SELECT id, message FROM operation_events + f""" + SELECT id, message FROM {self.table_name} WHERE operation_token = ? ORDER BY created_at """, @@ -279,7 +280,7 @@ async def dequeue_events(self, operation_token: str) -> list[types.JSONRPCMessag # Delete the dequeued events if event_ids: placeholders = ",".join("?" * len(event_ids)) - conn.execute(f"DELETE FROM operation_events WHERE id IN ({placeholders})", event_ids) + conn.execute(f"DELETE FROM {self.table_name} WHERE id IN ({placeholders})", event_ids) conn.commit() return events @@ -419,13 +420,18 @@ class UserPreferences(BaseModel): def main(port: int, transport: str, db_path: str): """Run the SQLite async operations example server.""" # Create components with specified database path - operation_event_queue = SQLiteOperationEventQueue(db_path) + operation_request_queue = SQLiteOperationEventQueue(db_path, "operation_requests") + operation_response_queue = SQLiteOperationEventQueue(db_path, "operation_responses") broker = SQLiteAsyncOperationBroker(db_path) store = SQLiteAsyncOperationStore(db_path) - manager = ServerAsyncOperationManager(store=store, broker=broker, operation_request_queue=operation_event_queue) + manager = ServerAsyncOperationManager( + store=store, + broker=broker, + operation_request_queue=operation_request_queue, + operation_response_queue=operation_response_queue, + ) mcp = FastMCP( "SQLite Async Operations Demo", - operation_event_queue=operation_event_queue, async_operations=manager, ) diff --git a/src/mcp/server/fastmcp/server.py b/src/mcp/server/fastmcp/server.py index 9f2fde6b5..0d367c7a9 100644 --- a/src/mcp/server/fastmcp/server.py +++ b/src/mcp/server/fastmcp/server.py @@ -45,7 +45,6 @@ from mcp.server.streamable_http import EventStore from 
mcp.server.streamable_http_manager import StreamableHTTPSessionManager from mcp.server.transport_security import TransportSecuritySettings -from mcp.shared.async_operations import OperationEventQueue from mcp.shared.context import LifespanContextT, RequestContext, RequestT from mcp.types import ( AnyFunction, @@ -148,7 +147,6 @@ def __init__( # noqa: PLR0913 event_store: EventStore | None = None, *, async_operations: ServerAsyncOperationManager | None = None, - operation_event_queue: OperationEventQueue | None = None, tools: list[Tool] | None = None, debug: bool = False, log_level: Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] = "INFO", @@ -168,7 +166,7 @@ def __init__( # noqa: PLR0913 auth: AuthSettings | None = None, transport_security: TransportSecuritySettings | None = None, ): - from mcp.shared.async_operations import InMemoryOperationEventQueue, ServerAsyncOperationManager + from mcp.shared.async_operations import ServerAsyncOperationManager self.settings = Settings( debug=debug, @@ -190,12 +188,7 @@ def __init__( # noqa: PLR0913 transport_security=transport_security, ) - self._operation_event_queue = operation_event_queue or InMemoryOperationEventQueue() - self._operation_response_queue = InMemoryOperationEventQueue() - self._async_operations = async_operations or ServerAsyncOperationManager( - operation_request_queue=self._operation_event_queue, - operation_response_queue=self._operation_response_queue, - ) + self._async_operations = async_operations or ServerAsyncOperationManager() self._mcp_server = MCPServer( name=name or "FastMCP", @@ -203,8 +196,6 @@ def __init__( # noqa: PLR0913 website_url=website_url, icons=icons, async_operations=self._async_operations, - operation_request_queue=self._operation_event_queue, - operation_response_queue=self._operation_response_queue, # TODO(Marcelo): It seems there's a type mismatch between the lifespan type from an FastMCP and Server. 
# We need to create a Lifespan type that is a generic on the server type, like Starlette does. lifespan=(lifespan_wrapper(self, self.settings.lifespan) if self.settings.lifespan else default_lifespan), # type: ignore @@ -228,7 +219,6 @@ def __init__( # noqa: PLR0913 if auth_server_provider and not token_verifier: self._token_verifier = ProviderTokenVerifier(auth_server_provider) self._event_store = event_store - self._operation_event_queue = operation_event_queue self._custom_starlette_routes: list[Route] = [] self.dependencies = self.settings.dependencies self._session_manager: StreamableHTTPSessionManager | None = None diff --git a/src/mcp/server/lowlevel/server.py b/src/mcp/server/lowlevel/server.py index 83d6aa39d..b80689546 100644 --- a/src/mcp/server/lowlevel/server.py +++ b/src/mcp/server/lowlevel/server.py @@ -95,7 +95,7 @@ async def main(): from mcp.types import NEXT_PROTOCOL_VERSION, Operation, RequestId if TYPE_CHECKING: - from mcp.shared.async_operations import OperationEventQueue, ServerAsyncOperationManager + from mcp.shared.async_operations import ServerAsyncOperationManager logger = logging.getLogger(__name__) @@ -145,8 +145,6 @@ def __init__( website_url: str | None = None, icons: list[types.Icon] | None = None, async_operations: ServerAsyncOperationManager | None = None, - operation_request_queue: OperationEventQueue | None = None, - operation_response_queue: OperationEventQueue | None = None, lifespan: Callable[ [Server[LifespanResultT, RequestT]], AbstractAsyncContextManager[LifespanResultT], @@ -160,10 +158,7 @@ def __init__( self.website_url = website_url self.icons = icons self.lifespan = lifespan - self.async_operations = async_operations or ServerAsyncOperationManager( - operation_request_queue=operation_request_queue, - operation_response_queue=operation_response_queue, - ) + self.async_operations = async_operations or ServerAsyncOperationManager() self.async_operations.set_handler(self._execute_tool_async) # Track request ID to operation 
token mapping for cancellation self._request_to_operation: dict[RequestId, str] = {}