From 97e3dc3c76f793c1a67e0b383e83fe21f9a6a1b3 Mon Sep 17 00:00:00 2001
From: Rohan Mehta
Date: Tue, 25 Mar 2025 12:51:40 -0400
Subject: [PATCH] [1/n] Add MCP types to the SDK

### Summary:

1. Add the MCP dependency for Python 3.10+, since MCP doesn't support Python 3.9 and below
2. Create `MCPServer`, which is the Agents SDK representation of an MCP server
3. Create implementations for HTTP-SSE and stdio servers, directly copying the [MCP SDK example](https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py)
4. Add a util to transform MCP tools into Agents SDK tools

Note: I added optional caching support to the servers. That way, if you know a server's tools don't change, you can cache them.
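Example usage (a minimal sketch of how these pieces fit together; the `npx` filesystem server below is just a placeholder command and is not part of this change):

```python
import asyncio

from agents.mcp import MCPServerStdio, MCPUtil


async def main():
    # Spawn a stdio-based MCP server. `cache_tools_list=True` caches the tool list,
    # avoiding a round-trip to the server on every `list_tools()` call.
    async with MCPServerStdio(
        params={
            "command": "npx",  # placeholder server command, not part of this PR
            "args": ["-y", "@modelcontextprotocol/server-filesystem", "."],
        },
        cache_tools_list=True,
    ) as server:
        # Convert the server's MCP tools into Agents SDK function tools.
        tools = await MCPUtil.get_all_function_tools([server])
        print([tool.name for tool in tools])

        # If the server's tools ever do change, the cache can be invalidated.
        server.invalidate_tools_cache()


asyncio.run(main())
```

`MCPServerSse` works the same way, except its params take a `url` (plus optional headers and timeouts) instead of a command.
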
### Test Plan:

Checks pass. I added tests at the end of the stack.

---
 pyproject.toml             |   1 +
 src/agents/mcp/__init__.py |  21 +++
 src/agents/mcp/mcp_util.py |  94 +++++++++++++
 src/agents/mcp/server.py   | 269 +++++++++++++++++++++++++++++++++++++
 src/agents/mcp/util.py     |  96 +++++++++++++
 uv.lock                    |  93 ++++++++++++-
 6 files changed, 572 insertions(+), 2 deletions(-)
 create mode 100644 src/agents/mcp/__init__.py
 create mode 100644 src/agents/mcp/mcp_util.py
 create mode 100644 src/agents/mcp/server.py
 create mode 100644 src/agents/mcp/util.py

diff --git a/pyproject.toml b/pyproject.toml
index 667ab355..3678c714 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,6 +13,7 @@ dependencies = [
     "typing-extensions>=4.12.2, <5",
     "requests>=2.0, <3",
     "types-requests>=2.0, <3",
+    "mcp; python_version >= '3.10'",
 ]
 classifiers = [
     "Typing :: Typed",
diff --git a/src/agents/mcp/__init__.py b/src/agents/mcp/__init__.py
new file mode 100644
index 00000000..1a72a89f
--- /dev/null
+++ b/src/agents/mcp/__init__.py
@@ -0,0 +1,21 @@
+try:
+    from .server import (
+        MCPServer,
+        MCPServerSse,
+        MCPServerSseParams,
+        MCPServerStdio,
+        MCPServerStdioParams,
+    )
+except ImportError:
+    pass
+
+from .util import MCPUtil
+
+__all__ = [
+    "MCPServer",
+    "MCPServerSse",
+    "MCPServerSseParams",
+    "MCPServerStdio",
+    "MCPServerStdioParams",
+    "MCPUtil",
+]
diff --git a/src/agents/mcp/mcp_util.py b/src/agents/mcp/mcp_util.py
new file mode 100644
index 00000000..41b4c521
--- /dev/null
+++ b/src/agents/mcp/mcp_util.py
@@ -0,0 +1,94 @@
+import functools
+import json
+from typing import Any
+
+from mcp.types import Tool as MCPTool
+
+from .. import _debug
+from ..exceptions import AgentsException, ModelBehaviorError, UserError
+from ..logger import logger
+from ..run_context import RunContextWrapper
+from ..tool import FunctionTool, Tool
+from .server import MCPServer
+
+
+class MCPUtil:
+    """Set of utilities for interop between MCP and Agents SDK tools."""
+
+    @classmethod
+    async def get_all_function_tools(cls, servers: list[MCPServer]) -> list[Tool]:
+        """Get all function tools from a list of MCP servers."""
+        tools = []
+        tool_names: set[str] = set()
+        for server in servers:
+            server_tools = await cls.get_function_tools(server)
+            server_tool_names = {tool.name for tool in server_tools}
+            if len(server_tool_names & tool_names) > 0:
+                raise UserError(
+                    f"Duplicate tool names found across MCP servers: "
+                    f"{server_tool_names & tool_names}"
+                )
+            tool_names.update(server_tool_names)
+            tools.extend(server_tools)
+
+        return tools
+
+    @classmethod
+    async def get_function_tools(cls, server: MCPServer) -> list[Tool]:
+        """Get all function tools from a single MCP server."""
+        tools = await server.list_tools()
+        return [cls.to_function_tool(tool, server) for tool in tools]
+
+    @classmethod
+    def to_function_tool(cls, tool: MCPTool, server: MCPServer) -> FunctionTool:
+        """Convert an MCP tool to an Agents SDK function tool."""
+        invoke_func = functools.partial(cls.invoke_mcp_tool, server, tool)
+        return FunctionTool(
+            name=tool.name,
+            description=tool.description or "",
+            params_json_schema=tool.inputSchema,
+            on_invoke_tool=invoke_func,
+            strict_json_schema=False,
+        )
+
+    @classmethod
+    async def invoke_mcp_tool(
+        cls, server: MCPServer, tool: MCPTool, context: RunContextWrapper[Any], input_json: str
+    ) -> str:
+        """Invoke an MCP tool and return the result as a string."""
+        try:
+            json_data: dict[str, Any] = json.loads(input_json) if input_json else {}
+        except Exception as e:
+            if _debug.DONT_LOG_TOOL_DATA:
+                logger.debug(f"Invalid JSON input for tool {tool.name}")
+            else:
+                logger.debug(f"Invalid JSON input for tool {tool.name}: {input_json}")
+            raise ModelBehaviorError(
+                f"Invalid JSON input for tool {tool.name}: {input_json}"
+            ) from e
+
+        if _debug.DONT_LOG_TOOL_DATA:
+            logger.debug(f"Invoking MCP tool {tool.name}")
+        else:
+            logger.debug(f"Invoking MCP tool {tool.name} with input {input_json}")
+
+        try:
+            result = await server.call_tool(tool.name, json_data)
+        except Exception as e:
+            logger.error(f"Error invoking MCP tool {tool.name}: {e}")
+            raise AgentsException(f"Error invoking MCP tool {tool.name}: {e}") from e
+
+        if _debug.DONT_LOG_TOOL_DATA:
+            logger.debug(f"MCP tool {tool.name} completed.")
+        else:
+            logger.debug(f"MCP tool {tool.name} returned {result}")
+
+        # The MCP tool result is a list of content items, whereas OpenAI tool outputs are a single
+        # string. We'll try to convert.
+        if len(result.content) == 1:
+            return result.content[0].model_dump_json()
+        elif len(result.content) > 1:
+            return json.dumps([item.model_dump() for item in result.content])
+        else:
+            logger.error(f"Errored MCP tool result: {result}")
+            return "Error running tool."
diff --git a/src/agents/mcp/server.py b/src/agents/mcp/server.py
new file mode 100644
index 00000000..e19e686a
--- /dev/null
+++ b/src/agents/mcp/server.py
@@ -0,0 +1,269 @@
+from __future__ import annotations
+
+import abc
+import asyncio
+from contextlib import AbstractAsyncContextManager, AsyncExitStack
+from pathlib import Path
+from typing import Any, Literal
+
+from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
+from mcp import ClientSession, StdioServerParameters, Tool as MCPTool, stdio_client
+from mcp.client.sse import sse_client
+from mcp.types import CallToolResult, JSONRPCMessage
+from typing_extensions import NotRequired, TypedDict
+
+from ..exceptions import UserError
+from ..logger import logger
+
+
+class MCPServer(abc.ABC):
+    """Base class for Model Context Protocol servers."""
+
+    @abc.abstractmethod
+    async def connect(self):
+        """Connect to the server. For example, this might mean spawning a subprocess or
+        opening a network connection. The server is expected to remain connected until
+        `cleanup()` is called.
+        """
+        pass
+
+    @abc.abstractmethod
+    async def cleanup(self):
+        """Cleanup the server. For example, this might mean closing a subprocess or
+        closing a network connection.
+        """
+        pass
+
+    @abc.abstractmethod
+    async def list_tools(self) -> list[MCPTool]:
+        """List the tools available on the server."""
+        pass
+
+    @abc.abstractmethod
+    async def call_tool(self, tool_name: str, arguments: dict[str, Any] | None) -> CallToolResult:
+        """Invoke a tool on the server."""
+        pass
+
+
+class _MCPServerWithClientSession(MCPServer, abc.ABC):
+    """Base class for MCP servers that use a `ClientSession` to communicate with the server."""
+
+    def __init__(self, cache_tools_list: bool):
+        """
+        Args:
+            cache_tools_list: Whether to cache the tools list. If `True`, the tools list will be
+                cached and only fetched from the server once. If `False`, the tools list will be
+                fetched from the server on each call to `list_tools()`. The cache can be
+                invalidated by calling `invalidate_tools_cache()`. You should set this to `True`
+                if you know the server will not change its tools list, because it can drastically
+                improve latency (by avoiding a round-trip to the server every time).
+        """
+        self.session: ClientSession | None = None
+        self.exit_stack: AsyncExitStack = AsyncExitStack()
+        self._cleanup_lock: asyncio.Lock = asyncio.Lock()
+        self.cache_tools_list = cache_tools_list
+
+        # The cache is always dirty at startup, so that we fetch tools at least once
+        self._cache_dirty = True
+        self._tools_list: list[MCPTool] | None = None
+
+    @abc.abstractmethod
+    def create_streams(
+        self,
+    ) -> AbstractAsyncContextManager[
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
+    ]:
+        """Create the streams for the server."""
+        pass
+
+    async def __aenter__(self):
+        await self.connect()
+        return self
+
+    async def __aexit__(self, exc_type, exc_value, traceback):
+        await self.cleanup()
+
+    def invalidate_tools_cache(self):
+        """Invalidate the tools cache."""
+        self._cache_dirty = True
+
+    async def connect(self):
+        """Connect to the server."""
+        try:
+            transport = await self.exit_stack.enter_async_context(self.create_streams())
+            read, write = transport
+            session = await self.exit_stack.enter_async_context(ClientSession(read, write))
+            await session.initialize()
+            self.session = session
+        except Exception as e:
+            logger.error(f"Error initializing MCP server: {e}")
+            await self.cleanup()
+            raise
+
+    async def list_tools(self) -> list[MCPTool]:
+        """List the tools available on the server."""
+        if not self.session:
+            raise UserError("Server not initialized. Make sure you call `connect()` first.")
+
+        # Return from cache if caching is enabled, we have tools, and the cache is not dirty
+        if self.cache_tools_list and not self._cache_dirty and self._tools_list:
+            return self._tools_list
+
+        # Reset the cache dirty to False
+        self._cache_dirty = False
+
+        # Fetch the tools from the server
+        self._tools_list = (await self.session.list_tools()).tools
+        return self._tools_list
+
+    async def call_tool(self, tool_name: str, arguments: dict[str, Any] | None) -> CallToolResult:
+        """Invoke a tool on the server."""
+        if not self.session:
+            raise UserError("Server not initialized. Make sure you call `connect()` first.")
+
+        return await self.session.call_tool(tool_name, arguments)
+
+    async def cleanup(self):
+        """Cleanup the server."""
+        async with self._cleanup_lock:
+            try:
+                await self.exit_stack.aclose()
+                self.session = None
+            except Exception as e:
+                logger.error(f"Error cleaning up server: {e}")
+
+
+class MCPServerStdioParams(TypedDict):
+    """Mirrors `mcp.client.stdio.StdioServerParameters`, but lets you pass params without another
+    import.
+    """
+
+    command: str
+    """The executable to run to start the server. For example, `python` or `node`."""
+
+    args: NotRequired[list[str]]
+    """Command line args to pass to the `command` executable. For example, `['foo.py']` or
+    `['server.js', '--port', '8080']`."""
+
+    env: NotRequired[dict[str, str]]
+    """The environment variables to set for the server."""
+
+    cwd: NotRequired[str | Path]
+    """The working directory to use when spawning the process."""
+
+    encoding: NotRequired[str]
+    """The text encoding used when sending/receiving messages to the server. Defaults to `utf-8`."""
+
+    encoding_error_handler: NotRequired[Literal["strict", "ignore", "replace"]]
+    """The text encoding error handler. Defaults to `strict`.
+
+    See https://docs.python.org/3/library/codecs.html#codec-base-classes for
+    explanations of possible values.
+    """
+
+
+class MCPServerStdio(_MCPServerWithClientSession):
+    """MCP server implementation that uses the stdio transport. See the
+    [spec](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#stdio)
+    for details.
+    """
+
+    def __init__(self, params: MCPServerStdioParams, cache_tools_list: bool = False):
+        """Create a new MCP server based on the stdio transport.
+
+        Args:
+            params: The params that configure the server. This includes:
+                - The command (e.g. `python` or `node`) that starts the server.
+                - The args to pass to the server command (e.g. `foo.py` or `server.js`).
+                - The environment variables to set for the server.
+            cache_tools_list: Whether to cache the tools list. If `True`, the tools list will be
+                cached and only fetched from the server once. If `False`, the tools list will be
+                fetched from the server on each call to `list_tools()`. The cache can be
+                invalidated by calling `invalidate_tools_cache()`. You should set this to `True`
+                if you know the server will not change its tools list, because it can drastically
+                improve latency (by avoiding a round-trip to the server every time).
+        """
+        super().__init__(cache_tools_list)
+
+        self.params = StdioServerParameters(
+            command=params["command"],
+            args=params.get("args", []),
+            env=params.get("env"),
+            cwd=params.get("cwd"),
+            encoding=params.get("encoding", "utf-8"),
+            encoding_error_handler=params.get("encoding_error_handler", "strict"),
+        )
+
+    def create_streams(
+        self,
+    ) -> AbstractAsyncContextManager[
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
+    ]:
+        """Create the streams for the server."""
+        return stdio_client(self.params)
+
+
+class MCPServerSseParams(TypedDict):
+    """Mirrors the params in `mcp.client.sse.sse_client`."""
+
+    url: str
+    """The URL of the server."""
+
+    headers: NotRequired[dict[str, str]]
+    """The headers to send to the server."""
+
+    timeout: NotRequired[float]
+    """The timeout for the HTTP request. Defaults to 5 seconds."""
+
+    sse_read_timeout: NotRequired[float]
+    """The timeout for the SSE connection, in seconds. Defaults to 5 minutes."""
+
+
+class MCPServerSse(_MCPServerWithClientSession):
+    """MCP server implementation that uses the HTTP with SSE transport. See the
+    [spec](https://spec.modelcontextprotocol.io/specification/2024-11-05/basic/transports/#http-with-sse)
+    for details.
+    """
+
+    def __init__(self, params: MCPServerSseParams, cache_tools_list: bool = False):
+        """Create a new MCP server based on the HTTP with SSE transport.
+
+        Args:
+            params: The params that configure the server. This includes:
+                - The URL of the server.
+                - The headers to send to the server.
+                - The timeout for the HTTP request.
+                - The timeout for the SSE connection.
+
+            cache_tools_list: Whether to cache the tools list. If `True`, the tools list will be
+                cached and only fetched from the server once. If `False`, the tools list will be
+                fetched from the server on each call to `list_tools()`. The cache can be
+                invalidated by calling `invalidate_tools_cache()`. You should set this to `True`
+                if you know the server will not change its tools list, because it can drastically
+                improve latency (by avoiding a round-trip to the server every time).
+        """
+        super().__init__(cache_tools_list)
+
+        self.params = params
+
+    def create_streams(
+        self,
+    ) -> AbstractAsyncContextManager[
+        tuple[
+            MemoryObjectReceiveStream[JSONRPCMessage | Exception],
+            MemoryObjectSendStream[JSONRPCMessage],
+        ]
+    ]:
+        """Create the streams for the server."""
+        return sse_client(
+            url=self.params["url"],
+            headers=self.params.get("headers", None),
+            timeout=self.params.get("timeout", 5),
+            sse_read_timeout=self.params.get("sse_read_timeout", 60 * 5),
+        )
diff --git a/src/agents/mcp/util.py b/src/agents/mcp/util.py
new file mode 100644
index 00000000..038c4fec
--- /dev/null
+++ b/src/agents/mcp/util.py
@@ -0,0 +1,96 @@
+import functools
+import json
+from typing import TYPE_CHECKING, Any
+
+from .. import _debug
+from ..exceptions import AgentsException, ModelBehaviorError, UserError
+from ..logger import logger
+from ..run_context import RunContextWrapper
+from ..tool import FunctionTool, Tool
+
+if TYPE_CHECKING:
+    from mcp.types import Tool as MCPTool
+
+    from .server import MCPServer
+
+
+class MCPUtil:
+    """Set of utilities for interop between MCP and Agents SDK tools."""
+
+    @classmethod
+    async def get_all_function_tools(cls, servers: list["MCPServer"]) -> list[Tool]:
+        """Get all function tools from a list of MCP servers."""
+        tools = []
+        tool_names: set[str] = set()
+        for server in servers:
+            server_tools = await cls.get_function_tools(server)
+            server_tool_names = {tool.name for tool in server_tools}
+            if len(server_tool_names & tool_names) > 0:
+                raise UserError(
+                    f"Duplicate tool names found across MCP servers: "
+                    f"{server_tool_names & tool_names}"
+                )
+            tool_names.update(server_tool_names)
+            tools.extend(server_tools)
+
+        return tools
+
+    @classmethod
+    async def get_function_tools(cls, server: "MCPServer") -> list[Tool]:
+        """Get all function tools from a single MCP server."""
+        tools = await server.list_tools()
+        return [cls.to_function_tool(tool, server) for tool in tools]
+
+    @classmethod
+    def to_function_tool(cls, tool: "MCPTool", server: "MCPServer") -> FunctionTool:
+        """Convert an MCP tool to an Agents SDK function tool."""
+        invoke_func = functools.partial(cls.invoke_mcp_tool, server, tool)
+        return FunctionTool(
+            name=tool.name,
+            description=tool.description or "",
+            params_json_schema=tool.inputSchema,
+            on_invoke_tool=invoke_func,
+            strict_json_schema=False,
+        )
+
+    @classmethod
+    async def invoke_mcp_tool(
+        cls, server: "MCPServer", tool: "MCPTool", context: RunContextWrapper[Any], input_json: str
+    ) -> str:
+        """Invoke an MCP tool and return the result as a string."""
+        try:
+            json_data: dict[str, Any] = json.loads(input_json) if input_json else {}
+        except Exception as e:
+            if _debug.DONT_LOG_TOOL_DATA:
+                logger.debug(f"Invalid JSON input for tool {tool.name}")
+            else:
+                logger.debug(f"Invalid JSON input for tool {tool.name}: {input_json}")
+            raise ModelBehaviorError(
+                f"Invalid JSON input for tool {tool.name}: {input_json}"
+            ) from e
+
+        if _debug.DONT_LOG_TOOL_DATA:
+            logger.debug(f"Invoking MCP tool {tool.name}")
+        else:
+            logger.debug(f"Invoking MCP tool {tool.name} with input {input_json}")
+
+        try:
+            result = await server.call_tool(tool.name, json_data)
+        except Exception as e:
+            logger.error(f"Error invoking MCP tool {tool.name}: {e}")
+            raise AgentsException(f"Error invoking MCP tool {tool.name}: {e}") from e
+
+        if _debug.DONT_LOG_TOOL_DATA:
+            logger.debug(f"MCP tool {tool.name} completed.")
+        else:
+            logger.debug(f"MCP tool {tool.name} returned {result}")
+
+        # The MCP tool result is a list of content items, whereas OpenAI tool outputs are a single
+        # string. We'll try to convert.
+        if len(result.content) == 1:
+            return result.content[0].model_dump_json()
+        elif len(result.content) > 1:
+            return json.dumps([item.model_dump() for item in result.content])
+        else:
+            logger.error(f"Errored MCP tool result: {result}")
+            return "Error running tool."
diff --git a/uv.lock b/uv.lock
index a9c79e21..d6eba43f 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1,5 +1,4 @@
 version = 1
-revision = 1
 requires-python = ">=3.9"
 resolution-markers = [
     "python_full_version >= '3.10'",
@@ -459,6 +458,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 },
 ]

+[[package]]
+name = "httpx-sse"
+version = "0.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 },
+]
+
 [[package]]
 name = "idna"
 version = "3.10"
@@ -699,6 +707,25 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506 },
 ]

+[[package]]
+name = "mcp"
+version = "1.5.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "anyio", marker = "python_full_version >= '3.10'" },
+    { name = "httpx", marker = "python_full_version >= '3.10'" },
+    { name = "httpx-sse", marker = "python_full_version >= '3.10'" },
+    { name = "pydantic", marker = "python_full_version >= '3.10'" },
+    { name = "pydantic-settings", marker = "python_full_version >= '3.10'" },
+    { name = "sse-starlette", marker = "python_full_version >= '3.10'" },
+    { name = "starlette", marker = "python_full_version >= '3.10'" },
+    { name = "uvicorn", marker = "python_full_version >= '3.10'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/6d/c9/c55764824e893fdebe777ac7223200986a275c3191dba9169f8eb6d7c978/mcp-1.5.0.tar.gz", hash = "sha256:5b2766c05e68e01a2034875e250139839498c61792163a7b221fc170c12f5aa9", size = 159128 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c1/d1/3ff566ecf322077d861f1a68a1ff025cad337417bd66ad22a7c6f7dfcfaf/mcp-1.5.0-py3-none-any.whl", hash = "sha256:51c3f35ce93cb702f7513c12406bbea9665ef75a08db909200b07da9db641527", size = 73734 },
+]
+
 [[package]]
 name = "mdit-py-plugins"
 version = "0.4.2"
@@ -1054,6 +1081,7 @@ version = "0.0.6"
 source = { editable = "." }
 dependencies = [
     { name = "griffe" },
+    { name = "mcp", marker = "python_full_version >= '3.10'" },
     { name = "openai" },
     { name = "pydantic" },
     { name = "requests" },
@@ -1091,6 +1119,7 @@ dev = [
 [package.metadata]
 requires-dist = [
     { name = "griffe", specifier = ">=1.5.6,<2" },
+    { name = "mcp", marker = "python_full_version >= '3.10'" },
     { name = "numpy", marker = "python_full_version >= '3.10' and extra == 'voice'", specifier = ">=2.2.0,<3" },
     { name = "openai", specifier = ">=1.66.5" },
     { name = "pydantic", specifier = ">=2.10,<3" },
@@ -1099,7 +1128,6 @@ requires-dist = [
     { name = "typing-extensions", specifier = ">=4.12.2,<5" },
     { name = "websockets", marker = "extra == 'voice'", specifier = ">=15.0,<16" },
 ]
-provides-extras = ["voice"]

 [package.metadata.requires-dev]
 dev = [
@@ -1305,6 +1333,19 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/a1/0c/c5c5cd3689c32ed1fe8c5d234b079c12c281c051759770c05b8bed6412b5/pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35", size = 2004961 },
 ]

+[[package]]
+name = "pydantic-settings"
+version = "2.8.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pydantic", marker = "python_full_version >= '3.10'" },
+    { name = "python-dotenv", marker = "python_full_version >= '3.10'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839 },
+]
+
 [[package]]
 name = "pyee"
 version = "12.1.1"
@@ -1496,6 +1537,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
 ]

+[[package]]
+name = "python-dotenv"
+version = "1.0.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 },
+]
+
 [[package]]
 name = "python-xlib"
 version = "0.33"
@@ -1660,6 +1710,31 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/af/9b/15217b04f3b36d30de55fef542389d722de63f1ad81f9c72d8afc98cb6ab/sounddevice-0.5.1-py3-none-win_amd64.whl", hash = "sha256:4313b63f2076552b23ac3e0abd3bcfc0c1c6a696fc356759a13bd113c9df90f1", size = 363634 },
 ]

+[[package]]
+name = "sse-starlette"
+version = "2.2.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "anyio", marker = "python_full_version >= '3.10'" },
+    { name = "starlette", marker = "python_full_version >= '3.10'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120 },
+]
+
+[[package]]
+name = "starlette"
+version = "0.46.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "anyio", marker = "python_full_version >= '3.10'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/04/1b/52b27f2e13ceedc79a908e29eac426a63465a1a01248e5f24aa36a62aeb3/starlette-0.46.1.tar.gz", hash = "sha256:3c88d58ee4bd1bb807c0d1acb381838afc7752f9ddaec81bbe4383611d833230", size = 2580102 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a0/4b/528ccf7a982216885a1ff4908e886b8fb5f19862d1962f56a3fce2435a70/starlette-0.46.1-py3-none-any.whl", hash = "sha256:77c74ed9d2720138b25875133f3a2dae6d854af2ec37dceb56aef370c1d8a227", size = 71995 },
+]
+
 [[package]]
 name = "textual"
 version = "2.1.2"
@@ -1774,6 +1849,20 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 },
 ]

+[[package]]
+name = "uvicorn"
+version = "0.34.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "click", marker = "python_full_version >= '3.10'" },
+    { name = "h11", marker = "python_full_version >= '3.10'" },
+    { name = "typing-extensions", marker = "python_full_version == '3.10.*'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 },
+]
+
 [[package]]
 name = "watchdog"
 version = "6.0.0"