From 064e25b01b5c82c08aea66ff898ff27adbb013d8 Mon Sep 17 00:00:00 2001
From: James Hills <70035505+jhills20@users.noreply.github.com>
Date: Fri, 4 Apr 2025 16:53:12 -0700
Subject: [PATCH 1/4] add links and mcp + voice examples (#438)

---
 docs/examples.md | 24 +++++++++++++++---------
 1 file changed, 15 insertions(+), 9 deletions(-)

diff --git a/docs/examples.md b/docs/examples.md
index 1d3ebde7..30d60282 100644
--- a/docs/examples.md
+++ b/docs/examples.md
@@ -5,32 +5,38 @@ Check out a variety of sample implementations of the SDK in the examples section
 ## Categories
 
-- **agent_patterns:**
+- **[agent_patterns](https://github.com/openai/openai-agents-python/tree/main/examples/agent_patterns):**
   Examples in this category illustrate common agent design patterns, such as
-
+
   - Deterministic workflows
   - Agents as tools
   - Parallel agent execution
 
-- **basic:**
+- **[basic](https://github.com/openai/openai-agents-python/tree/main/examples/basic):**
   These examples showcase foundational capabilities of the SDK, such as
-
+
   - Dynamic system prompts
   - Streaming outputs
   - Lifecycle events
 
-- **tool examples:**
+- **[tool examples](https://github.com/openai/openai-agents-python/tree/main/examples/tools):**
   Learn how to implement OAI hosted tools such as web search and file search,
   and integrate them into your agents.
 
-- **model providers:**
+- **[model providers](https://github.com/openai/openai-agents-python/tree/main/examples/model_providers):**
   Explore how to use non-OpenAI models with the SDK.
 
-- **handoffs:**
+- **[handoffs](https://github.com/openai/openai-agents-python/tree/main/examples/handoffs):**
   See practical examples of agent handoffs.
 
-- **customer_service** and **research_bot:**
+- **[mcp](https://github.com/openai/openai-agents-python/tree/main/examples/mcp):**
+  Learn how to build agents with MCP.
+
+- **[customer_service](https://github.com/openai/openai-agents-python/tree/main/examples/customer_service)** and **[research_bot](https://github.com/openai/openai-agents-python/tree/main/examples/research_bot):**
   Two more built-out examples that illustrate real-world applications
-
+
   - **customer_service**: Example customer service system for an airline.
   - **research_bot**: Simple deep research clone.
+
+- **[voice](https://github.com/openai/openai-agents-python/tree/main/examples/voice):**
+  See examples of voice agents, using our TTS and STT models.

From 50bbfdd8beb89a9c864a5e4fea4dd2a79e99f724 Mon Sep 17 00:00:00 2001
From: Rohan Mehta
Date: Mon, 7 Apr 2025 18:38:36 -0400
Subject: [PATCH 2/4] Ensure MCP works when inputSchema.properties is missing (#454)

Resolves #449 - TLDR, [OpenAI's API](https://platform.openai.com/docs/api-reference/responses/create)
expects the properties field to be present, whereas the MCP schema explicitly allows
omitting the properties field.
[MCP Spec](https://github.com/modelcontextprotocol/specification/blob/main/schema/2025-03-26/schema.json)
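
For illustration, a minimal sketch of the schema fix-up this patch performs (the variable name and the tool schema here are hypothetical, mirroring the new test in the diff below; the real change lives in `MCPUtil.to_function_tool`):

```python
# An MCP tool input schema that is valid per the MCP spec but omits "properties".
mcp_input_schema: dict = {"type": "object", "description": "Test tool"}

# The SDK now backfills an empty "properties" object before the schema is
# handed to the OpenAI API, which requires the field to be present.
if "properties" not in mcp_input_schema:
    mcp_input_schema["properties"] = {}

assert mcp_input_schema == {"type": "object", "description": "Test tool", "properties": {}}
```
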
---
 src/agents/mcp/util.py     |  5 +++++
 tests/mcp/test_mcp_util.py | 43 +++++++++++++++++++++++++++++++-------
 2 files changed, 41 insertions(+), 7 deletions(-)

diff --git a/src/agents/mcp/util.py b/src/agents/mcp/util.py
index 770ae8dd..bbfe1885 100644
--- a/src/agents/mcp/util.py
+++ b/src/agents/mcp/util.py
@@ -59,6 +59,11 @@ def to_function_tool(
         """Convert an MCP tool to an Agents SDK function tool."""
         invoke_func = functools.partial(cls.invoke_mcp_tool, server, tool)
         schema, is_strict = tool.inputSchema, False
+
+        # MCP spec doesn't require the inputSchema to have `properties`, but OpenAI spec does.
+        if "properties" not in schema:
+            schema["properties"] = {}
+
         if convert_schemas_to_strict:
             try:
                 schema = ensure_strict_json_schema(schema)
diff --git a/tests/mcp/test_mcp_util.py b/tests/mcp/test_mcp_util.py
index 64378b59..74356a16 100644
--- a/tests/mcp/test_mcp_util.py
+++ b/tests/mcp/test_mcp_util.py
@@ -63,7 +63,10 @@ async def test_get_all_function_tools():
 
     for idx, tool in enumerate(tools):
         assert isinstance(tool, FunctionTool)
-        assert tool.params_json_schema == schemas[idx]
+        if schemas[idx] == {}:
+            assert tool.params_json_schema == snapshot({"properties": {}})
+        else:
+            assert tool.params_json_schema == schemas[idx]
         assert tool.name == names[idx]
 
     # Also make sure it works with strict schemas
@@ -167,10 +170,7 @@ async def test_agent_convert_schemas_true():
 
     # Checks that additionalProperties is set to False
     assert bar_tool.params_json_schema == snapshot(
-        {
-            "type": "object",
-            "additionalProperties": {"type": "string"},
-        }
+        {"type": "object", "additionalProperties": {"type": "string"}, "properties": {}}
     )
     assert bar_tool.strict_json_schema is False, "bar_tool should not be strict"
 
@@ -220,7 +220,9 @@ async def test_agent_convert_schemas_false():
     assert foo_tool.params_json_schema == strict_schema
     assert foo_tool.strict_json_schema is False, "Shouldn't be converted unless specified"
 
-    assert bar_tool.params_json_schema == non_strict_schema
+    assert bar_tool.params_json_schema == snapshot(
+        {"type": "object", "additionalProperties": {"type": "string"}, "properties": {}}
+    )
     assert bar_tool.strict_json_schema is False
 
     assert baz_tool.params_json_schema == possible_to_convert_schema
@@ -255,8 +257,35 @@ async def test_agent_convert_schemas_unset():
     assert foo_tool.params_json_schema == strict_schema
     assert foo_tool.strict_json_schema is False, "Shouldn't be converted unless specified"
 
-    assert bar_tool.params_json_schema == non_strict_schema
+    assert bar_tool.params_json_schema == snapshot(
+        {"type": "object", "additionalProperties": {"type": "string"}, "properties": {}}
+    )
     assert bar_tool.strict_json_schema is False
 
     assert baz_tool.params_json_schema == possible_to_convert_schema
     assert baz_tool.strict_json_schema is False, "Shouldn't be converted unless specified"
+
+
+@pytest.mark.asyncio
+async def test_util_adds_properties():
+    """The MCP spec doesn't require the inputSchema to have `properties`, so we need to add it
+    if it's missing.
+    """
+    schema = {
+        "type": "object",
+        "description": "Test tool",
+    }
+
+    server = FakeMCPServer()
+    server.add_tool("test_tool", schema)
+
+    tools = await MCPUtil.get_all_function_tools([server], convert_schemas_to_strict=False)
+    tool = next(tool for tool in tools if tool.name == "test_tool")
+
+    assert isinstance(tool, FunctionTool)
+    assert "properties" in tool.params_json_schema
+    assert tool.params_json_schema["properties"] == {}
+
+    assert tool.params_json_schema == snapshot(
+        {"type": "object", "description": "Test tool", "properties": {}}
+    )

From 2bcc864b81ba74c2e5252ce58b8f765bce98a329 Mon Sep 17 00:00:00 2001
From: Rohan Mehta
Date: Mon, 7 Apr 2025 18:38:36 -0400
Subject: [PATCH 3/4] Don't send the "store" param unless it's hitting OpenAI (#455)

Summary:
See #443. Causes issues with Gemini.

Test Plan:
Tests. Also tested with Gemini to ensure it works.
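
For illustration, a rough sketch of the resolution logic this patch introduces (it mirrors the new `_Converter.get_store_param` helper and the `test_store_param` test in the diff below; the placeholder API keys are illustrative only):

```python
from openai import AsyncOpenAI

from agents import ModelSettings
from agents.models.openai_chatcompletions import _Converter

# Default client points at https://api.openai.com, so "store" defaults to True.
openai_client = AsyncOpenAI(api_key="sk-placeholder")
assert _Converter.get_store_param(openai_client, ModelSettings()) is True

# Any other base URL (e.g. a Gemini-compatible endpoint): "store" is omitted by default...
other_client = AsyncOpenAI(base_url="http://www.notopenai.com", api_key="placeholder")
assert _Converter.get_store_param(other_client, ModelSettings()) is None

# ...but an explicit ModelSettings value is always respected.
assert _Converter.get_store_param(other_client, ModelSettings(store=True)) is True
```
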
---
 src/agents/models/openai_chatcompletions.py | 14 ++++---
 tests/test_openai_chatcompletions.py        | 43 +++++++++++++++++++--
 uv.lock                                     |  2 +-
 3 files changed, 50 insertions(+), 9 deletions(-)

diff --git a/src/agents/models/openai_chatcompletions.py b/src/agents/models/openai_chatcompletions.py
index cbc48c50..e0aafad0 100644
--- a/src/agents/models/openai_chatcompletions.py
+++ b/src/agents/models/openai_chatcompletions.py
@@ -518,10 +518,8 @@ async def _fetch_response(
                 f"Response format: {response_format}\n"
             )
 
-        # Match the behavior of Responses where store is True when not given
-        store = model_settings.store if model_settings.store is not None else True
-
         reasoning_effort = model_settings.reasoning.effort if model_settings.reasoning else None
+        store = _Converter.get_store_param(self._get_client(), model_settings)
 
         ret = await self._get_client().chat.completions.create(
             model=self.model,
@@ -537,10 +535,10 @@ async def _fetch_response(
             parallel_tool_calls=parallel_tool_calls,
             stream=stream,
             stream_options={"include_usage": True} if stream else NOT_GIVEN,
-            store=store,
+            store=self._non_null_or_not_given(store),
            reasoning_effort=self._non_null_or_not_given(reasoning_effort),
             extra_headers=_HEADERS,
-            metadata=model_settings.metadata,
+            metadata=self._non_null_or_not_given(model_settings.metadata),
         )
 
         if isinstance(ret, ChatCompletion):
@@ -570,6 +568,12 @@ def _get_client(self) -> AsyncOpenAI:
 
 
 class _Converter:
+    @classmethod
+    def get_store_param(cls, client: AsyncOpenAI, model_settings: ModelSettings) -> bool | None:
+        # Match the behavior of Responses where store is True when not given
+        default_store = True if str(client.base_url).startswith("https://api.openai.com") else None
+        return model_settings.store if model_settings.store is not None else default_store
+
     @classmethod
     def convert_tool_choice(
         cls, tool_choice: Literal["auto", "required", "none"] | str | None
diff --git a/tests/test_openai_chatcompletions.py b/tests/test_openai_chatcompletions.py
index 9a53e2b7..a3198d33 100644
--- a/tests/test_openai_chatcompletions.py
+++ b/tests/test_openai_chatcompletions.py
@@ -5,7 +5,7 @@
 
 import httpx
 import pytest
-from openai import NOT_GIVEN
+from openai import NOT_GIVEN, AsyncOpenAI
 from openai.types.chat.chat_completion import ChatCompletion, Choice
 from openai.types.chat.chat_completion_chunk import ChatCompletionChunk
 from openai.types.chat.chat_completion_message import ChatCompletionMessage
@@ -31,6 +31,7 @@
     generation_span,
 )
 from agents.models.fake_id import FAKE_RESPONSES_ID
+from agents.models.openai_chatcompletions import _Converter
 
 
 @pytest.mark.allow_call_model_methods
@@ -226,7 +227,7 @@ def __init__(self, completions: DummyCompletions) -> None:
     # Ensure expected args were passed through to OpenAI client.
     kwargs = completions.kwargs
     assert kwargs["stream"] is False
-    assert kwargs["store"] is True
+    assert kwargs["store"] is NOT_GIVEN
     assert kwargs["model"] == "gpt-4"
     assert kwargs["messages"][0]["role"] == "system"
     assert kwargs["messages"][0]["content"] == "sys"
@@ -280,7 +281,7 @@ def __init__(self, completions: DummyCompletions) -> None:
     )
     # Check OpenAI client was called for streaming
     assert completions.kwargs["stream"] is True
-    assert completions.kwargs["store"] is True
+    assert completions.kwargs["store"] is NOT_GIVEN
     assert completions.kwargs["stream_options"] == {"include_usage": True}
     # Response is a proper openai Response
     assert isinstance(response, Response)
@@ -290,3 +291,39 @@ def __init__(self, completions: DummyCompletions) -> None:
     assert response.output == []
     # We returned the async iterator produced by our dummy.
     assert hasattr(stream, "__aiter__")
+
+
+def test_store_param():
+    """Should default to True for OpenAI API calls, and None otherwise."""
+
+    model_settings = ModelSettings()
+    client = AsyncOpenAI()
+    assert _Converter.get_store_param(client, model_settings) is True, (
+        "Should default to True for OpenAI API calls"
+    )
+
+    model_settings = ModelSettings(store=False)
+    assert _Converter.get_store_param(client, model_settings) is False, (
+        "Should respect explicitly set store=False"
+    )
+
+    model_settings = ModelSettings(store=True)
+    assert _Converter.get_store_param(client, model_settings) is True, (
+        "Should respect explicitly set store=True"
+    )
+
+    client = AsyncOpenAI(base_url="http://www.notopenai.com")
+    model_settings = ModelSettings()
+    assert _Converter.get_store_param(client, model_settings) is None, (
+        "Should default to None for non-OpenAI API calls"
+    )
+
+    model_settings = ModelSettings(store=False)
+    assert _Converter.get_store_param(client, model_settings) is False, (
+        "Should respect explicitly set store=False"
+    )
+
+    model_settings = ModelSettings(store=True)
+    assert _Converter.get_store_param(client, model_settings) is True, (
+        "Should respect explicitly set store=True"
+    )
diff --git a/uv.lock b/uv.lock
index e443c009..a6018eeb 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1087,7 +1087,7 @@ wheels = [
 
 [[package]]
 name = "openai-agents"
-version = "0.0.7"
+version = "0.0.8"
 source = { editable = "." }
 dependencies = [
     { name = "griffe" },

From 9d3d6a58e56e1f2c114ed0530666bd4b50e54449 Mon Sep 17 00:00:00 2001
From: Rohan Mehta
Date: Mon, 7 Apr 2025 19:27:19 -0400
Subject: [PATCH 4/4] v0.0.9 (#456)

bump version.
---
 pyproject.toml | 2 +-
 uv.lock        | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 3ade2c52..f850d629 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "openai-agents"
-version = "0.0.8"
+version = "0.0.9"
 description = "OpenAI Agents SDK"
 readme = "README.md"
 requires-python = ">=3.9"
diff --git a/uv.lock b/uv.lock
index a6018eeb..d1c27225 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1087,7 +1087,7 @@ wheels = [
 
 [[package]]
 name = "openai-agents"
-version = "0.0.8"
+version = "0.0.9"
 source = { editable = "." }
 dependencies = [
     { name = "griffe" },