From 813522bc210af67a0499de1e52e08db7f24b7ad1 Mon Sep 17 00:00:00 2001
From: Kazuhiro Sera
Date: Wed, 25 Jun 2025 10:13:37 +0900
Subject: [PATCH 1/3] Fix #604 Chat Completion model raises runtime error when
 response.choices is empty

---
 src/agents/models/openai_chatcompletions.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/agents/models/openai_chatcompletions.py b/src/agents/models/openai_chatcompletions.py
index 08803d8c0..b93b653f4 100644
--- a/src/agents/models/openai_chatcompletions.py
+++ b/src/agents/models/openai_chatcompletions.py
@@ -7,7 +7,8 @@
 
 from openai import NOT_GIVEN, AsyncOpenAI, AsyncStream
 from openai.types import ChatModel
-from openai.types.chat import ChatCompletion, ChatCompletionChunk
+from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessage
+from openai.types.chat.chat_completion import Choice
 from openai.types.responses import Response
 from openai.types.responses.response_prompt_param import ResponsePromptParam
 from openai.types.responses.response_usage import InputTokensDetails, OutputTokensDetails
@@ -74,8 +75,11 @@ async def get_response(
             prompt=prompt,
         )
 
-        first_choice = response.choices[0]
-        message = first_choice.message
+        message: ChatCompletionMessage | None = None
+        first_choice: Choice | None = None
+        if response.choices and len(response.choices) > 0:
+            first_choice = response.choices[0]
+            message = first_choice.message
 
         if _debug.DONT_LOG_MODEL_DATA:
             logger.debug("Received model response")

From 34d1d4ac397f05ade8fba94df4944364f2317fbc Mon Sep 17 00:00:00 2001
From: Kazuhiro Sera
Date: Wed, 25 Jun 2025 10:40:33 +0900
Subject: [PATCH 2/3] Fix

---
 src/agents/models/openai_chatcompletions.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/src/agents/models/openai_chatcompletions.py b/src/agents/models/openai_chatcompletions.py
index b93b653f4..274c201db 100644
--- a/src/agents/models/openai_chatcompletions.py
+++ b/src/agents/models/openai_chatcompletions.py
@@ -90,10 +90,8 @@ async def get_response(
                     json.dumps(message.model_dump(), indent=2),
                 )
             else:
-                logger.debug(
-                    "LLM resp had no message. finish_reason: %s",
-                    first_choice.finish_reason,
-                )
+                finish_reason = first_choice.finish_reason if first_choice else "-"
+                logger.debug("LLM resp had no message. finish_reason: %s", finish_reason)
 
         usage = (
             Usage(

From 403aeea0445dbb7f4b530633add8d403943d9cd0 Mon Sep 17 00:00:00 2001
From: Kazuhiro Sera
Date: Thu, 26 Jun 2025 08:26:32 +0900
Subject: [PATCH 3/3] review

---
 src/agents/models/openai_chatcompletions.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/agents/models/openai_chatcompletions.py b/src/agents/models/openai_chatcompletions.py
index 274c201db..120d726db 100644
--- a/src/agents/models/openai_chatcompletions.py
+++ b/src/agents/models/openai_chatcompletions.py
@@ -91,7 +91,7 @@ async def get_response(
                 )
             else:
                 finish_reason = first_choice.finish_reason if first_choice else "-"
-                logger.debug("LLM resp had no message. finish_reason: %s", finish_reason)
+                logger.debug(f"LLM resp had no message. finish_reason: {finish_reason}")
 
         usage = (
             Usage(
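
For reference, a standalone sketch of the guard these three patches converge on. The stub ChatCompletion below (its id, model, and field values) is illustrative only, not a fixture from the repository; it mimics an OpenAI-compatible backend that returns an empty choices list.

from openai.types.chat import ChatCompletion, ChatCompletionMessage
from openai.types.chat.chat_completion import Choice

# Hypothetical response with no choices, as some OpenAI-compatible
# backends return; the field values here are placeholders.
empty = ChatCompletion(
    id="chatcmpl-demo",
    choices=[],
    created=0,
    model="gpt-4o",
    object="chat.completion",
)

# Guarded extraction from PATCH 1: no IndexError when choices is empty.
message: ChatCompletionMessage | None = None
first_choice: Choice | None = None
if empty.choices and len(empty.choices) > 0:
    first_choice = empty.choices[0]
    message = first_choice.message

# Fallback logging value from PATCH 2/3: "-" stands in for a missing
# finish_reason instead of dereferencing a None first_choice.
finish_reason = first_choice.finish_reason if first_choice else "-"
print(f"LLM resp had no message. finish_reason: {finish_reason}")  # prints "-"

With a populated choices list the same path extracts first_choice.message exactly as before; the only behavioral change is that an empty list now logs a fallback rather than raising IndexError.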