Skip to content

Commit d7eeaf2

Browse files
authored
Add reasoning text delta event support for gpt-oss models (openai#1586)
Since the TS SDK has its own protocol layer, the way it handles data is a bit different, but this aligns with openai/openai-agents-js#292.
1 parent 3b36fd9 commit d7eeaf2

File tree

2 files changed

+135
-17
lines changed

2 files changed

+135
-17
lines changed
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
import asyncio
2+
import os
3+
4+
from openai import AsyncOpenAI
5+
from openai.types.shared import Reasoning
6+
7+
from agents import (
8+
Agent,
9+
ModelSettings,
10+
OpenAIChatCompletionsModel,
11+
Runner,
12+
set_tracing_disabled,
13+
)
14+
15+
# This example targets a non-OpenAI backend, so tracing is switched off.
set_tracing_disabled(True)

# Uncomment to see verbose request/response logging while debugging:
# import logging
# logging.basicConfig(level=logging.DEBUG)

# A gpt-oss model served through OpenRouter's OpenAI-compatible
# Chat Completions endpoint; the API key comes from the environment.
gpt_oss_model = OpenAIChatCompletionsModel(
    openai_client=AsyncOpenAI(
        base_url="https://openrouter.ai/api/v1",
        api_key=os.getenv("OPENROUTER_API_KEY"),
    ),
    model="openai/gpt-oss-20b",
)
27+
28+
29+
async def main():
    """Stream a single agent run, printing reasoning deltas in yellow and
    answer-text deltas in green as they arrive."""
    yellow, green, reset = "\033[33m", "\033[32m", "\033[0m"

    agent = Agent(
        name="Assistant",
        instructions=(
            "You're a helpful assistant."
            " You provide a concise answer to the user's question."
        ),
        model=gpt_oss_model,
        model_settings=ModelSettings(
            reasoning=Reasoning(effort="high", summary="detailed"),
        ),
    )

    result = Runner.run_streamed(agent, "Tell me about recursion in programming.")
    print("=== Run starting ===")
    print("\n")
    async for event in result.stream_events():
        # Only raw provider events carry the typed delta payloads we render.
        if event.type != "raw_response_event":
            continue
        data = event.data
        if data.type == "response.reasoning_text.delta":
            print(f"{yellow}{data.delta}{reset}", end="", flush=True)
        elif data.type == "response.output_text.delta":
            print(f"{green}{data.delta}{reset}", end="", flush=True)

    print("\n")
    print("=== Run complete ===")
51+
if __name__ == "__main__":
    # Script entry point: run the async example to completion.
    asyncio.run(main())

src/agents/models/chatcmpl_stream_handler.py

Lines changed: 81 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -28,11 +28,17 @@
2828
ResponseTextDeltaEvent,
2929
ResponseUsage,
3030
)
31-
from openai.types.responses.response_reasoning_item import Summary
31+
from openai.types.responses.response_reasoning_item import Content, Summary
3232
from openai.types.responses.response_reasoning_summary_part_added_event import (
3333
Part as AddedEventPart,
3434
)
3535
from openai.types.responses.response_reasoning_summary_part_done_event import Part as DoneEventPart
36+
from openai.types.responses.response_reasoning_text_delta_event import (
37+
ResponseReasoningTextDeltaEvent,
38+
)
39+
from openai.types.responses.response_reasoning_text_done_event import (
40+
ResponseReasoningTextDoneEvent,
41+
)
3642
from openai.types.responses.response_usage import InputTokensDetails, OutputTokensDetails
3743

3844
from ..items import TResponseStreamEvent
@@ -95,7 +101,7 @@ async def handle_stream(
95101

96102
delta = chunk.choices[0].delta
97103

98-
# Handle reasoning content
104+
# Handle reasoning content for reasoning summaries
99105
if hasattr(delta, "reasoning_content"):
100106
reasoning_content = delta.reasoning_content
101107
if reasoning_content and not state.reasoning_content_index_and_output:
@@ -138,10 +144,55 @@ async def handle_stream(
138144
)
139145

140146
# Create a new summary with updated text
141-
current_summary = state.reasoning_content_index_and_output[1].summary[0]
142-
updated_text = current_summary.text + reasoning_content
143-
new_summary = Summary(text=updated_text, type="summary_text")
144-
state.reasoning_content_index_and_output[1].summary[0] = new_summary
147+
current_content = state.reasoning_content_index_and_output[1].summary[0]
148+
updated_text = current_content.text + reasoning_content
149+
new_content = Summary(text=updated_text, type="summary_text")
150+
state.reasoning_content_index_and_output[1].summary[0] = new_content
151+
152+
# Handle reasoning content from 3rd party platforms
153+
if hasattr(delta, "reasoning"):
154+
reasoning_text = delta.reasoning
155+
if reasoning_text and not state.reasoning_content_index_and_output:
156+
state.reasoning_content_index_and_output = (
157+
0,
158+
ResponseReasoningItem(
159+
id=FAKE_RESPONSES_ID,
160+
summary=[],
161+
content=[Content(text="", type="reasoning_text")],
162+
type="reasoning",
163+
),
164+
)
165+
yield ResponseOutputItemAddedEvent(
166+
item=ResponseReasoningItem(
167+
id=FAKE_RESPONSES_ID,
168+
summary=[],
169+
content=[Content(text="", type="reasoning_text")],
170+
type="reasoning",
171+
),
172+
output_index=0,
173+
type="response.output_item.added",
174+
sequence_number=sequence_number.get_and_increment(),
175+
)
176+
177+
if reasoning_text and state.reasoning_content_index_and_output:
178+
yield ResponseReasoningTextDeltaEvent(
179+
delta=reasoning_text,
180+
item_id=FAKE_RESPONSES_ID,
181+
output_index=0,
182+
content_index=0,
183+
type="response.reasoning_text.delta",
184+
sequence_number=sequence_number.get_and_increment(),
185+
)
186+
187+
# Create a new summary with updated text
188+
if state.reasoning_content_index_and_output[1].content is None:
189+
state.reasoning_content_index_and_output[1].content = [
190+
Content(text="", type="reasoning_text")
191+
]
192+
current_text = state.reasoning_content_index_and_output[1].content[0]
193+
updated_text = current_text.text + reasoning_text
194+
new_text_content = Content(text=updated_text, type="reasoning_text")
195+
state.reasoning_content_index_and_output[1].content[0] = new_text_content
145196

146197
# Handle regular content
147198
if delta.content is not None:
@@ -344,17 +395,30 @@ async def handle_stream(
344395
)
345396

346397
if state.reasoning_content_index_and_output:
347-
yield ResponseReasoningSummaryPartDoneEvent(
348-
item_id=FAKE_RESPONSES_ID,
349-
output_index=0,
350-
summary_index=0,
351-
part=DoneEventPart(
352-
text=state.reasoning_content_index_and_output[1].summary[0].text,
353-
type="summary_text",
354-
),
355-
type="response.reasoning_summary_part.done",
356-
sequence_number=sequence_number.get_and_increment(),
357-
)
398+
if (
399+
state.reasoning_content_index_and_output[1].summary
400+
and len(state.reasoning_content_index_and_output[1].summary) > 0
401+
):
402+
yield ResponseReasoningSummaryPartDoneEvent(
403+
item_id=FAKE_RESPONSES_ID,
404+
output_index=0,
405+
summary_index=0,
406+
part=DoneEventPart(
407+
text=state.reasoning_content_index_and_output[1].summary[0].text,
408+
type="summary_text",
409+
),
410+
type="response.reasoning_summary_part.done",
411+
sequence_number=sequence_number.get_and_increment(),
412+
)
413+
elif state.reasoning_content_index_and_output[1].content is not None:
414+
yield ResponseReasoningTextDoneEvent(
415+
item_id=FAKE_RESPONSES_ID,
416+
output_index=0,
417+
content_index=0,
418+
text=state.reasoning_content_index_and_output[1].content[0].text,
419+
type="response.reasoning_text.done",
420+
sequence_number=sequence_number.get_and_increment(),
421+
)
358422
yield ResponseOutputItemDoneEvent(
359423
item=state.reasoning_content_index_and_output[1],
360424
output_index=0,

0 commit comments

Comments
 (0)