 from agents.models.openai_provider import OpenAIProvider

 from agentex import AsyncAgentex
+from agentex.lib.utils.logging import make_logger
 from agentex.lib.core.tracing.tracer import AsyncTracer
 from agentex.types.task_message_delta import TextDelta
 from agentex.types.task_message_update import (
 from agentex.types.tool_request_content import ToolRequestContent
 from agentex.types.tool_response_content import ToolResponseContent

+logger = make_logger(__name__)
+
+
+def _serialize_item(item: Any) -> dict[str, Any]:
+    """
+    Universal serializer for any item type from the OpenAI Agents SDK.
+
+    Uses model_dump() for Pydantic models, otherwise extracts attributes manually.
+    Filters out internal Pydantic fields that can't be serialized.
+    """
+    if hasattr(item, 'model_dump'):
+        # Pydantic model - use model_dump for proper serialization
+        try:
+            return item.model_dump(mode='json', exclude_unset=True)
+        except Exception:
+            # Fallback to dict conversion
+            return dict(item) if hasattr(item, '__iter__') else {}
+    else:
+        # Not a Pydantic model - extract attributes manually
+        item_dict = {}
+        for attr_name in dir(item):
+            if not attr_name.startswith('_') and attr_name not in ('model_fields', 'model_config', 'model_computed_fields'):
+                try:
+                    attr_value = getattr(item, attr_name, None)
+                    # Skip methods and None values
+                    if attr_value is not None and not callable(attr_value):
+                        # Convert to JSON-serializable format
+                        if hasattr(attr_value, 'model_dump'):
+                            item_dict[attr_name] = attr_value.model_dump()
+                        elif isinstance(attr_value, (str, int, float, bool, list, dict)):
+                            item_dict[attr_name] = attr_value
+                        else:
+                            item_dict[attr_name] = str(attr_value)
+                except Exception:
+                    # Skip attributes that can't be accessed
+                    pass
+        return item_dict
+

 class SyncStreamingModel(Model):
     """Simple model wrapper that adds logging to stream_response and supports tracing."""
@@ -109,10 +148,38 @@ async def get_response(

         response = await self.original_model.get_response(**kwargs)

-        # Set span output
-        if span:
+        # Set span output with structured data
+        if span and response:
+            new_items = []
+            final_output = None
+
+            # Extract final output text from response
+            response_final_output = getattr(response, 'final_output', None)
+            if response_final_output:
+                final_output = response_final_output
+
+            # Extract items from the response output
+            response_output = getattr(response, 'output', None)
+            if response_output:
+                output_items = response_output if isinstance(response_output, list) else [response_output]
+
+                for item in output_items:
+                    item_dict = _serialize_item(item)
+                    if item_dict:
+                        new_items.append(item_dict)
+
+                        # Extract final_output from message type if available
+                        if item_dict.get('type') == 'message' and not final_output:
+                            content = item_dict.get('content', [])
+                            if content and isinstance(content, list):
+                                for content_part in content:
+                                    if isinstance(content_part, dict) and 'text' in content_part:
+                                        final_output = content_part['text']
+                                        break
+
             span.output = {
-                "response": str(response) if response else None,
+                "new_items": new_items,
+                "final_output": final_output,
             }

         return response
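With this change, get_response attaches structured data to the span instead of a single stringified response. As a rough illustration, for a turn that makes one tool call and then answers, span.output would look something like the dict below; the keys inside each item are illustrative, since the real keys are whatever _serialize_item pulls off the Responses API objects:

span_output = {
    "new_items": [
        {"type": "function_call", "name": "get_weather", "arguments": '{"city": "Paris"}', "call_id": "call_abc123"},
        {"type": "message", "role": "assistant", "content": [{"type": "output_text", "text": "It's 18°C in Paris."}]},
    ],
    "final_output": "It's 18°C in Paris.",
}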
@@ -160,7 +227,9 @@ async def stream_response(
         # Wrap the streaming in a tracing span if tracer is available
         if self.tracer and self.trace_id:
             trace = self.tracer.trace(self.trace_id)
-            async with trace.span(
+
+            # Manually start the span instead of using context manager
+            span = await trace.start_span(
                 parent_id=self.parent_span_id,
                 name="run_agent_streamed",
                 input={
@@ -172,7 +241,9 @@ async def stream_response(
172241 "handoffs" : [str (h ) for h in handoffs ] if handoffs else [],
173242 "previous_response_id" : previous_response_id ,
174243 },
175- ) as span :
244+ )
245+
246+ try :
176247 # Get the stream from the original model
177248 stream_kwargs = {
178249 "system_instructions" : system_instructions ,
@@ -193,23 +264,40 @@ async def stream_response(
                 # Get the stream response from the original model and yield each event
                 stream_response = self.original_model.stream_response(**stream_kwargs)

-                # Pass through each event from the original stream
-                event_count = 0
-                final_output = None
+                # Pass through each event from the original stream and track items
+                new_items = []
+                final_response_text = ""
+
                 async for event in stream_response:
-                    event_count += 1
-                    # Track the final output if available
-                    if hasattr(event, 'type') and event.type == 'raw_response_event':
-                        if hasattr(event.data, 'output'):
-                            final_output = event.data.output
+                    event_type = getattr(event, 'type', 'no-type')
+
+                    # Handle response.output_item.done events which contain completed items
+                    if event_type == 'response.output_item.done':
+                        item = getattr(event, 'item', None)
+                        if item is not None:
+                            item_dict = _serialize_item(item)
+                            if item_dict:
+                                new_items.append(item_dict)
+
+                                # Update final_response_text from message type if available
+                                if item_dict.get('type') == 'message':
+                                    content = item_dict.get('content', [])
+                                    if content and isinstance(content, list):
+                                        for content_part in content:
+                                            if isinstance(content_part, dict) and 'text' in content_part:
+                                                final_response_text = content_part['text']
+                                                break
+
                     yield event

-                # Set span output
-                if span:
-                    span.output = {
-                        "event_count": event_count,
-                        "final_output": str(final_output) if final_output else None,
-                    }
+                # Set span output with structured data including tool calls and final response
+                span.output = {
+                    "new_items": new_items,
+                    "final_output": final_response_text if final_response_text else None,
+                }
+            finally:
+                # End the span after all events have been yielded
+                await trace.end_span(span)
         else:
             # No tracing, just stream normally
             # Get the stream from the original model
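The streaming path now replaces the context manager with an explicit start_span/end_span pair wrapped in try/finally, so the span is closed only after the caller has consumed the stream, or when the generator is closed early. A minimal sketch of that pattern in isolation; start_span and end_span here are stand-in coroutines, not the AsyncTracer API:

import asyncio

async def start_span():          # stand-in for trace.start_span(...)
    print("span started")
    return object()

async def end_span(span):        # stand-in for trace.end_span(span)
    print("span ended")

async def stream():
    span = await start_span()
    try:
        for i in range(3):
            yield i              # the caller consumes these lazily
    finally:
        await end_span(span)     # runs on normal completion or early close

async def main():
    gen = stream()
    async for item in gen:
        if item == 1:            # consumer stops early
            break
    await gen.aclose()           # closing the generator still ends the span

asyncio.run(main())              # prints: span started / span ended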