@@ -172,6 +172,8 @@ async def test_async_run_hooks_with_agent_hooks_with_llm():
172172
173173@pytest .mark .asyncio
174174async def test_run_hooks_llm_error_non_streaming (monkeypatch ):
175+ from agents import RunError
176+
175177 hooks = RunHooksForTests ()
176178 model = FakeModel ()
177179 agent = Agent (name = "A" , model = model , tools = [get_function_tool ("f" , "res" )], handoffs = [])
@@ -181,9 +183,16 @@ async def boom(*args, **kwargs):
181183
182184 monkeypatch .setattr (FakeModel , "get_response" , boom , raising = True )
183185
184- with pytest .raises (RuntimeError , match = "boom" ) :
186+ with pytest .raises (RunError ) as exc_info :
185187 await Runner .run (agent , input = "hello" , hooks = hooks )
186188
189+ # Verify the original exception is preserved
190+ assert isinstance (exc_info .value .original_exception , RuntimeError )
191+ assert str (exc_info .value .original_exception ) == "boom"
192+ # Verify run_data is attached
193+ assert exc_info .value .run_data is not None
194+ assert exc_info .value .run_data .context_wrapper is not None
195+
187196 # Current behavior is that hooks will not fire on LLM failure
188197 assert hooks .events ["on_agent_start" ] == 1
189198 assert hooks .events ["on_llm_start" ] == 1
@@ -229,16 +238,26 @@ async def test_streamed_run_hooks_llm_error(monkeypatch):
229238 Verify that when the streaming path raises, we still emit on_llm_start
230239 but do NOT emit on_llm_end (current behavior), and the exception propagates.
231240 """
241+ from agents import RunError
242+
232243 hooks = RunHooksForTests ()
233244 agent = Agent (name = "A" , model = BoomModel (), tools = [get_function_tool ("f" , "res" )], handoffs = [])
234245
235246 stream = Runner .run_streamed (agent , input = "hello" , hooks = hooks )
236247
237- # Consuming the stream should surface the exception
238- with pytest .raises (RuntimeError , match = "stream blew up" ) :
248+ # Consuming the stream should surface the exception (wrapped in RunError to preserve usage data)
249+ with pytest .raises (RunError ) as exc_info :
239250 async for _ in stream .stream_events ():
240251 pass
241252
253+ # Verify the original exception is preserved and accessible
254+ assert isinstance (exc_info .value .original_exception , RuntimeError )
255+ assert str (exc_info .value .original_exception ) == "stream blew up"
256+ # Verify run_data is attached with usage information
257+ assert exc_info .value .run_data is not None
258+ assert exc_info .value .run_data .context_wrapper is not None
259+ assert exc_info .value .run_data .context_wrapper .usage is not None
260+
242261 # Current behavior: success-only on_llm_end; ensure starts fired but ends did not.
243262 assert hooks .events ["on_agent_start" ] == 1
244263 assert hooks .events ["on_llm_start" ] == 1