@@ -994,10 +994,16 @@ async def _run_single_turn_streamed(
         )

         # Call hook just before the model is invoked, with the correct system_prompt.
-        if agent.hooks:
-            await agent.hooks.on_llm_start(
-                context_wrapper, agent, filtered.instructions, filtered.input
-            )
+        await asyncio.gather(
+            hooks.on_llm_start(context_wrapper, agent, filtered.instructions, filtered.input),
+            (
+                agent.hooks.on_llm_start(
+                    context_wrapper, agent, filtered.instructions, filtered.input
+                )
+                if agent.hooks
+                else _coro.noop_coroutine()
+            ),
+        )

         # 1. Stream the output events
         async for event in model.stream_response(
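Note: `asyncio.gather` needs an awaitable in every argument position, so the branch for agents without hooks falls back to `_coro.noop_coroutine()`. The helper itself is not shown in this diff; presumably it is just an async no-op along these lines (a sketch of the assumption, not the SDK's actual module):

    # Hypothetical stand-in for the _coro helper that this diff only imports.
    async def noop_coroutine() -> None:
        """Awaitable that does nothing; lets asyncio.gather accept an 'absent' hook."""
        return None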
@@ -1056,8 +1062,15 @@ async def _run_single_turn_streamed(
             streamed_result._event_queue.put_nowait(RawResponsesStreamEvent(data=event))

         # Call hook just after the model response is finalized.
-        if agent.hooks and final_response is not None:
-            await agent.hooks.on_llm_end(context_wrapper, agent, final_response)
+        if final_response is not None:
+            await asyncio.gather(
+                (
+                    agent.hooks.on_llm_end(context_wrapper, agent, final_response)
+                    if agent.hooks
+                    else _coro.noop_coroutine()
+                ),
+                hooks.on_llm_end(context_wrapper, agent, final_response),
+            )

         # 2. At this point, the streaming is complete for this turn of the agent loop.
         if not final_response:
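With this change the run-level `hooks` object and the optional per-agent `agent.hooks` are both notified around every model call, and the two callbacks are awaited concurrently via `asyncio.gather`. A minimal sketch of a subscriber, assuming `RunHooks`/`AgentHooks` expose `on_llm_start`/`on_llm_end` with the positional arguments used in the calls above (import path, parameter names, and class names are illustrative, inferred from the diff rather than quoted from the SDK docs):

    from agents.lifecycle import AgentHooks, RunHooks


    class LoggingRunHooks(RunHooks):
        # Run-level: receives callbacks for every agent in the run.
        async def on_llm_start(self, context, agent, system_prompt, input_items) -> None:
            print(f"[run] LLM start for {agent.name}")

        async def on_llm_end(self, context, agent, response) -> None:
            print(f"[run] LLM end for {agent.name}")


    class LoggingAgentHooks(AgentHooks):
        # Agent-level: receives callbacks only for the agent it is attached to.
        async def on_llm_start(self, context, agent, system_prompt, input_items) -> None:
            print(f"[agent] LLM start for {agent.name}")

        async def on_llm_end(self, context, agent, response) -> None:
            print(f"[agent] LLM end for {agent.name}")

Because the two hook calls are gathered rather than awaited sequentially, neither level should rely on the other having completed first.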
@@ -1150,6 +1163,7 @@ async def _run_single_turn(
             output_schema,
             all_tools,
             handoffs,
+            hooks,
             context_wrapper,
             run_config,
             tool_use_tracker,
@@ -1345,6 +1359,7 @@ async def _get_new_response(
         output_schema: AgentOutputSchemaBase | None,
         all_tools: list[Tool],
         handoffs: list[Handoff],
+        hooks: RunHooks[TContext],
         context_wrapper: RunContextWrapper[TContext],
         run_config: RunConfig,
         tool_use_tracker: AgentToolUseTracker,
@@ -1364,14 +1379,21 @@ async def _get_new_response(
         model = cls._get_model(agent, run_config)
         model_settings = agent.model_settings.resolve(run_config.model_settings)
         model_settings = RunImpl.maybe_reset_tool_choice(agent, tool_use_tracker, model_settings)
-        # If the agent has hooks, we need to call them before and after the LLM call
-        if agent.hooks:
-            await agent.hooks.on_llm_start(
-                context_wrapper,
-                agent,
-                filtered.instructions,  # Use filtered instructions
-                filtered.input,  # Use filtered input
-            )
+
+        # If we have run hooks, or if the agent has hooks, we need to call them before the LLM call
+        await asyncio.gather(
+            hooks.on_llm_start(context_wrapper, agent, filtered.instructions, filtered.input),
+            (
+                agent.hooks.on_llm_start(
+                    context_wrapper,
+                    agent,
+                    filtered.instructions,  # Use filtered instructions
+                    filtered.input,  # Use filtered input
+                )
+                if agent.hooks
+                else _coro.noop_coroutine()
+            ),
+        )

         new_response = await model.get_response(
             system_instructions=filtered.instructions,
@@ -1387,9 +1409,15 @@ async def _get_new_response(
             conversation_id=conversation_id,
             prompt=prompt_config,
         )
-        # If the agent has hooks, we need to call them after the LLM call
-        if agent.hooks:
-            await agent.hooks.on_llm_end(context_wrapper, agent, new_response)
+        # If we have run hooks, or if the agent has hooks, we need to call them after the LLM call
+        await asyncio.gather(
+            (
+                agent.hooks.on_llm_end(context_wrapper, agent, new_response)
+                if agent.hooks
+                else _coro.noop_coroutine()
+            ),
+            hooks.on_llm_end(context_wrapper, agent, new_response),
+        )

         context_wrapper.usage.add(new_response.usage)

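End to end, a caller can attach both levels of hooks and, with this patch, both fire around each model call in `_get_new_response` as well as in the streamed path. A hedged usage sketch reusing the hook classes sketched above (it assumes `Runner.run(..., hooks=...)` and `Agent(..., hooks=...)` accept these objects, as they do for the other lifecycle hooks):

    import asyncio

    from agents import Agent, Runner


    async def main() -> None:
        agent = Agent(
            name="Assistant",
            instructions="Answer briefly.",
            hooks=LoggingAgentHooks(),  # agent-level hooks (optional)
        )
        # Run-level hooks go to the runner; with this change they also receive
        # on_llm_start/on_llm_end, not just the agent/tool lifecycle events.
        result = await Runner.run(agent, "Hello!", hooks=LoggingRunHooks())
        print(result.final_output)


    asyncio.run(main())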