Skip to content

Commit cdcfa4e

Browse files
committed
fix: llm_start/_end not firing on Runner.run
- Added handling of run-level hooks (on_llm_start/on_llm_end) alongside the existing agent-level hooks
1 parent de9d1fd commit cdcfa4e

File tree

1 file changed

+45
-17
lines changed

1 file changed

+45
-17
lines changed

src/agents/run.py

Lines changed: 45 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -994,10 +994,16 @@ async def _run_single_turn_streamed(
994994
)
995995

996996
# Call hook just before the model is invoked, with the correct system_prompt.
997-
if agent.hooks:
998-
await agent.hooks.on_llm_start(
999-
context_wrapper, agent, filtered.instructions, filtered.input
1000-
)
997+
await asyncio.gather(
998+
hooks.on_llm_start(context_wrapper, agent, filtered.instructions, filtered.input),
999+
(
1000+
agent.hooks.on_llm_start(
1001+
context_wrapper, agent, filtered.instructions, filtered.input
1002+
)
1003+
if agent.hooks
1004+
else _coro.noop_coroutine()
1005+
),
1006+
)
10011007

10021008
# 1. Stream the output events
10031009
async for event in model.stream_response(
@@ -1056,8 +1062,15 @@ async def _run_single_turn_streamed(
10561062
streamed_result._event_queue.put_nowait(RawResponsesStreamEvent(data=event))
10571063

10581064
# Call hook just after the model response is finalized.
1059-
if agent.hooks and final_response is not None:
1060-
await agent.hooks.on_llm_end(context_wrapper, agent, final_response)
1065+
if final_response is not None:
1066+
await asyncio.gather(
1067+
(
1068+
agent.hooks.on_llm_end(context_wrapper, agent, final_response)
1069+
if agent.hooks
1070+
else _coro.noop_coroutine()
1071+
),
1072+
hooks.on_llm_end(context_wrapper, agent, final_response),
1073+
)
10611074

10621075
# 2. At this point, the streaming is complete for this turn of the agent loop.
10631076
if not final_response:
@@ -1150,6 +1163,7 @@ async def _run_single_turn(
11501163
output_schema,
11511164
all_tools,
11521165
handoffs,
1166+
hooks,
11531167
context_wrapper,
11541168
run_config,
11551169
tool_use_tracker,
@@ -1345,6 +1359,7 @@ async def _get_new_response(
13451359
output_schema: AgentOutputSchemaBase | None,
13461360
all_tools: list[Tool],
13471361
handoffs: list[Handoff],
1362+
hooks: RunHooks[TContext],
13481363
context_wrapper: RunContextWrapper[TContext],
13491364
run_config: RunConfig,
13501365
tool_use_tracker: AgentToolUseTracker,
@@ -1364,14 +1379,21 @@ async def _get_new_response(
13641379
model = cls._get_model(agent, run_config)
13651380
model_settings = agent.model_settings.resolve(run_config.model_settings)
13661381
model_settings = RunImpl.maybe_reset_tool_choice(agent, tool_use_tracker, model_settings)
1367-
# If the agent has hooks, we need to call them before and after the LLM call
1368-
if agent.hooks:
1369-
await agent.hooks.on_llm_start(
1370-
context_wrapper,
1371-
agent,
1372-
filtered.instructions, # Use filtered instructions
1373-
filtered.input, # Use filtered input
1374-
)
1382+
1383+
# If we have run hooks, or if the agent has hooks, we need to call them before the LLM call
1384+
await asyncio.gather(
1385+
hooks.on_llm_start(context_wrapper, agent, filtered.instructions, filtered.input),
1386+
(
1387+
agent.hooks.on_llm_start(
1388+
context_wrapper,
1389+
agent,
1390+
filtered.instructions, # Use filtered instructions
1391+
filtered.input, # Use filtered input
1392+
)
1393+
if agent.hooks
1394+
else _coro.noop_coroutine()
1395+
),
1396+
)
13751397

13761398
new_response = await model.get_response(
13771399
system_instructions=filtered.instructions,
@@ -1387,9 +1409,15 @@ async def _get_new_response(
13871409
conversation_id=conversation_id,
13881410
prompt=prompt_config,
13891411
)
1390-
# If the agent has hooks, we need to call them after the LLM call
1391-
if agent.hooks:
1392-
await agent.hooks.on_llm_end(context_wrapper, agent, new_response)
1412+
# If we have run hooks, or if the agent has hooks, we need to call them after the LLM call
1413+
await asyncio.gather(
1414+
(
1415+
agent.hooks.on_llm_end(context_wrapper, agent, new_response)
1416+
if agent.hooks
1417+
else _coro.noop_coroutine()
1418+
),
1419+
hooks.on_llm_end(context_wrapper, agent, new_response),
1420+
)
13931421

13941422
context_wrapper.usage.add(new_response.usage)
13951423

0 commit comments

Comments
 (0)