Skip to content

Commit a26fd39

Browse files
committed: feat: Add lifecycle hooks for the agent LLM request/response cycle
1 parent 83876d5 commit a26fd39

File tree

3 files changed

+13
-17
lines changed

3 files changed

+13
-17
lines changed

src/agents/lifecycle.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -34,9 +34,7 @@ async def on_llm_end(
3434
"""Called immediately after the LLM call returns for this agent."""
3535
pass
3636

37-
async def on_agent_start(
38-
self, context: RunContextWrapper[TContext], agent: TAgent
39-
) -> None:
37+
async def on_agent_start(self, context: RunContextWrapper[TContext], agent: TAgent) -> None:
4038
"""Called before the agent is invoked. Called each time the current agent changes."""
4139
pass
4240

@@ -152,4 +150,4 @@ async def on_llm_end(
152150
"""Run hooks when using `Agent`."""
153151

154152
AgentHooks = AgentHooksBase[TContext, Agent]
155-
"""Agent hooks for `Agent`s."""
153+
"""Agent hooks for `Agent`s."""

src/agents/run.py

Lines changed: 10 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -943,11 +943,13 @@ async def _run_single_turn_streamed(
943943
input_items=input,
944944
system_instructions=system_prompt,
945945
)
946-
946+
947947
# Call hook just before the model is invoked, with the correct system_prompt.
948948
if agent.hooks:
949-
await agent.hooks.on_llm_start(context_wrapper, agent, filtered.instructions, filtered.input)
950-
949+
await agent.hooks.on_llm_start(
950+
context_wrapper, agent, filtered.instructions, filtered.input
951+
)
952+
951953
# 1. Stream the output events
952954
async for event in model.stream_response(
953955
filtered.instructions,
@@ -985,7 +987,7 @@ async def _run_single_turn_streamed(
985987
streamed_result._event_queue.put_nowait(RawResponsesStreamEvent(data=event))
986988

987989
# Call hook just after the model response is finalized.
988-
if agent.hooks:
990+
if agent.hooks and final_response is not None:
989991
await agent.hooks.on_llm_end(context_wrapper, agent, final_response)
990992

991993
# 2. At this point, the streaming is complete for this turn of the agent loop.
@@ -1266,8 +1268,8 @@ async def _get_new_response(
12661268
await agent.hooks.on_llm_start(
12671269
context_wrapper,
12681270
agent,
1269-
filtered.instructions, # Use filtered instructions
1270-
filtered.input # Use filtered input
1271+
filtered.instructions, # Use filtered instructions
1272+
filtered.input, # Use filtered input
12711273
)
12721274

12731275
new_response = await model.get_response(
@@ -1285,11 +1287,7 @@ async def _get_new_response(
12851287
)
12861288
# If the agent has hooks, we need to call them after the LLM call
12871289
if agent.hooks:
1288-
await agent.hooks.on_llm_end(
1289-
context_wrapper,
1290-
agent,
1291-
new_response
1292-
)
1290+
await agent.hooks.on_llm_end(context_wrapper, agent, new_response)
12931291

12941292
context_wrapper.usage.add(new_response.usage)
12951293

@@ -1404,4 +1402,4 @@ async def _save_result_to_session(
14041402
def _copy_str_or_list(input: str | list[TResponseInputItem]) -> str | list[TResponseInputItem]:
14051403
if isinstance(input, str):
14061404
return input
1407-
return input.copy()
1405+
return input.copy()

tests/test_agent_llm_hooks.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -127,4 +127,4 @@ async def test_streamed_agent_hooks_with_llm():
127127
print(f"[EVENT] message_output_item → {text}")
128128

129129
# Expect one on_start, one on_llm_start, one on_llm_end, and one on_end
130-
assert hooks.events == {"on_start": 1, "on_llm_start": 1, "on_llm_end": 1, "on_end": 1}
130+
assert hooks.events == {"on_start": 1, "on_llm_start": 1, "on_llm_end": 1, "on_end": 1}

0 commit comments

Comments (0)