From a11e5bc5771ff87896aca6c540888447a756021e Mon Sep 17 00:00:00 2001
From: Peter Hoburg
Date: Fri, 18 Apr 2025 16:37:11 -0400
Subject: [PATCH 1/3] Added request_id to the model response. Docs and tests
 still need to be updated. Only changed the bedrock model to support the new
 field.

---
 pydantic_ai_slim/pydantic_ai/messages.py       | 3 +++
 pydantic_ai_slim/pydantic_ai/models/bedrock.py | 3 ++-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/pydantic_ai_slim/pydantic_ai/messages.py b/pydantic_ai_slim/pydantic_ai/messages.py
index efe816ac4..96e4e4a24 100644
--- a/pydantic_ai_slim/pydantic_ai/messages.py
+++ b/pydantic_ai_slim/pydantic_ai/messages.py
@@ -556,6 +556,9 @@ class ModelResponse:
     kind: Literal['response'] = 'response'
     """Message type identifier, this is available on all parts as a discriminator."""
 
+    request_id: str | None = None
+    """Request ID as specified by the model provider."""
+
     def otel_events(self) -> list[Event]:
         """Return OpenTelemetry events for the response."""
         result: list[Event] = []
diff --git a/pydantic_ai_slim/pydantic_ai/models/bedrock.py b/pydantic_ai_slim/pydantic_ai/models/bedrock.py
index a8882ba62..0851ab4ec 100644
--- a/pydantic_ai_slim/pydantic_ai/models/bedrock.py
+++ b/pydantic_ai_slim/pydantic_ai/models/bedrock.py
@@ -269,7 +269,8 @@ async def _process_response(self, response: ConverseResponseTypeDef) -> tuple[Mo
             response_tokens=response['usage']['outputTokens'],
             total_tokens=response['usage']['totalTokens'],
         )
-        return ModelResponse(items, model_name=self.model_name), u
+        request_id = response.get('ResponseMetadata', {}).get('RequestId', None)
+        return ModelResponse(items, model_name=self.model_name, request_id=request_id), u
 
     @overload
     async def _messages_create(

From 654b6570c380de010e865efa7279fb8d401d2297 Mon Sep 17 00:00:00 2001
From: Peter Hoburg
Date: Fri, 18 Apr 2025 22:46:21 -0400
Subject: [PATCH 2/3] Fixed tests.

---
 docs/agents.md                        | 4 ++++
 docs/message-history.md               | 6 ++++++
 docs/tools.md                         | 3 +++
 pydantic_ai_slim/pydantic_ai/agent.py | 3 +++
 tests/test_agent.py                   | 1 +
 5 files changed, 17 insertions(+)

diff --git a/docs/agents.md b/docs/agents.md
index e32b30c30..699443de9 100644
--- a/docs/agents.md
+++ b/docs/agents.md
@@ -148,6 +148,7 @@ async def main():
                 model_name='gpt-4o',
                 timestamp=datetime.datetime(...),
                 kind='response',
+                request_id=None,
             )
         ),
         End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
@@ -212,6 +213,7 @@ async def main():
                     model_name='gpt-4o',
                     timestamp=datetime.datetime(...),
                     kind='response',
+                    request_id=None,
                 )
             ),
             End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
@@ -808,6 +810,7 @@ with capture_run_messages() as messages:  # (2)!
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
     ModelRequest(
         parts=[
@@ -834,6 +837,7 @@ with capture_run_messages() as messages:  # (2)!
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
 ]
 """
diff --git a/docs/message-history.md b/docs/message-history.md
index 608113471..92034832a 100644
--- a/docs/message-history.md
+++ b/docs/message-history.md
@@ -67,6 +67,7 @@ print(result.all_messages())
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
 ]
 """
@@ -145,6 +146,7 @@ async def main():
             model_name='gpt-4o',
             timestamp=datetime.datetime(...),
             kind='response',
+            request_id=None,
         ),
     ]
     """
@@ -204,6 +206,7 @@ print(result2.all_messages())
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
     ModelRequest(
         parts=[
@@ -226,6 +229,7 @@ print(result2.all_messages())
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
 ]
 """
@@ -332,6 +336,7 @@ print(result2.all_messages())
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
     ModelRequest(
         parts=[
@@ -354,6 +359,7 @@ print(result2.all_messages())
         model_name='gemini-1.5-pro',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
 ]
 """
diff --git a/docs/tools.md b/docs/tools.md
index 519eda2af..76816eed8 100644
--- a/docs/tools.md
+++ b/docs/tools.md
@@ -97,6 +97,7 @@ print(dice_result.all_messages())
         model_name='gemini-1.5-flash',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
     ModelRequest(
         parts=[
@@ -123,6 +124,7 @@ print(dice_result.all_messages())
         model_name='gemini-1.5-flash',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
     ModelRequest(
         parts=[
@@ -147,6 +149,7 @@ print(dice_result.all_messages())
         model_name='gemini-1.5-flash',
         timestamp=datetime.datetime(...),
         kind='response',
+        request_id=None,
     ),
 ]
 """
diff --git a/pydantic_ai_slim/pydantic_ai/agent.py b/pydantic_ai_slim/pydantic_ai/agent.py
index 25303a11c..25ce90ad2 100644
--- a/pydantic_ai_slim/pydantic_ai/agent.py
+++ b/pydantic_ai_slim/pydantic_ai/agent.py
@@ -538,6 +538,7 @@ async def main():
                         model_name='gpt-4o',
                         timestamp=datetime.datetime(...),
                         kind='response',
+                        request_id=None,
                     )
                 ),
                 End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
@@ -1664,6 +1665,7 @@ async def main():
                         model_name='gpt-4o',
                         timestamp=datetime.datetime(...),
                         kind='response',
+                        request_id=None,
                     )
                 ),
                 End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
@@ -1802,6 +1804,7 @@ async def main():
                         model_name='gpt-4o',
                         timestamp=datetime.datetime(...),
                         kind='response',
+                        request_id=None,
                     )
                 ),
                 End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
diff --git a/tests/test_agent.py b/tests/test_agent.py
index cc9a7a339..c0649b930 100644
--- a/tests/test_agent.py
+++ b/tests/test_agent.py
@@ -1697,6 +1697,7 @@ def test_binary_content_all_messages_json():
             {
                 'parts': [{'content': 'success (no tool calls)', 'part_kind': 'text'}],
                 'model_name': 'test',
+                'request_id': None,
                 'timestamp': IsStr(),
                 'kind': 'response',
             },

From 2b856f4650fa42afe95454c0e1c47aba2d93a8ff Mon Sep 17 00:00:00 2001
From: Peter Hoburg
Date: Mon, 28 Apr 2025 21:16:53 -0400
Subject: [PATCH 3/3] changed request_id to vendor_id

---
 docs/agents.md                                 |  8 ++++----
 docs/message-history.md                        | 12 ++++++------
 docs/tools.md                                  |  6 +++---
 pydantic_ai_slim/pydantic_ai/agent.py          |  6 +++---
 pydantic_ai_slim/pydantic_ai/messages.py       |  4 ++--
 pydantic_ai_slim/pydantic_ai/models/bedrock.py |  4 ++--
 tests/test_agent.py                            |  2 +-
 7 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/docs/agents.md b/docs/agents.md
index 699443de9..a16aacf0a 100644
--- a/docs/agents.md
+++ b/docs/agents.md
@@ -148,7 +148,7 @@ async def main():
                 model_name='gpt-4o',
                 timestamp=datetime.datetime(...),
                 kind='response',
-                request_id=None,
+                vendor_id=None,
             )
         ),
         End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
@@ -213,7 +213,7 @@ async def main():
                     model_name='gpt-4o',
                     timestamp=datetime.datetime(...),
                     kind='response',
-                    request_id=None,
+                    vendor_id=None,
                 )
             ),
             End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
@@ -810,7 +810,7 @@ with capture_run_messages() as messages:  # (2)!
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
     ModelRequest(
         parts=[
@@ -837,7 +837,7 @@ with capture_run_messages() as messages:  # (2)!
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
 ]
 """
diff --git a/docs/message-history.md b/docs/message-history.md
index 92034832a..2a8243c55 100644
--- a/docs/message-history.md
+++ b/docs/message-history.md
@@ -67,7 +67,7 @@ print(result.all_messages())
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
 ]
 """
@@ -146,7 +146,7 @@ async def main():
             model_name='gpt-4o',
             timestamp=datetime.datetime(...),
             kind='response',
-            request_id=None,
+            vendor_id=None,
         ),
     ]
     """
@@ -206,7 +206,7 @@ print(result2.all_messages())
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
     ModelRequest(
         parts=[
@@ -229,7 +229,7 @@ print(result2.all_messages())
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
 ]
 """
@@ -336,7 +336,7 @@ print(result2.all_messages())
         model_name='gpt-4o',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
     ModelRequest(
         parts=[
@@ -359,7 +359,7 @@ print(result2.all_messages())
         model_name='gemini-1.5-pro',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
 ]
 """
diff --git a/docs/tools.md b/docs/tools.md
index 7e170eb81..5696f7f04 100644
--- a/docs/tools.md
+++ b/docs/tools.md
@@ -99,7 +99,7 @@ print(dice_result.all_messages())
         model_name='gemini-1.5-flash',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
     ModelRequest(
         parts=[
@@ -126,7 +126,7 @@ print(dice_result.all_messages())
         model_name='gemini-1.5-flash',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
     ModelRequest(
         parts=[
@@ -151,7 +151,7 @@ print(dice_result.all_messages())
         model_name='gemini-1.5-flash',
         timestamp=datetime.datetime(...),
         kind='response',
-        request_id=None,
+        vendor_id=None,
     ),
 ]
 """
diff --git a/pydantic_ai_slim/pydantic_ai/agent.py b/pydantic_ai_slim/pydantic_ai/agent.py
index 31a974047..98178e592 100644
--- a/pydantic_ai_slim/pydantic_ai/agent.py
+++ b/pydantic_ai_slim/pydantic_ai/agent.py
@@ -554,7 +554,7 @@ async def main():
                         model_name='gpt-4o',
                         timestamp=datetime.datetime(...),
                         kind='response',
-                        request_id=None,
+                        vendor_id=None,
                     )
                 ),
                 End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
@@ -1718,7 +1718,7 @@ async def main():
                         model_name='gpt-4o',
                         timestamp=datetime.datetime(...),
                         kind='response',
-                        request_id=None,
+                        vendor_id=None,
                     )
                 ),
                 End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
@@ -1857,7 +1857,7 @@ async def main():
                         model_name='gpt-4o',
                         timestamp=datetime.datetime(...),
                         kind='response',
-                        request_id=None,
+                        vendor_id=None,
                     )
                 ),
                 End(data=FinalResult(output='Paris', tool_name=None, tool_call_id=None)),
diff --git a/pydantic_ai_slim/pydantic_ai/messages.py b/pydantic_ai_slim/pydantic_ai/messages.py
index b11336adc..f0a71b100 100644
--- a/pydantic_ai_slim/pydantic_ai/messages.py
+++ b/pydantic_ai_slim/pydantic_ai/messages.py
@@ -566,8 +566,8 @@ class ModelResponse:
     kind: Literal['response'] = 'response'
     """Message type identifier, this is available on all parts as a discriminator."""
 
-    request_id: str | None = None
-    """Request ID as specified by the model provider."""
+    vendor_id: str | None = None
+    """Vendor ID as specified by the model provider. This can be used to track the specific request to the model."""
 
     def otel_events(self) -> list[Event]:
         """Return OpenTelemetry events for the response."""
diff --git a/pydantic_ai_slim/pydantic_ai/models/bedrock.py b/pydantic_ai_slim/pydantic_ai/models/bedrock.py
index 0851ab4ec..5996d7b64 100644
--- a/pydantic_ai_slim/pydantic_ai/models/bedrock.py
+++ b/pydantic_ai_slim/pydantic_ai/models/bedrock.py
@@ -269,8 +269,8 @@ async def _process_response(self, response: ConverseResponseTypeDef) -> tuple[Mo
             response_tokens=response['usage']['outputTokens'],
             total_tokens=response['usage']['totalTokens'],
         )
-        request_id = response.get('ResponseMetadata', {}).get('RequestId', None)
-        return ModelResponse(items, model_name=self.model_name, request_id=request_id), u
+        vendor_id = response.get('ResponseMetadata', {}).get('RequestId', None)
+        return ModelResponse(items, model_name=self.model_name, vendor_id=vendor_id), u
 
     @overload
     async def _messages_create(
diff --git a/tests/test_agent.py b/tests/test_agent.py
index 43b1b8f5e..1e5523f0e 100644
--- a/tests/test_agent.py
+++ b/tests/test_agent.py
@@ -1697,7 +1697,7 @@ def test_binary_content_all_messages_json():
             {
                 'parts': [{'content': 'success (no tool calls)', 'part_kind': 'text'}],
                 'model_name': 'test',
-                'request_id': None,
+                'vendor_id': None,
                 'timestamp': IsStr(),
                 'kind': 'response',
            },
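
Note: below is a minimal, hypothetical sketch of how the new field could be read once this series is applied. It assumes the `Agent.run_sync()` / `result.all_messages()` API already shown in the docs diffs above; the Bedrock model string is illustrative only, and per this series only the Bedrock model populates `vendor_id`, so other providers will still report `None`.

    from pydantic_ai import Agent
    from pydantic_ai.messages import ModelResponse

    # Hypothetical Bedrock model id, shown only for illustration.
    agent = Agent('bedrock:us.amazon.nova-micro-v1:0')

    result = agent.run_sync('What is the capital of France?')
    for message in result.all_messages():
        if isinstance(message, ModelResponse):
            # For Bedrock this comes from ResponseMetadata['RequestId'];
            # providers that don't set it leave the field as None.
            print(message.vendor_id)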