|
2 | 2 |
|
3 | 3 | from __future__ import annotations |
4 | 4 |
|
| 5 | +import uuid |
5 | 6 | from collections.abc import Sequence |
6 | 7 | from functools import cached_property |
7 | 8 | from typing import ( |
|
11 | 12 |
|
12 | 13 | from ... import ExternalToolset, ToolDefinition |
13 | 14 | from ...messages import ( |
| 15 | + BaseToolCallPart, |
14 | 16 | BuiltinToolCallPart, |
15 | 17 | BuiltinToolReturnPart, |
16 | 18 | ModelMessage, |
| 19 | + ModelRequest, |
| 20 | + ModelRequestPart, |
| 21 | + ModelResponse, |
| 22 | + ModelResponsePart, |
| 23 | + RetryPromptPart, |
17 | 24 | SystemPromptPart, |
18 | 25 | TextPart, |
| 26 | + ThinkingPart, |
19 | 27 | ToolCallPart, |
20 | 28 | ToolReturnPart, |
21 | 29 | UserPromptPart, |
|
29 | 37 | AssistantMessage, |
30 | 38 | BaseEvent, |
31 | 39 | DeveloperMessage, |
| 40 | + FunctionCall, |
32 | 41 | Message, |
33 | 42 | RunAgentInput, |
34 | 43 | SystemMessage, |
35 | 44 | Tool as AGUITool, |
| 45 | + ToolCall, |
36 | 46 | ToolMessage, |
37 | 47 | UserMessage, |
38 | 48 | ) |
@@ -185,3 +195,172 @@ def load_messages(cls, messages: Sequence[Message]) -> list[ModelMessage]: |
185 | 195 | ) |
186 | 196 |
|
187 | 197 | return builder.messages |
| 198 | + |
| 199 | + @classmethod |
| 200 | + def dump_messages(cls, messages: Sequence[ModelMessage]) -> list[Message]: |
| 201 | + """Transform Pydantic AI messages into AG-UI messages. |
| 202 | +
|
| 203 | + This is the reverse operation of [`load_messages`][pydantic_ai.ui.ag_ui.AGUIAdapter.load_messages]. |
| 204 | +
|
| 205 | + Args: |
| 206 | + messages: Sequence of Pydantic AI ModelMessage objects (ModelRequest or ModelResponse). |
| 207 | +
|
| 208 | + Returns: |
| 209 | + List of AG-UI Message objects. |
| 210 | +
|
| 211 | + Example: |
| 212 | + ```python |
| 213 | + from pydantic_ai.messages import ModelRequest, UserPromptPart |
| 214 | + from pydantic_ai.ui.ag_ui import AGUIAdapter |
| 215 | +
|
| 216 | + messages = [ModelRequest(parts=[UserPromptPart(content='Hello!')])] |
| 217 | + ag_ui_messages = AGUIAdapter.dump_messages(messages) |
| 218 | + ``` |
| 219 | +
|
| 220 | + Notes: |
| 221 | + - `ModelRequest` parts (UserPromptPart, SystemPromptPart, ToolReturnPart, RetryPromptPart) |
| 222 | + become separate AG-UI messages. |
| 223 | + - `ModelResponse` parts (TextPart, ToolCallPart, BuiltinToolCallPart) are combined |
| 224 | + into a single AssistantMessage. |
| 225 | + - `BuiltinToolReturnPart` becomes a separate ToolMessage with prefixed ID. |
| 226 | + - `ThinkingPart` is skipped as it's not part of the conversational message history. |
| 227 | + """ |
| 228 | + result: list[Message] = [] |
| 229 | + |
| 230 | + for message in messages: |
| 231 | + if isinstance(message, ModelRequest): |
| 232 | + for part in message.parts: |
| 233 | + converted = _convert_request_part(part) |
| 234 | + if converted: |
| 235 | + result.append(converted) |
| 236 | + |
| 237 | + elif isinstance(message, ModelResponse): |
| 238 | + assistant_messages, builtin_returns = _convert_response_parts(message.parts) |
| 239 | + result.extend(assistant_messages) |
| 240 | + |
| 241 | + # Create separate ToolMessages for builtin tool returns |
| 242 | + for builtin_return in builtin_returns: |
| 243 | + prefixed_id = _get_builtin_tool_call_id( |
| 244 | + builtin_return.tool_call_id, builtin_return.provider_name or '' |
| 245 | + ) |
| 246 | + result.append( |
| 247 | + ToolMessage( |
| 248 | + id=str(uuid.uuid4()), |
| 249 | + tool_call_id=prefixed_id, |
| 250 | + content=builtin_return.model_response_str(), |
| 251 | + ) |
| 252 | + ) |
| 253 | + |
| 254 | + return result |
| 255 | + |
| 256 | + |
def _convert_request_part(part: ModelRequestPart) -> Message | None:
    """Map a single `ModelRequest` part onto an AG-UI message.

    Args:
        part: One part of a `ModelRequest`.

    Returns:
        The equivalent AG-UI message, or `None` for part types with no AG-UI counterpart.
    """
    message_id = str(uuid.uuid4())

    if isinstance(part, UserPromptPart):
        text = part.content if isinstance(part.content, str) else str(part.content)
        return UserMessage(id=message_id, content=text)

    if isinstance(part, SystemPromptPart):
        text = part.content if isinstance(part.content, str) else str(part.content)
        return SystemMessage(id=message_id, content=text)

    if isinstance(part, ToolReturnPart):
        return ToolMessage(
            id=message_id,
            tool_call_id=part.tool_call_id,
            content=part.model_response_str(),
        )

    if isinstance(part, RetryPromptPart):
        # A retry tied to a tool call surfaces as tool output; otherwise as user input.
        if part.tool_call_id:
            return ToolMessage(
                id=message_id,
                tool_call_id=part.tool_call_id,
                content=part.model_response(),
            )
        return UserMessage(id=message_id, content=part.model_response())

    return None  # pragma: no cover
| 297 | + |
| 298 | + |
def _convert_response_parts(parts: Sequence[ModelResponsePart]) -> tuple[list[Message], list[BuiltinToolReturnPart]]:
    """Fold `ModelResponse` parts into AG-UI assistant messages plus builtin tool returns.

    Args:
        parts: The parts of a single `ModelResponse`.

    Returns:
        A `(messages, builtin_returns)` pair: at most one `AssistantMessage` combining
        the text and tool calls, and the `BuiltinToolReturnPart`s for the caller to emit
        as separate `ToolMessage`s.
    """
    text_chunks: list[str] = []
    calls: list[ToolCall] = []
    builtin_returns: list[BuiltinToolReturnPart] = []
    prev_was_text = False

    for part in parts:
        if isinstance(part, TextPart):
            text_chunks.append(part.content)
            prev_was_text = True
        elif isinstance(part, BaseToolCallPart):
            call_id = part.tool_call_id
            if isinstance(part, BuiltinToolCallPart):
                # A builtin tool call breaks the text flow, so adjacent text runs
                # must not be joined together directly.
                if prev_was_text:
                    text_chunks.append('\n\n')
                    prev_was_text = False
                call_id = _get_builtin_tool_call_id(call_id, part.provider_name or '')
            calls.append(
                ToolCall(
                    id=call_id,
                    function=FunctionCall(name=part.tool_name, arguments=part.args_as_json_str()),
                )
            )
        elif isinstance(part, BuiltinToolReturnPart):
            # Collected for the caller, which emits them as standalone ToolMessages.
            builtin_returns.append(part)
            prev_was_text = False  # builtin returns also interrupt the text flow
        elif isinstance(part, ThinkingPart):
            # Thinking content is handled via streaming events, not message history.
            continue

    messages: list[Message] = []
    if text_chunks or calls:
        messages.append(
            AssistantMessage(
                id=str(uuid.uuid4()),
                content=''.join(text_chunks) if text_chunks else None,
                tool_calls=calls or None,
            )
        )

    return messages, builtin_returns
| 354 | + |
| 355 | + |
def _get_builtin_tool_call_id(tool_call_id: str, provider_name: str) -> str:
    """Build the prefixed tool-call ID used for builtin tool results.

    Args:
        tool_call_id: The original tool call ID.
        provider_name: The provider's name (e.g. 'function', 'openai'); may be empty.

    Returns:
        The ID in the form `'{BUILTIN_TOOL_CALL_ID_PREFIX}|{provider_name}|{tool_call_id}'`.
    """
    return '|'.join((BUILTIN_TOOL_CALL_ID_PREFIX, provider_name, tool_call_id))
0 commit comments