import uuid
import asyncio
from typing import Optional, Any
import json
import sys

from tools import weather_tool

from multi_agent_orchestrator.orchestrator import MultiAgentOrchestrator, OrchestratorConfig
from multi_agent_orchestrator.agents import (BedrockLLMAgent,
                                             BedrockLLMAgentOptions,
                                             AgentResponse,
                                             AgentStreamResponse,
                                             AgentCallbacks)
from multi_agent_orchestrator.types import ConversationMessage, ParticipantRole
from multi_agent_orchestrator.utils import AgentTools

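# Callback handler that prints each new token to stdout as it is generated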
class LLMAgentCallbacks(AgentCallbacks):
    def on_llm_new_token(self, token: str) -> None:
        print(token, end='', flush=True)


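# Route a single user request through the orchestrator, print which agent was
# selected, then print that agent's response (token by token when streaming).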
async def handle_request(_orchestrator: MultiAgentOrchestrator, _user_input: str, _user_id: str, _session_id: str):
    stream_response = True
    response: AgentResponse = await _orchestrator.route_request(_user_input, _user_id, _session_id, {}, stream_response)

    # Print metadata
    print("\nMetadata:")
    print(f"Selected Agent: {response.metadata.agent_name}")

    if stream_response and response.streaming:
        # Streaming agents return an async iterator of chunks
        async for chunk in response.output:
            if isinstance(chunk, AgentStreamResponse):
                print(chunk.text, end='', flush=True)
    else:
        # Non-streaming agents return a complete message (or a plain string)
        if isinstance(response.output, ConversationMessage):
            print(response.output.content[0]['text'])
        else:
            print(response.output)

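# Example payload encoder/decoder pair; the decoder's shape suggests a Lambda-backed
# agent (it reads a boto3 invoke 'Payload'). Neither is wired to an agent in this demo.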
def custom_input_payload_encoder(input_text: str,
                                 chat_history: list[Any],
                                 user_id: str,
                                 session_id: str,
                                 additional_params: Optional[dict[str, str]] = None) -> str:
    return json.dumps({
        'hello': 'world'
    })

def custom_output_payload_decoder(response: dict[str, Any]) -> Any:
    decoded_response = json.loads(
        json.loads(
            response['Payload'].read().decode('utf-8')
        )['body'])['response']
    return ConversationMessage(
        role=ParticipantRole.ASSISTANT.value,
        content=[{'text': decoded_response}]
    )

if __name__ == "__main__":

    # Initialize the orchestrator with some options
    orchestrator = MultiAgentOrchestrator(options=OrchestratorConfig(
        LOG_AGENT_CHAT=True,
        LOG_CLASSIFIER_CHAT=True,
        LOG_CLASSIFIER_RAW_OUTPUT=True,
        LOG_CLASSIFIER_OUTPUT=True,
        LOG_EXECUTION_TIMES=True,
        MAX_RETRIES=3,
        USE_DEFAULT_AGENT_IF_NONE_IDENTIFIED=True,
        MAX_MESSAGE_PAIRS_PER_AGENT=10,
    ))

    # Add a tech agent (streams its responses)
    tech_agent = BedrockLLMAgent(BedrockLLMAgentOptions(
        name="Tech Agent",
        streaming=True,
        description="Specializes in technology areas including software development, hardware, AI, "
                    "cybersecurity, blockchain, cloud computing, emerging tech innovations, and "
                    "pricing/costs related to technology products and services.",
        model_id="anthropic.claude-3-sonnet-20240229-v1:0",
        # callbacks=LLMAgentCallbacks()
    ))
    orchestrator.add_agent(tech_agent)

    # Add a health agent
    health_agent = BedrockLLMAgent(BedrockLLMAgentOptions(
        name="Health Agent",
        streaming=False,
        description="Specializes in health and well-being.",
        model_id="anthropic.claude-3-sonnet-20240229-v1:0",
    ))
    orchestrator.add_agent(health_agent)

    # Add an Anthropic weather agent with a tool in Anthropic's tool format
    # weather_agent = AnthropicAgent(AnthropicAgentOptions(
    #     api_key='api-key',
    #     name="Weather Agent",
    #     streaming=False,
    #     description="Specialized agent for giving weather conditions for a city.",
    #     tool_config={
    #         'tool': [tool.to_claude_format() for tool in weather_tool.weather_tools.tools],
    #         'toolMaxRecursions': 5,
    #         'useToolHandler': weather_tool.anthropic_weather_tool_handler
    #     },
    #     callbacks=LLMAgentCallbacks()
    # ))

    # Add an Anthropic weather agent with the Tools class
    # weather_agent = AnthropicAgent(AnthropicAgentOptions(
    #     api_key='api-key',
    #     name="Weather Agent",
    #     streaming=True,
    #     description="Specialized agent for giving weather conditions for a city.",
    #     tool_config={
    #         'tool': weather_tool.weather_tools,
    #         'toolMaxRecursions': 5,
    #     },
    #     callbacks=LLMAgentCallbacks()
    # ))

    # Add a Bedrock weather agent with the Tools class
    # weather_agent = BedrockLLMAgent(BedrockLLMAgentOptions(
    #     name="Weather Agent",
    #     streaming=False,
    #     description="Specialized agent for giving weather conditions for a city.",
    #     tool_config={
    #         'tool': weather_tool.weather_tools,
    #         'toolMaxRecursions': 5,
    #     },
    #     callbacks=LLMAgentCallbacks(),
    # ))

    # Add a Bedrock weather agent with a custom handler and Bedrock's tool format
    weather_agent = BedrockLLMAgent(BedrockLLMAgentOptions(
        name="Weather Agent",
        streaming=False,
        description="Specialized agent for giving weather conditions for a city.",
        tool_config={
            'tool': [tool.to_bedrock_format() for tool in weather_tool.weather_tools.tools],
            'toolMaxRecursions': 5,
            'useToolHandler': weather_tool.bedrock_weather_tool_handler
        }
    ))

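    # Attach the weather-specific system prompt (defined in the tools module) so the
    # model knows when and how to call the weather tool.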
    weather_agent.set_system_prompt(weather_tool.weather_tool_prompt)
    orchestrator.add_agent(weather_agent)

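    # Conversation history is scoped per user/session pair, so a fresh session id
    # gives each run of the script a clean conversation.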
    USER_ID = "user123"
    SESSION_ID = str(uuid.uuid4())

    print("Welcome to the interactive Multi-Agent system. Type 'quit' to exit.")

    while True:
        # Get user input
        user_input = input("\nYou: ").strip()

        if user_input.lower() == 'quit':
            print("Exiting the program. Goodbye!")
            sys.exit()

        # Run the async function
        asyncio.run(handle_request(orchestrator, user_input, USER_ID, SESSION_ID))