diff --git a/.github/workflows/e2e-test.yml b/.github/workflows/e2e-test.yml
index e72e5b6bb..94a08de23 100644
--- a/.github/workflows/e2e-test.yml
+++ b/.github/workflows/e2e-test.yml
@@ -23,6 +23,7 @@ on:
       - 'helm/**'
 jobs:
   e2e-test:
+    timeout-minutes: 25
     runs-on: ubuntu-latest
     env:
       AGENTSTACK__HOME: ${{ github.workspace }}/.agentstack
@@ -49,7 +50,7 @@ jobs:
       - run: mise run agentstack-server:test:e2e
         env:
           LLM_API_BASE: "${{ secrets.OPENAI_API_BASE }}"
-          LLM_MODEL: "${{ secrets.OPENAI_MODEL }}"
+          LLM_MODEL: "${{ vars.OPENAI_MODEL }}"
           LLM_API_KEY: "${{ secrets.OPENAI_API_KEY }}"
           # LLM_API_BASE: "https://api.groq.com/openai/v1"
           # LLM_MODEL: "groq:groq/compound"
diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml
index 4cba3c2d5..82cd58f7d 100644
--- a/.github/workflows/integration-test.yml
+++ b/.github/workflows/integration-test.yml
@@ -21,6 +21,7 @@ on:
       - 'apps/agentstack-cli/**'
 jobs:
   integration-test:
+    timeout-minutes: 25
     runs-on: ubuntu-latest
     steps:
       - name: Maximize build space
diff --git a/CLAUDE.md b/CLAUDE.md
new file mode 100644
index 000000000..b89a59a44
--- /dev/null
+++ b/CLAUDE.md
@@ -0,0 +1,7 @@
+# Agent Stack
+
+## GitHub Operations
+
+Use `gh` command for GitHub operations.
+
+Repo: `i-am-bee/agentstack`
diff --git a/agents/chat/.vscode/launch.json b/agents/chat/.vscode/launch.json
index 7ad26a9d3..c201198d6 100644
--- a/agents/chat/.vscode/launch.json
+++ b/agents/chat/.vscode/launch.json
@@ -7,6 +7,7 @@
     {
      "name": "agent-chat",
      "type": "debugpy",
+     "justMyCode": false,
      "request": "launch",
      "program": "${workspaceFolder}/src/chat/agent.py",
      "console": "integratedTerminal"
diff --git a/agents/chat/Dockerfile b/agents/chat/Dockerfile
index f1a7c0117..b2c1ebf6f 100644
--- a/agents/chat/Dockerfile
+++ b/agents/chat/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.13-alpine3.22
+FROM python:3.13-slim
 ARG RELEASE_VERSION="main"
 COPY ./agents/chat/ /app/agents/chat
 COPY ./apps/agentstack-sdk-py/ /app/apps/agentstack-sdk-py/
diff --git a/agents/chat/pyproject.toml b/agents/chat/pyproject.toml
index 83cfeaf22..e87d36d62 100644
--- a/agents/chat/pyproject.toml
+++ b/agents/chat/pyproject.toml
@@ -7,7 +7,7 @@ authors = [
 ]
 requires-python = ">=3.13,<3.14"
 dependencies = [
-    "beeai-framework[duckduckgo,wikipedia,a2a]>=0.1.70",
+    "beeai-framework[duckduckgo,wikipedia,a2a]==0.1.70",
    "tiktoken>=0.12.0", # constraint for beeai-framework dependency (first version with musl arm64 wheel)
    "fastuuid>=0.14.0", # constraint for beeai-framework dependency (first version with musl arm64 wheel)
    "openai>=1.107.1",
diff --git a/agents/chat/src/chat/agent.py b/agents/chat/src/chat/agent.py
index f2b7b12a9..92a85f96b 100644
--- a/agents/chat/src/chat/agent.py
+++ b/agents/chat/src/chat/agent.py
@@ -2,71 +2,58 @@
 # SPDX-License-Identifier: Apache-2.0
 import logging
 import os
-from typing import Annotated
 from textwrap import dedent
+from typing import Annotated
 
 from a2a.types import (
     AgentSkill,
     Message,
 )
-from beeai_framework.agents.requirement.utils._tool import FinalAnswerTool
-from beeai_framework.errors import FrameworkError
-from pydantic import BaseModel
-
 from agentstack_sdk.a2a.extensions import (
     AgentDetail,
     AgentDetailContributor,
     AgentDetailTool,
-    BaseExtensionServer,
-    BaseExtensionSpec,
     CitationExtensionServer,
     CitationExtensionSpec,
     ErrorExtensionParams,
     ErrorExtensionServer,
     ErrorExtensionSpec,
-    TrajectoryExtensionServer,
-    TrajectoryExtensionSpec,
     LLMServiceExtensionServer,
     LLMServiceExtensionSpec,
+    TrajectoryExtensionServer,
+    TrajectoryExtensionSpec,
 )
-
-# Monkey-patch to remove FormExtensionSpec which no longer exists
-# TODO: remove after next release
-import agentstack_sdk.a2a.extensions as agentstack_extensions
-from chat.tools.files.file_reader import FileReaderTool
-
-agentstack_extensions.FormExtensionSpec = BaseExtensionSpec
-agentstack_extensions.FormExtensionServer = BaseExtensionServer
-agentstack_extensions.TextField = BaseModel
-
+from agentstack_sdk.a2a.extensions.services.platform import (
+    PlatformApiExtensionServer,
+    PlatformApiExtensionSpec,
+)
+from agentstack_sdk.a2a.types import AgentArtifact, AgentMessage
+from agentstack_sdk.server import Server
+from agentstack_sdk.server.context import RunContext
+from agentstack_sdk.server.middleware.platform_auth_backend import PlatformAuthBackend
+from agentstack_sdk.server.store.platform_context_store import PlatformContextStore
 from beeai_framework.adapters.agentstack.backend.chat import AgentStackChatModel
 from beeai_framework.agents.requirement import RequirementAgent
 from beeai_framework.agents.requirement.events import (
-    RequirementAgentSuccessEvent,
     RequirementAgentFinalAnswerEvent,
+    RequirementAgentSuccessEvent,
 )
+from beeai_framework.agents.requirement.utils._tool import FinalAnswerTool
+from beeai_framework.backend import AssistantMessage, ChatModelParameters
+from beeai_framework.errors import FrameworkError
 from beeai_framework.middleware.trajectory import GlobalTrajectoryMiddleware
-from beeai_framework.tools import Tool, AnyTool
+from beeai_framework.tools import AnyTool, Tool
 from beeai_framework.tools.search.duckduckgo import DuckDuckGoSearchTool
 from beeai_framework.tools.search.wikipedia import WikipediaTool
 from beeai_framework.tools.weather import OpenMeteoTool
-from beeai_framework.backend import ChatModelParameters, AssistantMessage
-from agentstack_sdk.a2a.extensions.services.platform import (
-    PlatformApiExtensionServer,
-    PlatformApiExtensionSpec,
-)
-from agentstack_sdk.a2a.types import AgentMessage, AgentArtifact
-from agentstack_sdk.server import Server
-from agentstack_sdk.server.context import RunContext
 from openinference.instrumentation.beeai import BeeAIInstrumentor
 
 from chat.helpers.citations import extract_citations
 from chat.helpers.trajectory import TrajectoryContent
 from chat.tools.files.file_creator import FileCreatorTool, FileCreatorToolOutput
+from chat.tools.files.file_reader import FileReaderTool
 from chat.tools.files.utils import extract_files, to_framework_message
-from agentstack_sdk.server.store.platform_context_store import PlatformContextStore
-
 BeeAIInstrumentor().instrument()
 
 logger = logging.getLogger(__name__)
@@ -300,6 +287,7 @@ def serve():
             port=int(os.getenv("PORT", 8000)),
             configure_telemetry=True,
             context_store=PlatformContextStore(),
+            auth_backend=PlatformAuthBackend(),
         )
     except KeyboardInterrupt:
         pass
diff --git a/agents/chat/uv.lock b/agents/chat/uv.lock
index 4f9d2c633..35fb022d9 100644
--- a/agents/chat/uv.lock
+++ b/agents/chat/uv.lock
@@ -41,7 +41,9 @@ source = { editable = "../../apps/agentstack-sdk-py" }
 dependencies = [
     { name = "a2a-sdk" },
     { name = "anyio" },
+    { name = "async-lru" },
     { name = "asyncclick" },
+    { name = "authlib" },
     { name = "fastapi" },
     { name = "httpx" },
     { name = "janus" },
@@ -61,7 +63,9 @@ dependencies = [
 requires-dist = [
     { name = "a2a-sdk", specifier = "==0.3.21" },
     { name = "anyio", specifier = ">=4.9.0" },
+    { name = "async-lru", specifier = ">=2.0.4" },
     { name = "asyncclick", specifier = ">=8.1.8" },
+    { name = "authlib", specifier = ">=1.3.0"
}, { name = "fastapi", specifier = ">=0.116.1" }, { name = "httpx" }, { name = "janus", specifier = ">=2.0.0" }, @@ -107,7 +111,7 @@ wheels = [ [[package]] name = "aiohttp" -version = "3.13.2" +version = "3.13.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohappyeyeballs" }, @@ -118,25 +122,25 @@ dependencies = [ { name = "propcache" }, { name = "yarl" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556, upload-time = "2026-01-03T17:33:05.204Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" }, - { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" }, - { url = "https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" }, - { url = "https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387, upload-time = "2025-10-28T20:57:08.685Z" }, - { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314, upload-time = "2025-10-28T20:57:10.693Z" }, - { url = "https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317, upload-time = "2025-10-28T20:57:12.563Z" }, - { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539, upload-time = "2025-10-28T20:57:14.623Z" }, - { url = 
"https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597, upload-time = "2025-10-28T20:57:16.399Z" }, - { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006, upload-time = "2025-10-28T20:57:18.288Z" }, - { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220, upload-time = "2025-10-28T20:57:20.241Z" }, - { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570, upload-time = "2025-10-28T20:57:22.253Z" }, - { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407, upload-time = "2025-10-28T20:57:24.37Z" }, - { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" }, - { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 1758084, upload-time = "2025-10-28T20:57:28.349Z" }, - { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = "2025-10-28T20:57:30.233Z" }, - { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" }, - { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" }, + { url = "https://files.pythonhosted.org/packages/97/8a/12ca489246ca1faaf5432844adbfce7ff2cc4997733e0af120869345643a/aiohttp-3.13.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5dff64413671b0d3e7d5918ea490bdccb97a4ad29b3f311ed423200b2203e01c", size = 734190, upload-time = "2026-01-03T17:30:45.832Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/08/de43984c74ed1fca5c014808963cc83cb00d7bb06af228f132d33862ca76/aiohttp-3.13.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:87b9aab6d6ed88235aa2970294f496ff1a1f9adcd724d800e9b952395a80ffd9", size = 491783, upload-time = "2026-01-03T17:30:47.466Z" }, + { url = "https://files.pythonhosted.org/packages/17/f8/8dd2cf6112a5a76f81f81a5130c57ca829d101ad583ce57f889179accdda/aiohttp-3.13.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:425c126c0dc43861e22cb1c14ba4c8e45d09516d0a3ae0a3f7494b79f5f233a3", size = 490704, upload-time = "2026-01-03T17:30:49.373Z" }, + { url = "https://files.pythonhosted.org/packages/6d/40/a46b03ca03936f832bc7eaa47cfbb1ad012ba1be4790122ee4f4f8cba074/aiohttp-3.13.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f9120f7093c2a32d9647abcaf21e6ad275b4fbec5b55969f978b1a97c7c86bf", size = 1720652, upload-time = "2026-01-03T17:30:50.974Z" }, + { url = "https://files.pythonhosted.org/packages/f7/7e/917fe18e3607af92657e4285498f500dca797ff8c918bd7d90b05abf6c2a/aiohttp-3.13.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:697753042d57f4bf7122cab985bf15d0cef23c770864580f5af4f52023a56bd6", size = 1692014, upload-time = "2026-01-03T17:30:52.729Z" }, + { url = "https://files.pythonhosted.org/packages/71/b6/cefa4cbc00d315d68973b671cf105b21a609c12b82d52e5d0c9ae61d2a09/aiohttp-3.13.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6de499a1a44e7de70735d0b39f67c8f25eb3d91eb3103be99ca0fa882cdd987d", size = 1759777, upload-time = "2026-01-03T17:30:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/fb/e3/e06ee07b45e59e6d81498b591fc589629be1553abb2a82ce33efe2a7b068/aiohttp-3.13.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:37239e9f9a7ea9ac5bf6b92b0260b01f8a22281996da609206a84df860bc1261", size = 1861276, upload-time = "2026-01-03T17:30:56.512Z" }, + { url = "https://files.pythonhosted.org/packages/7c/24/75d274228acf35ceeb2850b8ce04de9dd7355ff7a0b49d607ee60c29c518/aiohttp-3.13.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f76c1e3fe7d7c8afad7ed193f89a292e1999608170dcc9751a7462a87dfd5bc0", size = 1743131, upload-time = "2026-01-03T17:30:58.256Z" }, + { url = "https://files.pythonhosted.org/packages/04/98/3d21dde21889b17ca2eea54fdcff21b27b93f45b7bb94ca029c31ab59dc3/aiohttp-3.13.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fc290605db2a917f6e81b0e1e0796469871f5af381ce15c604a3c5c7e51cb730", size = 1556863, upload-time = "2026-01-03T17:31:00.445Z" }, + { url = "https://files.pythonhosted.org/packages/9e/84/da0c3ab1192eaf64782b03971ab4055b475d0db07b17eff925e8c93b3aa5/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4021b51936308aeea0367b8f006dc999ca02bc118a0cc78c303f50a2ff6afb91", size = 1682793, upload-time = "2026-01-03T17:31:03.024Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0f/5802ada182f575afa02cbd0ec5180d7e13a402afb7c2c03a9aa5e5d49060/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:49a03727c1bba9a97d3e93c9f93ca03a57300f484b6e935463099841261195d3", size = 1716676, upload-time = "2026-01-03T17:31:04.842Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8c/714d53bd8b5a4560667f7bbbb06b20c2382f9c7847d198370ec6526af39c/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:3d9908a48eb7416dc1f4524e69f1d32e5d90e3981e4e37eb0aa1cd18f9cfa2a4", size = 1733217, upload-time = "2026-01-03T17:31:06.868Z" }, + { url = "https://files.pythonhosted.org/packages/7d/79/e2176f46d2e963facea939f5be2d26368ce543622be6f00a12844d3c991f/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2712039939ec963c237286113c68dbad80a82a4281543f3abf766d9d73228998", size = 1552303, upload-time = "2026-01-03T17:31:08.958Z" }, + { url = "https://files.pythonhosted.org/packages/ab/6a/28ed4dea1759916090587d1fe57087b03e6c784a642b85ef48217b0277ae/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7bfdc049127717581866fa4708791220970ce291c23e28ccf3922c700740fdc0", size = 1763673, upload-time = "2026-01-03T17:31:10.676Z" }, + { url = "https://files.pythonhosted.org/packages/e8/35/4a3daeb8b9fab49240d21c04d50732313295e4bd813a465d840236dd0ce1/aiohttp-3.13.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8057c98e0c8472d8846b9c79f56766bcc57e3e8ac7bfd510482332366c56c591", size = 1721120, upload-time = "2026-01-03T17:31:12.575Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/d643bb3c5fb99547323e635e251c609fbbc660d983144cfebec529e09264/aiohttp-3.13.3-cp313-cp313-win32.whl", hash = "sha256:1449ceddcdbcf2e0446957863af03ebaaa03f94c090f945411b61269e2cb5daf", size = 427383, upload-time = "2026-01-03T17:31:14.382Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f1/ab0395f8a79933577cdd996dd2f9aa6014af9535f65dddcf88204682fe62/aiohttp-3.13.3-cp313-cp313-win_amd64.whl", hash = "sha256:693781c45a4033d31d4187d2436f5ac701e7bbfe5df40d917736108c1cc7436e", size = 453899, upload-time = "2026-01-03T17:31:15.958Z" }, ] [[package]] @@ -190,6 +194,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/be/317c2c55b8bbec407257d45f5c8d1b6867abc76d12043f2d3d58c538a4ea/asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d", size = 24096, upload-time = "2025-11-19T15:32:19.004Z" }, ] +[[package]] +name = "async-lru" +version = "2.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = "2025-03-16T17:25:35.422Z" }, +] + [[package]] name = "asyncclick" version = "8.3.0.7" @@ -211,9 +224,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] +[[package]] +name = "authlib" +version = "1.6.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bb/9b/b1661026ff24bc641b76b78c5222d614776b0c085bcfdac9bd15a1cb4b35/authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e", size = 164894, upload-time = "2025-12-12T08:01:41.464Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" }, +] + [[package]] name = "beeai-framework" -version = "0.1.73" +version = "0.1.70" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiofiles" }, @@ -227,9 +252,9 @@ dependencies = [ { name = "pydantic-settings" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/40/74/6b9b020609a71f6671c3543bf3838b067663098918e5bda61caba474c570/beeai_framework-0.1.73.tar.gz", hash = "sha256:008d44b66b07a2d80c1fa36d0da1504dc92f84185a9bec67a6b90ed9534f6620", size = 185920, upload-time = "2025-12-11T17:51:06.216Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/27/a26b1a55444de2271325d5471ba09e8a5f8c40bee25bf0f891eff52942ab/beeai_framework-0.1.70.tar.gz", hash = "sha256:7fe1b4f22015f8ea3ceffdbbaf1bba4ed390065d23c17aa3d406e25302ea24eb", size = 184126, upload-time = "2025-11-26T11:47:07.117Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cc/66/f43d85ba1ab5b571974d6b191c97b181e3b2ba4535f1abc9b03f609dee4b/beeai_framework-0.1.73-py3-none-any.whl", hash = "sha256:c3978289648c9dcbf5816add777ccdbb2fa2b578786c01dd3c6c006d8b02c4b1", size = 342516, upload-time = "2025-12-11T17:51:04.882Z" }, + { url = "https://files.pythonhosted.org/packages/bb/50/fb1172b5c1625832b6589a1c6150197940b73c9aaeadfda4d7fef2067763/beeai_framework-0.1.70-py3-none-any.whl", hash = "sha256:a0e9350cc9a8360f2b8f2251ac96bcc41511ed24ac6f27bc7fb6c3e5dbc260e2", size = 340421, upload-time = "2025-11-26T11:47:05.498Z" }, ] [package.optional-dependencies] @@ -284,20 +309,20 @@ wheels = [ [[package]] name = "cachetools" -version = "6.2.3" +version = "6.2.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/44/5dc354b9f2df614673c2a542a630ef95d578b4a8673a1046d1137a7e2453/cachetools-6.2.3.tar.gz", hash = "sha256:64e0a4ddf275041dd01f5b873efa87c91ea49022b844b8c5d1ad3407c0f42f1f", size = 31641, upload-time = "2025-12-12T21:18:06.011Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/1d/ede8680603f6016887c062a2cf4fc8fdba905866a3ab8831aa8aa651320c/cachetools-6.2.4.tar.gz", hash = "sha256:82c5c05585e70b6ba2d3ae09ea60b79548872185d2f24ae1f2709d37299fd607", size = 31731, upload-time = "2025-12-15T18:24:53.744Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/de/aa4cfc69feb5b3d604310214369979bb222ed0df0e2575a1b6e7af1a5579/cachetools-6.2.3-py3-none-any.whl", hash = "sha256:3fde34f7033979efb1e79b07ae529c2c40808bdd23b0b731405a48439254fba5", size = 11554, upload-time = "2025-12-12T21:18:04.556Z" }, + { url = "https://files.pythonhosted.org/packages/2c/fc/1d7b80d0eb7b714984ce40efc78859c022cd930e402f599d8ca9e39c78a4/cachetools-6.2.4-py3-none-any.whl", hash = "sha256:69a7a52634fed8b8bf6e24a050fb60bff1c9bd8f6d24572b99c32d4e71e62a51", size = 11551, upload-time = "2025-12-15T18:24:52.332Z" }, ] [[package]] name = "certifi" -version = "2025.11.12" +version = "2026.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" } +sdist = 
{ url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, ] [[package]] @@ -371,7 +396,7 @@ dev = [ [package.metadata] requires-dist = [ { name = "agentstack-sdk", editable = "../../apps/agentstack-sdk-py" }, - { name = "beeai-framework", extras = ["duckduckgo", "wikipedia", "a2a"], specifier = ">=0.1.70" }, + { name = "beeai-framework", extras = ["duckduckgo", "wikipedia", "a2a"], specifier = "==0.1.70" }, { name = "fastuuid", specifier = ">=0.14.0" }, { name = "openai", specifier = ">=1.107.1" }, { name = "openinference-instrumentation-beeai", specifier = ">=0.1.12" }, @@ -456,7 +481,7 @@ wheels = [ [[package]] name = "ddgs" -version = "9.9.3" +version = "9.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -465,9 +490,9 @@ dependencies = [ { name = "lxml" }, { name = "primp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/e5/93cb56402815f86f29fccc1beae0e879e991273c2731dcd4743c299df260/ddgs-9.9.3.tar.gz", hash = "sha256:367b4b055790a44c11e96c2f85ca570e65dbeb59c7399817e00c5eaa0b7076db", size = 36103, upload-time = "2025-12-05T12:25:21.102Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/76/8dc0323d1577037abad7a679f8af150ebb73a94995d3012de71a8898e6e6/ddgs-9.10.0.tar.gz", hash = "sha256:d9381ff75bdf1ad6691d3d1dc2be12be190d1d32ecd24f1002c492143c52c34f", size = 31491, upload-time = "2025-12-17T23:30:15.021Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/65/4d/0ab756d83e06e18f4a7ef48adc2940577bf308bfdd19c6cea6dab00baec1/ddgs-9.9.3-py3-none-any.whl", hash = "sha256:60a1d5d4b72cf23991495ea6b87d9389640d5fb3452224ecaad8e2ff17b93466", size = 41635, upload-time = "2025-12-05T12:25:19.47Z" }, + { url = "https://files.pythonhosted.org/packages/b5/0e/d4b7d6a8df5074cf67bc14adead39955b0bf847c947ff6cad0bb527887f4/ddgs-9.10.0-py3-none-any.whl", hash = "sha256:81233d79309836eb03e7df2a0d2697adc83c47c342713132c0ba618f1f2c6eee", size = 40311, upload-time = "2025-12-17T23:30:13.606Z" }, ] [[package]] @@ -502,7 +527,7 @@ wheels = [ [[package]] name = "fastapi" -version = "0.124.4" +version = "0.128.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, @@ -510,9 +535,9 @@ dependencies = [ { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/cd/21/ade3ff6745a82ea8ad88552b4139d27941549e4f19125879f848ac8f3c3d/fastapi-0.124.4.tar.gz", hash = "sha256:0e9422e8d6b797515f33f500309f6e1c98ee4e85563ba0f2debb282df6343763", size = 378460, upload-time = "2025-12-12T15:00:43.891Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/57/aa70121b5008f44031be645a61a7c4abc24e0e888ad3fc8fda916f4d188e/fastapi-0.124.4-py3-none-any.whl", hash = "sha256:6d1e703698443ccb89e50abe4893f3c84d9d6689c0cf1ca4fad6d3c15cf69f15", size = 113281, upload-time = "2025-12-12T15:00:42.44Z" }, + { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" }, ] [[package]] @@ -536,11 +561,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.20.0" +version = "3.20.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/e0/a75dbe4bca1e7d41307323dad5ea2efdd95408f74ab2de8bd7dba9b51a1a/filelock-3.20.2.tar.gz", hash = "sha256:a2241ff4ddde2a7cebddf78e39832509cb045d18ec1a09d7248d6bfc6bfbbe64", size = 19510, upload-time = "2026-01-02T15:33:32.582Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, + { url = "https://files.pythonhosted.org/packages/9a/30/ab407e2ec752aa541704ed8f93c11e2a5d92c168b8a755d818b74a3c5c2d/filelock-3.20.2-py3-none-any.whl", hash = "sha256:fbba7237d6ea277175a32c54bb71ef814a8546d8601269e1bfc388de333974e8", size = 16697, upload-time = "2026-01-02T15:33:31.133Z" }, ] [[package]] @@ -611,16 +636,16 @@ wheels = [ [[package]] name = "google-auth" -version = "2.43.0" +version = "2.45.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cachetools" }, { name = "pyasn1-modules" }, { name = "rsa" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ff/ef/66d14cf0e01b08d2d51ffc3c20410c4e134a1548fc246a6081eae585a4fe/google_auth-2.43.0.tar.gz", hash = "sha256:88228eee5fc21b62a1b5fe773ca15e67778cb07dc8363adcb4a8827b52d81483", size = 296359, upload-time = "2025-11-06T00:13:36.587Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/00/3c794502a8b892c404b2dea5b3650eb21bfc7069612fbfd15c7f17c1cb0d/google_auth-2.45.0.tar.gz", hash = "sha256:90d3f41b6b72ea72dd9811e765699ee491ab24139f34ebf1ca2b9cc0c38708f3", size = 320708, upload-time = "2025-12-15T22:58:42.889Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/d1/385110a9ae86d91cc14c5282c61fe9f4dc41c0b9f7d423c6ad77038c4448/google_auth-2.43.0-py2.py3-none-any.whl", hash = "sha256:af628ba6fa493f75c7e9dbe9373d148ca9f4399b5ea29976519e0a3848eddd16", size = 223114, upload-time = "2025-11-06T00:13:35.209Z" }, + { url = "https://files.pythonhosted.org/packages/c6/97/451d55e05487a5cd6279a01a7e34921858b16f7dc8aa38a2c684743cd2b3/google_auth-2.45.0-py2.py3-none-any.whl", hash = 
"sha256:82344e86dc00410ef5382d99be677c6043d72e502b625aa4f4afa0bdacca0f36", size = 233312, upload-time = "2025-12-15T22:58:40.777Z" }, ] [[package]] @@ -843,14 +868,14 @@ wheels = [ [[package]] name = "importlib-metadata" -version = "8.7.0" +version = "8.7.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, ] [[package]] @@ -947,7 +972,7 @@ wheels = [ [[package]] name = "litellm" -version = "1.80.10" +version = "1.80.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -964,9 +989,9 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/dd/44/0aaa7449e7c4aa05668ec03f1f68a01b1e476591071d9659a68db19371a2/litellm-1.80.10.tar.gz", hash = "sha256:4a4aff7558945c2f7e5c6523e67c1b5525a46b10b0e1ad6b8f847cb13b16779e", size = 12764777, upload-time = "2025-12-14T02:07:05.362Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/47/be6cd7b356418ca8bef3b843507940ce77b76ef2dfe515f2b4ba9b461ff0/litellm-1.80.11.tar.gz", hash = "sha256:c9fc63e7acb6360363238fe291bcff1488c59ff66020416d8376c0ee56414a19", size = 13189510, upload-time = "2025-12-22T12:47:29.181Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/a9/4814b6aa58f6705df2831eaadeb5bc8240684c8c9d5964245212f85049d1/litellm-1.80.10-py3-none-any.whl", hash = "sha256:9b3e561efaba0eb1291cb1555d3dcb7283cf7f3cb65aadbcdb42e2a8765898c8", size = 11264240, upload-time = "2025-12-14T02:07:02.414Z" }, + { url = "https://files.pythonhosted.org/packages/97/0b/9e637344f24f3fe0e8039cd2337389fe05e0d31f518bc3e0a5cdbe45784a/litellm-1.80.11-py3-none-any.whl", hash = "sha256:406283d66ead77dc7ff0e0b2559c80e9e497d8e7c2257efb1cb9210a20d09d54", size = 11456346, upload-time = "2025-12-22T12:47:26.469Z" }, ] [[package]] @@ -1027,7 +1052,7 @@ wheels = [ [[package]] name = "mcp" -version = "1.24.0" +version = "1.25.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1045,9 +1070,9 @@ dependencies = [ { name = "typing-inspection" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d6/2c/db9ae5ab1fcdd9cd2bcc7ca3b7361b712e30590b64d5151a31563af8f82d/mcp-1.24.0.tar.gz", hash = 
"sha256:aeaad134664ce56f2721d1abf300666a1e8348563f4d3baff361c3b652448efc", size = 604375, upload-time = "2025-12-12T14:19:38.205Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/0d/5cf14e177c8ae655a2fd9324a6ef657ca4cafd3fc2201c87716055e29641/mcp-1.24.0-py3-none-any.whl", hash = "sha256:db130e103cc50ddc3dffc928382f33ba3eaef0b711f7a87c05e7ded65b1ca062", size = 232896, upload-time = "2025-12-12T14:19:36.14Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" }, ] [[package]] @@ -1106,7 +1131,7 @@ wheels = [ [[package]] name = "openai" -version = "2.11.0" +version = "2.14.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1118,9 +1143,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/8c/aa6aea6072f985ace9d6515046b9088ff00c157f9654da0c7b1e129d9506/openai-2.11.0.tar.gz", hash = "sha256:b3da01d92eda31524930b6ec9d7167c535e843918d7ba8a76b1c38f1104f321e", size = 624540, upload-time = "2025-12-11T19:11:58.539Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/b1/12fe1c196bea326261718eb037307c1c1fe1dedc2d2d4de777df822e6238/openai-2.14.0.tar.gz", hash = "sha256:419357bedde9402d23bf8f2ee372fca1985a73348debba94bddff06f19459952", size = 626938, upload-time = "2025-12-19T03:28:45.742Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/f1/d9251b565fce9f8daeb45611e3e0d2f7f248429e40908dcee3b6fe1b5944/openai-2.11.0-py3-none-any.whl", hash = "sha256:21189da44d2e3d027b08c7a920ba4454b8b7d6d30ae7e64d9de11dbe946d4faa", size = 1064131, upload-time = "2025-12-11T19:11:56.816Z" }, + { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" }, ] [[package]] @@ -1369,14 +1394,14 @@ wheels = [ [[package]] name = "proto-plus" -version = "1.26.1" +version = "1.27.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/89/9cbe2f4bba860e149108b683bc2efec21f14d5f7ed6e25562ad86acbc373/proto_plus-1.27.0.tar.gz", hash = "sha256:873af56dd0d7e91836aee871e5799e1c6f1bda86ac9a983e0bb9f0c266a568c4", size = 56158, upload-time = "2025-12-16T13:46:25.729Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = 
"2025-03-10T15:54:37.335Z" }, + { url = "https://files.pythonhosted.org/packages/cd/24/3b7a0818484df9c28172857af32c2397b6d8fcd99d9468bd4684f98ebf0a/proto_plus-1.27.0-py3-none-any.whl", hash = "sha256:1baa7f81cf0f8acb8bc1f6d085008ba4171eaf669629d1b6d1673b21ed1c0a82", size = 50205, upload-time = "2025-12-16T13:46:24.76Z" }, ] [[package]] @@ -1502,11 +1527,11 @@ wheels = [ [[package]] name = "python-multipart" -version = "0.0.20" +version = "0.0.21" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = "2025-12-17T09:24:22.446Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, + { url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" }, ] [[package]] @@ -1688,15 +1713,15 @@ wheels = [ [[package]] name = "sse-starlette" -version = "3.0.4" +version = "3.1.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/8b/54651ad49bce99a50fd61a7f19c2b6a79fbb072e693101fbb1194c362054/sse_starlette-3.0.4.tar.gz", hash = "sha256:5e34286862e96ead0eb70f5ddd0bd21ab1f6473a8f44419dd267f431611383dd", size = 22576, upload-time = "2025-12-14T16:22:52.493Z" } +sdist = { url = "https://files.pythonhosted.org/packages/da/34/f5df66cb383efdbf4f2db23cabb27f51b1dcb737efaf8a558f6f1d195134/sse_starlette-3.1.2.tar.gz", hash = "sha256:55eff034207a83a0eb86de9a68099bd0157838f0b8b999a1b742005c71e33618", size = 26303, upload-time = "2025-12-31T08:02:20.023Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/22/8ab1066358601163e1ac732837adba3672f703818f693e179b24e0d3b65c/sse_starlette-3.0.4-py3-none-any.whl", hash = "sha256:32c80ef0d04506ced4b0b6ab8fe300925edc37d26f666afb1874c754895f5dc3", size = 11764, upload-time = "2025-12-14T16:22:51.453Z" }, + { url = "https://files.pythonhosted.org/packages/b7/95/8c4b76eec9ae574474e5d2997557cebf764bcd3586458956c30631ae08f4/sse_starlette-3.1.2-py3-none-any.whl", hash = "sha256:cd800dd349f4521b317b9391d3796fa97b71748a4da9b9e00aafab32dda375c8", size = 12484, upload-time = "2025-12-31T08:02:18.894Z" }, ] [[package]] @@ -1748,27 +1773,28 @@ wheels = [ [[package]] name = "tokenizers" -version = "0.22.1" +version = "0.22.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "huggingface-hub" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = 
"sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } +sdist = { url = "https://files.pythonhosted.org/packages/73/6f/f80cfef4a312e1fb34baf7d85c72d4411afde10978d4657f8cdd811d3ccc/tokenizers-0.22.2.tar.gz", hash = "sha256:473b83b915e547aa366d1eee11806deaf419e17be16310ac0a14077f1e28f917", size = 372115, upload-time = "2026-01-05T10:45:15.988Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, - { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, - { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, - { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, - { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, - { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, - { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size = 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, - { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, - { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, - { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, - { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" }, - { url = "https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/5dbfabf04c7e348e655e907ed27913e03db0923abb5dfdd120d7b25630e1/tokenizers-0.22.2-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:544dd704ae7238755d790de45ba8da072e9af3eea688f698b137915ae959281c", size = 3100275, upload-time = "2026-01-05T10:41:02.158Z" }, + { url = "https://files.pythonhosted.org/packages/2e/47/174dca0502ef88b28f1c9e06b73ce33500eedfac7a7692108aec220464e7/tokenizers-0.22.2-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1e418a55456beedca4621dbab65a318981467a2b188e982a23e117f115ce5001", size = 2981472, upload-time = "2026-01-05T10:41:00.276Z" }, + { url = "https://files.pythonhosted.org/packages/d6/84/7990e799f1309a8b87af6b948f31edaa12a3ed22d11b352eaf4f4b2e5753/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2249487018adec45d6e3554c71d46eb39fa8ea67156c640f7513eb26f318cec7", size = 3290736, upload-time = "2026-01-05T10:40:32.165Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/09d0d9ba94dcd5f4f1368d4858d24546b4bdc0231c2354aa31d6199f0399/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25b85325d0815e86e0bac263506dd114578953b7b53d7de09a6485e4a160a7dd", size = 3168835, upload-time = "2026-01-05T10:40:38.847Z" }, + { url = "https://files.pythonhosted.org/packages/47/50/b3ebb4243e7160bda8d34b731e54dd8ab8b133e50775872e7a434e524c28/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfb88f22a209ff7b40a576d5324bf8286b519d7358663db21d6246fb17eea2d5", size = 3521673, upload-time = "2026-01-05T10:40:56.614Z" }, + { url = "https://files.pythonhosted.org/packages/e0/fa/89f4cb9e08df770b57adb96f8cbb7e22695a4cb6c2bd5f0c4f0ebcf33b66/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c774b1276f71e1ef716e5486f21e76333464f47bece56bbd554485982a9e03e", size = 3724818, upload-time = "2026-01-05T10:40:44.507Z" }, + { url = 
"https://files.pythonhosted.org/packages/64/04/ca2363f0bfbe3b3d36e95bf67e56a4c88c8e3362b658e616d1ac185d47f2/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df6c4265b289083bf710dff49bc51ef252f9d5be33a45ee2bed151114a56207b", size = 3379195, upload-time = "2026-01-05T10:40:51.139Z" }, + { url = "https://files.pythonhosted.org/packages/2e/76/932be4b50ef6ccedf9d3c6639b056a967a86258c6d9200643f01269211ca/tokenizers-0.22.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:369cc9fc8cc10cb24143873a0d95438bb8ee257bb80c71989e3ee290e8d72c67", size = 3274982, upload-time = "2026-01-05T10:40:58.331Z" }, + { url = "https://files.pythonhosted.org/packages/1d/28/5f9f5a4cc211b69e89420980e483831bcc29dade307955cc9dc858a40f01/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:29c30b83d8dcd061078b05ae0cb94d3c710555fbb44861139f9f83dcca3dc3e4", size = 9478245, upload-time = "2026-01-05T10:41:04.053Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fb/66e2da4704d6aadebf8cb39f1d6d1957df667ab24cff2326b77cda0dcb85/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:37ae80a28c1d3265bb1f22464c856bd23c02a05bb211e56d0c5301a435be6c1a", size = 9560069, upload-time = "2026-01-05T10:45:10.673Z" }, + { url = "https://files.pythonhosted.org/packages/16/04/fed398b05caa87ce9b1a1bb5166645e38196081b225059a6edaff6440fac/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:791135ee325f2336f498590eb2f11dc5c295232f288e75c99a36c5dbce63088a", size = 9899263, upload-time = "2026-01-05T10:45:12.559Z" }, + { url = "https://files.pythonhosted.org/packages/05/a1/d62dfe7376beaaf1394917e0f8e93ee5f67fea8fcf4107501db35996586b/tokenizers-0.22.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38337540fbbddff8e999d59970f3c6f35a82de10053206a7562f1ea02d046fa5", size = 10033429, upload-time = "2026-01-05T10:45:14.333Z" }, + { url = "https://files.pythonhosted.org/packages/fd/18/a545c4ea42af3df6effd7d13d250ba77a0a86fb20393143bbb9a92e434d4/tokenizers-0.22.2-cp39-abi3-win32.whl", hash = "sha256:a6bf3f88c554a2b653af81f3204491c818ae2ac6fbc09e76ef4773351292bc92", size = 2502363, upload-time = "2026-01-05T10:45:20.593Z" }, + { url = "https://files.pythonhosted.org/packages/65/71/0670843133a43d43070abeb1949abfdef12a86d490bea9cd9e18e37c5ff7/tokenizers-0.22.2-cp39-abi3-win_amd64.whl", hash = "sha256:c9ea31edff2968b44a88f97d784c2f16dc0729b8b143ed004699ebca91f05c48", size = 2747786, upload-time = "2026-01-05T10:45:18.411Z" }, + { url = "https://files.pythonhosted.org/packages/72/f4/0de46cfa12cdcbcd464cc59fde36912af405696f687e53a091fb432f694c/tokenizers-0.22.2-cp39-abi3-win_arm64.whl", hash = "sha256:9ce725d22864a1e965217204946f830c37876eee3b2ba6fc6255e8e903d5fcbc", size = 2612133, upload-time = "2026-01-05T10:45:17.232Z" }, ] [[package]] @@ -1785,15 +1811,15 @@ wheels = [ [[package]] name = "typer-slim" -version = "0.20.0" +version = "0.21.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8e/45/81b94a52caed434b94da65729c03ad0fb7665fab0f7db9ee54c94e541403/typer_slim-0.20.0.tar.gz", hash = "sha256:9fc6607b3c6c20f5c33ea9590cbeb17848667c51feee27d9e314a579ab07d1a3", size = 106561, upload-time = "2025-10-20T17:03:46.642Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/3b/2f60ce16f578b1db5b8816d37d6a4d9786b33b76407fc8c13b0b86312c31/typer_slim-0.21.0.tar.gz", hash = 
"sha256:f2dbd150cfa0fead2242e21fa9f654dfc64773763ddf07c6be9a49ad34f79557", size = 106841, upload-time = "2025-12-25T09:54:55.998Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/dd/5cbf31f402f1cc0ab087c94d4669cfa55bd1e818688b910631e131d74e75/typer_slim-0.20.0-py3-none-any.whl", hash = "sha256:f42a9b7571a12b97dddf364745d29f12221865acef7a2680065f9bb29c7dc89d", size = 47087, upload-time = "2025-10-20T17:03:44.546Z" }, + { url = "https://files.pythonhosted.org/packages/b4/84/e97abf10e4a699194ff07fd586ec7f4cf867d9d04bead559a65f9e7aff84/typer_slim-0.21.0-py3-none-any.whl", hash = "sha256:92aee2188ac6fc2b2924bd75bb61a340b78bd8cd51fd9735533ce5a856812c8e", size = 47174, upload-time = "2025-12-25T09:54:54.609Z" }, ] [[package]] diff --git a/agents/form/src/form/agent.py b/agents/form/src/form/agent.py index e02cce740..8453c163d 100644 --- a/agents/form/src/form/agent.py +++ b/agents/form/src/form/agent.py @@ -3,29 +3,27 @@ import os from typing import Annotated -from pydantic import BaseModel import a2a.types -from a2a.types import Message - - -from agentstack_sdk.server import Server - import agentstack_sdk.a2a.extensions +from a2a.types import Message from agentstack_sdk.a2a.extensions.common.form import ( + CheckboxField, DateField, - TextField, FileField, FileInfo, - CheckboxField, + FormRender, MultiSelectField, OptionItem, - FormRender, + TextField, ) from agentstack_sdk.a2a.extensions.services.form import ( FormServiceExtensionServer, FormServiceExtensionSpec, ) +from agentstack_sdk.server import Server +from agentstack_sdk.server.middleware.platform_auth_backend import PlatformAuthBackend +from pydantic import BaseModel agent_detail_extension_spec = agentstack_sdk.a2a.extensions.AgentDetailExtensionSpec( params=agentstack_sdk.a2a.extensions.AgentDetail( @@ -120,6 +118,7 @@ def serve(): host=os.getenv("HOST", "127.0.0.1"), port=int(os.getenv("PORT", 10001)), configure_telemetry=True, + auth_backend=PlatformAuthBackend(), ) except KeyboardInterrupt: pass diff --git a/agents/rag/.vscode/launch.json b/agents/rag/.vscode/launch.json index ea5414cb1..fa75e9686 100644 --- a/agents/rag/.vscode/launch.json +++ b/agents/rag/.vscode/launch.json @@ -7,6 +7,7 @@ { "name": "agent-rag", "type": "debugpy", + "justMyCode": false, "request": "launch", "program": "${workspaceFolder}/src/rag/agent.py", "console": "integratedTerminal" diff --git a/agents/rag/src/rag/agent.py b/agents/rag/src/rag/agent.py index 8645448a3..f8678a25c 100644 --- a/agents/rag/src/rag/agent.py +++ b/agents/rag/src/rag/agent.py @@ -2,79 +2,51 @@ # SPDX-License-Identifier: Apache-2.0 import functools import json -from collections import defaultdict import logging -from typing import Annotated import os +from collections import defaultdict +from typing import Annotated -from pydantic import BaseModel +from a2a.types import AgentSkill, Message from agentstack_sdk.a2a.extensions import ( AgentDetail, CitationExtensionServer, CitationExtensionSpec, - TrajectoryExtensionServer, - TrajectoryExtensionSpec, - LLMServiceExtensionServer, - LLMServiceExtensionSpec, EmbeddingServiceExtensionServer, EmbeddingServiceExtensionSpec, - BaseExtensionSpec, - BaseExtensionServer, + LLMServiceExtensionServer, + LLMServiceExtensionSpec, + TrajectoryExtensionServer, + TrajectoryExtensionSpec, ) - -# Monkey-patch to remove FormExtensionSpec which no longer exists -# TODO: remove after next release -import agentstack_sdk.a2a.extensions as agentstack_extensions - -agentstack_extensions.FormExtensionSpec = BaseExtensionSpec 
-agentstack_extensions.FormExtensionServer = BaseExtensionServer -agentstack_extensions.TextField = BaseModel - -from a2a.types import AgentSkill, Message +from agentstack_sdk.a2a.extensions.services.platform import PlatformApiExtensionServer, PlatformApiExtensionSpec +from agentstack_sdk.a2a.types import AgentArtifact, AgentMessage +from agentstack_sdk.server import Server +from agentstack_sdk.server.context import RunContext +from agentstack_sdk.server.middleware.platform_auth_backend import PlatformAuthBackend +from agentstack_sdk.server.store.platform_context_store import PlatformContextStore from beeai_framework.adapters.agentstack.backend.chat import AgentStackChatModel from beeai_framework.agents.requirement import RequirementAgent - +from beeai_framework.agents.requirement.utils._tool import FinalAnswerTool from beeai_framework.emitter import EmitterOptions from beeai_framework.memory import UnconstrainedMemory from beeai_framework.middleware.trajectory import GlobalTrajectoryMiddleware from beeai_framework.tools import Tool from openai import AsyncOpenAI - -from agentstack_sdk.a2a.extensions.services.platform import ( - PlatformApiExtensionServer, - PlatformApiExtensionSpec, -) -from beeai_framework.agents.requirement.utils._tool import FinalAnswerTool -from agentstack_sdk.a2a.types import AgentMessage, AgentArtifact -from agentstack_sdk.server import Server -from agentstack_sdk.server.context import RunContext from openinference.instrumentation.beeai import BeeAIInstrumentor + from rag.helpers.citations import extract_citations -from rag.helpers.trajectory import ToolCallTrajectoryEvent from rag.helpers.event_binder import EventBinder -from rag.helpers.vectore_store import ( - EmbeddingFunction, - embed_all_files, - CreateVectorStoreEvent, - create_vector_store, -) +from rag.helpers.trajectory import ToolCallTrajectoryEvent +from rag.helpers.vectore_store import CreateVectorStoreEvent, EmbeddingFunction, create_vector_store, embed_all_files from rag.tools.files.file_creator import FileCreatorTool, FileCreatorToolOutput from rag.tools.files.file_reader import create_file_reader_tool_class from rag.tools.files.utils import extract_files, to_framework_message from rag.tools.files.vector_search import VectorSearchTool -from rag.tools.general.act import ( - ActAlwaysFirstRequirement, - ActTool, - act_tool_middleware, -) -from rag.tools.general.clarification import ( - ClarificationTool, - clarification_tool_middleware, -) +from rag.tools.general.act import ActAlwaysFirstRequirement, ActTool, act_tool_middleware +from rag.tools.general.clarification import ClarificationTool, clarification_tool_middleware from rag.tools.general.current_time import CurrentTimeTool -from agentstack_sdk.server.store.platform_context_store import PlatformContextStore - BeeAIInstrumentor().instrument() logger = logging.getLogger(__name__) @@ -349,6 +321,7 @@ def serve(): port=int(os.getenv("PORT", 8000)), configure_telemetry=True, context_store=PlatformContextStore(), + auth_backend=PlatformAuthBackend(), ) except KeyboardInterrupt: pass diff --git a/apps/agentstack-cli/pyproject.toml b/apps/agentstack-cli/pyproject.toml index 1ae253b32..02e404e83 100644 --- a/apps/agentstack-cli/pyproject.toml +++ b/apps/agentstack-cli/pyproject.toml @@ -75,7 +75,6 @@ lint.ignore = [ force-exclude = true [tool.pyright] -reportUnusedCallResult = false ignore = ["tests/**", "examples/cli.py"] venvPath = "." 
venv = ".venv" diff --git a/apps/agentstack-cli/src/agentstack_cli/api.py b/apps/agentstack-cli/src/agentstack_cli/api.py index dffb0e045..66f6a14fa 100644 --- a/apps/agentstack-cli/src/agentstack_cli/api.py +++ b/apps/agentstack-cli/src/agentstack_cli/api.py @@ -16,6 +16,7 @@ import pydantic from a2a.client import A2AClientHTTPError, Client, ClientConfig, ClientFactory from a2a.types import AgentCard +from agentstack_sdk.platform.context import ContextToken from httpx import HTTPStatusError from httpx._types import RequestFiles @@ -127,14 +128,10 @@ class OpenAPISchema(pydantic.BaseModel): @asynccontextmanager -async def a2a_client(agent_card: AgentCard, use_auth: bool = True) -> AsyncIterator[Client]: +async def a2a_client(agent_card: AgentCard, context_token: ContextToken) -> AsyncIterator[Client]: try: async with httpx.AsyncClient( - headers=( - {"Authorization": f"Bearer {token}"} - if use_auth and (token := await config.auth_manager.load_auth_token()) - else {} - ), + headers={"Authorization": f"Bearer {context_token.token.get_secret_value()}"}, follow_redirects=True, timeout=timedelta(hours=1).total_seconds(), ) as httpx_client: diff --git a/apps/agentstack-cli/src/agentstack_cli/commands/agent.py b/apps/agentstack-cli/src/agentstack_cli/commands/agent.py index 763a91f69..52914c659 100644 --- a/apps/agentstack-cli/src/agentstack_cli/commands/agent.py +++ b/apps/agentstack-cli/src/agentstack_cli/commands/agent.py @@ -571,7 +571,7 @@ async def _run_agent( console.print() # Add newline after completion return case Task(id=task_id), TaskStatusUpdateEvent( - status=TaskStatus(state=TaskState.working, message=message) + status=TaskStatus(state=TaskState.working | TaskState.submitted, message=message) ): # Handle streaming content during working state if message: @@ -960,7 +960,7 @@ async def run_agent( if interaction_mode == InteractionMode.MULTI_TURN: console.print(f"{user_greeting}\n") turn_input = await _ask_form_questions(initial_form_render) if initial_form_render else handle_input() - async with a2a_client(provider.agent_card) as client: + async with a2a_client(provider.agent_card, context_token=context_token) as client: while True: console.print() await _run_agent( @@ -977,7 +977,7 @@ async def run_agent( user_greeting = ui_annotations.get("user_greeting", None) or "Enter your instructions." 
console.print(f"{user_greeting}\n") console.print() - async with a2a_client(provider.agent_card) as client: + async with a2a_client(provider.agent_card, context_token=context_token) as client: await _run_agent( client, input=await _ask_form_questions(initial_form_render) if initial_form_render else handle_input(), @@ -988,7 +988,7 @@ async def run_agent( ) else: - async with a2a_client(provider.agent_card) as client: + async with a2a_client(provider.agent_card, context_token=context_token) as client: await _run_agent( client, input, diff --git a/apps/agentstack-cli/uv.lock b/apps/agentstack-cli/uv.lock index a25ecfbaf..2a2adf6e1 100644 --- a/apps/agentstack-cli/uv.lock +++ b/apps/agentstack-cli/uv.lock @@ -86,7 +86,9 @@ source = { editable = "../agentstack-sdk-py" } dependencies = [ { name = "a2a-sdk" }, { name = "anyio" }, + { name = "async-lru" }, { name = "asyncclick" }, + { name = "authlib" }, { name = "fastapi" }, { name = "httpx" }, { name = "janus" }, @@ -106,7 +108,9 @@ dependencies = [ requires-dist = [ { name = "a2a-sdk", specifier = "==0.3.21" }, { name = "anyio", specifier = ">=4.9.0" }, + { name = "async-lru", specifier = ">=2.0.4" }, { name = "asyncclick", specifier = ">=8.1.8" }, + { name = "authlib", specifier = ">=1.3.0" }, { name = "fastapi", specifier = ">=0.116.1" }, { name = "httpx" }, { name = "janus", specifier = ">=2.0.0" }, @@ -172,6 +176,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/be/317c2c55b8bbec407257d45f5c8d1b6867abc76d12043f2d3d58c538a4ea/asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d", size = 24096, upload-time = "2025-11-19T15:32:19.004Z" }, ] +[[package]] +name = "async-lru" +version = "2.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = "2025-03-16T17:25:35.422Z" }, +] + [[package]] name = "asyncclick" version = "8.3.0.7" diff --git a/apps/agentstack-sdk-py/examples/connectors_client.py b/apps/agentstack-sdk-py/examples/connectors_client.py index ade636078..14aa8bff7 100644 --- a/apps/agentstack-sdk-py/examples/connectors_client.py +++ b/apps/agentstack-sdk-py/examples/connectors_client.py @@ -27,6 +27,7 @@ async def run( ): context = await Context.create(provider_id=agent_id) context_token = await context.generate_token( + providers=[agent_id], grant_global_permissions=Permissions(connectors={"proxy"}), grant_context_permissions=ContextPermissions(context_data={"*"}), ) diff --git a/apps/agentstack-sdk-py/pyproject.toml b/apps/agentstack-sdk-py/pyproject.toml index a9d23af31..2efc47dbb 100644 --- a/apps/agentstack-sdk-py/pyproject.toml +++ b/apps/agentstack-sdk-py/pyproject.toml @@ -22,6 +22,8 @@ dependencies = [ "httpx", # version determined by a2a-sdk "mcp>=1.12.3", "fastapi>=0.116.1", + "authlib>=1.3.0", + "async-lru>=2.0.4", ] [dependency-groups] diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/auth/oauth/oauth.py 
b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/auth/oauth/oauth.py index 0acf54c60..8de7cd59e 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/auth/oauth/oauth.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/auth/oauth/oauth.py @@ -8,10 +8,13 @@ from typing import TYPE_CHECKING, Any, Self from urllib.parse import parse_qs -import a2a.types import pydantic +from a2a.server.agent_execution import RequestContext +from a2a.types import Message as A2AMessage +from a2a.types import Role, TextPart from mcp.client.auth import OAuthClientProvider from mcp.shared.auth import OAuthClientMetadata +from typing_extensions import override from agentstack_sdk.a2a.extensions.auth.oauth.storage import MemoryTokenStorageFactory, TokenStorageFactory from agentstack_sdk.a2a.extensions.base import BaseExtensionClient, BaseExtensionServer, BaseExtensionSpec @@ -58,13 +61,17 @@ class OAuthExtensionMetadata(pydantic.BaseModel): class OAuthExtensionServer(BaseExtensionServer[OAuthExtensionSpec, OAuthExtensionMetadata]): + context: RunContext + token_storage_factory: TokenStorageFactory + def __init__(self, spec: OAuthExtensionSpec, token_storage_factory: TokenStorageFactory | None = None) -> None: super().__init__(spec) self.token_storage_factory = token_storage_factory or MemoryTokenStorageFactory() - def handle_incoming_message(self, message: a2a.types.Message, context: RunContext): - super().handle_incoming_message(message, context) - self.context = context + @override + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: RequestContext): + super().handle_incoming_message(message, run_context, request_context) + self.context = run_context def _get_fulfillment_for_resource(self, resource_url: pydantic.AnyUrl): if not self.data: @@ -117,7 +124,7 @@ def create_auth_request(self, *, authorization_endpoint_url: pydantic.AnyUrl): data = AuthRequest(authorization_endpoint_url=authorization_endpoint_url) return AgentMessage(text="Authorization required", metadata={self.spec.URI: data.model_dump(mode="json")}) - def parse_auth_response(self, *, message: a2a.types.Message): + def parse_auth_response(self, *, message: A2AMessage): if not message or not message.metadata or not (data := message.metadata.get(self.spec.URI)): raise RuntimeError("Invalid auth response") return AuthResponse.model_validate(data) @@ -127,7 +134,7 @@ class OAuthExtensionClient(BaseExtensionClient[OAuthExtensionSpec, NoneType]): def fulfillment_metadata(self, *, oauth_fulfillments: dict[str, Any]) -> dict[str, Any]: return {self.spec.URI: OAuthExtensionMetadata(oauth_fulfillments=oauth_fulfillments).model_dump(mode="json")} - def parse_auth_request(self, *, message: a2a.types.Message): + def parse_auth_request(self, *, message: A2AMessage): if not message or not message.metadata or not (data := message.metadata.get(self.spec.URI)): raise ValueError("Invalid auth request") return AuthRequest.model_validate(data) @@ -135,10 +142,10 @@ def parse_auth_request(self, *, message: a2a.types.Message): def create_auth_response(self, *, task_id: str, redirect_uri: pydantic.AnyUrl): data = AuthResponse(redirect_uri=redirect_uri) - return a2a.types.Message( + return A2AMessage( message_id=str(uuid.uuid4()), - role=a2a.types.Role.user, - parts=[a2a.types.TextPart(text="Authorization completed")], # type: ignore + role=Role.user, + parts=[TextPart(text="Authorization completed")], # type: ignore task_id=task_id, metadata={self.spec.URI: data.model_dump(mode="json")}, ) 
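# --- Illustrative sketch (not part of the patch) -----------------------------
# A minimal example of how a custom extension server would adopt the new
# three-argument handle_incoming_message(message, run_context, request_context)
# hook that this change set rolls out across the OAuth, secrets, canvas, and
# service extensions. The "Ping*" classes and URI are hypothetical placeholders;
# only the base-class signatures mirror the SDK code in this diff.
import pydantic
from a2a.server.agent_execution.context import RequestContext
from a2a.types import Message as A2AMessage
from typing_extensions import override

from agentstack_sdk.a2a.extensions.base import BaseExtensionServer, BaseExtensionSpec
from agentstack_sdk.server.context import RunContext


class PingParams(pydantic.BaseModel):
    greeting: str = "hello"


class PingMetadata(pydantic.BaseModel):
    client_name: str | None = None


class PingExtensionSpec(BaseExtensionSpec[PingParams]):
    URI: str = "https://example.invalid/extensions/ping/v1"  # placeholder URI


class PingExtensionServer(BaseExtensionServer[PingExtensionSpec, PingMetadata]):
    context: RunContext

    @override
    def handle_incoming_message(
        self, message: A2AMessage, run_context: RunContext, request_context: RequestContext
    ) -> None:
        # The base class still parses client metadata from the message; servers
        # additionally receive the a2a RequestContext (e.g. to inspect
        # request_context.call_context.user, as the platform extension does).
        super().handle_incoming_message(message, run_context, request_context)
        self.context = run_context
# ------------------------------------------------------------------------------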
diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/auth/secrets/secrets.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/auth/secrets/secrets.py index 2d8783987..ad4a50b76 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/auth/secrets/secrets.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/auth/secrets/secrets.py @@ -1,11 +1,14 @@ # Copyright 2025 © BeeAI a Series of LF Projects, LLC # SPDX-License-Identifier: Apache-2.0 -import typing -from typing import TYPE_CHECKING +from __future__ import annotations + +from typing import TYPE_CHECKING, Self import pydantic +from a2a.server.agent_execution.context import RequestContext from a2a.types import Message as A2AMessage +from typing_extensions import override from agentstack_sdk.a2a.extensions.base import BaseExtensionClient, BaseExtensionServer, BaseExtensionSpec from agentstack_sdk.a2a.types import AgentMessage, AuthRequired @@ -35,7 +38,7 @@ class SecretsExtensionSpec(BaseExtensionSpec[SecretsServiceExtensionParams | Non URI: str = "https://a2a-extensions.agentstack.beeai.dev/auth/secrets/v1" @classmethod - def single_demand(cls, name: str, key: str | None = None, description: str | None = None) -> typing.Self: + def single_demand(cls, name: str, key: str | None = None, description: str | None = None) -> Self: return cls( params=SecretsServiceExtensionParams( secret_demands={key or "default": SecretDemand(description=description, name=name)} @@ -44,9 +47,12 @@ def single_demand(cls, name: str, key: str | None = None, description: str | Non class SecretsExtensionServer(BaseExtensionServer[SecretsExtensionSpec, SecretsServiceExtensionMetadata]): - def handle_incoming_message(self, message: A2AMessage, context: "RunContext"): - super().handle_incoming_message(message, context) - self.context = context + context: RunContext + + @override + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: RequestContext): + super().handle_incoming_message(message, run_context, request_context) + self.context = run_context def parse_secret_response(self, message: A2AMessage) -> SecretsServiceExtensionMetadata: if not message or not message.metadata or not (data := message.metadata.get(self.spec.URI)): diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/base.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/base.py index d0159cf6d..69746ce4e 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/base.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/base.py @@ -9,8 +9,11 @@ from contextlib import asynccontextmanager from types import NoneType -import a2a.types import pydantic +from a2a.server.agent_execution.context import RequestContext +from a2a.types import AgentCard, AgentExtension +from a2a.types import Message as A2AMessage +from typing_extensions import override ParamsT = typing.TypeVar("ParamsT") MetadataFromClientT = typing.TypeVar("MetadataFromClientT") @@ -19,6 +22,7 @@ if typing.TYPE_CHECKING: from agentstack_sdk.server.context import RunContext + from agentstack_sdk.server.dependencies import Dependency def _get_generic_args(cls: type, base_class: type) -> tuple[typing.Any, ...]: @@ -68,7 +72,7 @@ def __init__(self, params: ParamsT) -> None: self.params = params @classmethod - def from_agent_card(cls, agent: a2a.types.AgentCard) -> typing.Self | None: + def from_agent_card(cls, agent: AgentCard) -> typing.Self | None: """ Client should construct an extension instance using 
this classmethod. """ @@ -81,14 +85,14 @@ def from_agent_card(cls, agent: a2a.types.AgentCard) -> typing.Self | None: except StopIteration: return None - def to_agent_card_extensions(self, *, required: bool = False) -> list[a2a.types.AgentExtension]: + def to_agent_card_extensions(self, *, required: bool = False) -> list[AgentExtension]: """ Agent should use this method to obtain extension definitions to advertise on the agent card. This returns a list, as it's possible to support multiple A2A extensions within a single class. (Usually, that would be different versions of the extension spec.) """ return [ - a2a.types.AgentExtension( + AgentExtension( uri=self.URI, description=self.DESCRIPTION, params=typing.cast( @@ -105,7 +109,8 @@ def __init__(self): super().__init__(None) @classmethod - def from_agent_card(cls, agent: a2a.types.AgentCard) -> typing.Self | None: + @override + def from_agent_card(cls, agent: AgentCard) -> typing.Self | None: if any(e.uri == cls.URI for e in agent.capabilities.extensions or []): return cls() return None @@ -125,7 +130,7 @@ def __init_subclass__(cls, **kwargs): cls.MetadataFromClient = _get_generic_args(cls, BaseExtensionServer)[1] _metadata_from_client: MetadataFromClientT | None = None - _dependencies: dict + _dependencies: dict[str, Dependency] = {} # noqa: RUF012 @property def data(self): @@ -139,7 +144,7 @@ def __init__(self, spec: ExtensionSpecT, *args, **kwargs) -> None: self._args = args self._kwargs = kwargs - def parse_client_metadata(self, message: a2a.types.Message) -> MetadataFromClientT | None: + def parse_client_metadata(self, message: A2AMessage) -> MetadataFromClientT | None: """ Server should use this method to retrieve extension-associated metadata from a message. """ @@ -149,7 +154,7 @@ def parse_client_metadata(self, message: a2a.types.Message) -> MetadataFromClien else pydantic.TypeAdapter(self.MetadataFromClient).validate_python(message.metadata[self.spec.URI]) ) - def handle_incoming_message(self, message: a2a.types.Message, context: RunContext): + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: RequestContext): if self._metadata_from_client is None: self._metadata_from_client = self.parse_client_metadata(message) @@ -158,12 +163,16 @@ def _fork(self) -> typing.Self: return type(self)(self.spec, *self._args, **self._kwargs) def __call__( - self, message: a2a.types.Message, context: RunContext, dependencies: dict[str, typing.Any] + self, + message: A2AMessage, + run_context: RunContext, + request_context: RequestContext, + dependencies: dict[str, Dependency], ) -> typing.Self: """Works as a dependency constructor - create a private instance for the request""" instance = self._fork() instance._dependencies = dependencies - instance.handle_incoming_message(message, context) + instance.handle_incoming_message(message, run_context, request_context) return instance @asynccontextmanager @@ -185,7 +194,7 @@ def __init_subclass__(cls, **kwargs): def __init__(self, spec: ExtensionSpecT) -> None: self.spec = spec - def parse_server_metadata(self, message: a2a.types.Message) -> MetadataFromServerT | None: + def parse_server_metadata(self, message: A2AMessage) -> MetadataFromServerT | None: """ Client should use this method to retrieve extension-associated metadata from a message. 
""" diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/embedding.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/embedding.py index 2130ef42a..4a69c5af7 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/embedding.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/embedding.py @@ -5,12 +5,18 @@ import re from types import NoneType -from typing import Any, Self +from typing import TYPE_CHECKING, Any, Self import pydantic +from a2a.server.agent_execution.context import RequestContext +from a2a.types import Message as A2AMessage +from typing_extensions import override from agentstack_sdk.a2a.extensions.base import BaseExtensionClient, BaseExtensionServer, BaseExtensionSpec +if TYPE_CHECKING: + from agentstack_sdk.server.context import RunContext + class EmbeddingFulfillment(pydantic.BaseModel): identifier: str | None = None @@ -78,10 +84,11 @@ class EmbeddingServiceExtensionMetadata(pydantic.BaseModel): class EmbeddingServiceExtensionServer( BaseExtensionServer[EmbeddingServiceExtensionSpec, EmbeddingServiceExtensionMetadata] ): - def handle_incoming_message(self, message, context): + @override + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: RequestContext): from agentstack_sdk.platform import get_platform_client - super().handle_incoming_message(message, context) + super().handle_incoming_message(message, run_context, request_context) if not self.data: return diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/llm.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/llm.py index 5bf76b47e..ba302f396 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/llm.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/llm.py @@ -8,12 +8,13 @@ from typing import TYPE_CHECKING, Any, Self import pydantic +from a2a.server.agent_execution.context import RequestContext +from a2a.types import Message as A2AMessage +from typing_extensions import override from agentstack_sdk.a2a.extensions.base import BaseExtensionClient, BaseExtensionServer, BaseExtensionSpec if TYPE_CHECKING: - from a2a.types import Message - from agentstack_sdk.server.context import RunContext @@ -81,10 +82,11 @@ class LLMServiceExtensionMetadata(pydantic.BaseModel): class LLMServiceExtensionServer(BaseExtensionServer[LLMServiceExtensionSpec, LLMServiceExtensionMetadata]): - def handle_incoming_message(self, message: Message, context: RunContext): + @override + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: RequestContext): from agentstack_sdk.platform import get_platform_client - super().handle_incoming_message(message, context) + super().handle_incoming_message(message, run_context, request_context) if not self.data: return diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/mcp.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/mcp.py index 6a7dc3a49..a3a71e928 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/mcp.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/mcp.py @@ -8,10 +8,12 @@ from types import NoneType from typing import TYPE_CHECKING, Annotated, Any, Literal, Self -import a2a.types import pydantic +from a2a.server.agent_execution.context import RequestContext +from a2a.types import Message as A2AMessage from mcp.client.stdio import 
StdioServerParameters, stdio_client from mcp.client.streamable_http import streamablehttp_client +from typing_extensions import override from agentstack_sdk.a2a.extensions.auth.oauth.oauth import OAuthExtensionServer from agentstack_sdk.a2a.extensions.base import BaseExtensionClient, BaseExtensionServer, BaseExtensionSpec @@ -102,8 +104,9 @@ class MCPServiceExtensionMetadata(pydantic.BaseModel): class MCPServiceExtensionServer(BaseExtensionServer[MCPServiceExtensionSpec, MCPServiceExtensionMetadata]): - def handle_incoming_message(self, message: a2a.types.Message, context: RunContext): - super().handle_incoming_message(message, context) + @override + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: RequestContext): + super().handle_incoming_message(message, run_context, request_context) if not self.data: return @@ -115,7 +118,8 @@ def handle_incoming_message(self, message: a2a.types.Message, context: RunContex except Exception: logger.warning("Platform URL substitution failed", exc_info=True) - def parse_client_metadata(self, message: a2a.types.Message) -> MCPServiceExtensionMetadata | None: + @override + def parse_client_metadata(self, message: A2AMessage) -> MCPServiceExtensionMetadata | None: metadata = super().parse_client_metadata(message) if metadata: for name, demand in self.spec.params.mcp_demands.items(): diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/platform.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/platform.py index ac0e24710..7e1acfeff 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/platform.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/services/platform.py @@ -9,9 +9,12 @@ from types import NoneType from typing import TYPE_CHECKING -import a2a.types import pydantic +from a2a.server.agent_execution.context import RequestContext +from a2a.types import Message as A2AMessage +from fastapi.security.utils import get_authorization_scheme_param from pydantic.networks import HttpUrl +from typing_extensions import override from agentstack_sdk.a2a.extensions.base import ( BaseExtensionClient, @@ -21,6 +24,7 @@ from agentstack_sdk.a2a.extensions.exceptions import ExtensionError from agentstack_sdk.platform import use_platform_client from agentstack_sdk.platform.client import PlatformClient +from agentstack_sdk.server.middleware.platform_auth_backend import PlatformAuthenticatedUser from agentstack_sdk.util.httpx import BearerAuth if TYPE_CHECKING: @@ -29,7 +33,7 @@ class PlatformApiExtensionMetadata(pydantic.BaseModel): base_url: HttpUrl | None = None - auth_token: pydantic.Secret[str] + auth_token: pydantic.Secret[str] | None = None expires_at: pydantic.AwareDatetime | None = None @@ -53,13 +57,8 @@ def __init__(self, params: PlatformApiExtensionParams | None = None) -> None: class PlatformApiExtensionServer(BaseExtensionServer[PlatformApiExtensionSpec, PlatformApiExtensionMetadata]): context_id: str | None = None - def parse_client_metadata(self, message: a2a.types.Message) -> PlatformApiExtensionMetadata | None: - self.context_id = message.context_id - # we assume that the context id is the same ID as the platform context id - # if different IDs are passed, api requests to platform using this token will fail - return super().parse_client_metadata(message) - @asynccontextmanager + @override async def lifespan(self) -> AsyncIterator[None]: """Called when entering the agent context after the first message was parsed (__call__ was 
already called)""" if self.data and self.spec.params.auto_use: @@ -68,25 +67,44 @@ async def lifespan(self) -> AsyncIterator[None]: else: yield - def handle_incoming_message(self, message: a2a.types.Message, context: RunContext): - super().handle_incoming_message(message, context) - if self.data: - self.data.base_url = self.data.base_url or HttpUrl(os.getenv("PLATFORM_URL", "http://127.0.0.1:8333")) + def _get_header_token(self, request_context: RequestContext) -> pydantic.Secret[str] | None: + header_token = None + call_context = request_context.call_context + assert call_context + if isinstance(call_context.user, PlatformAuthenticatedUser): + header_token = call_context.user.auth_token.get_secret_value() + elif auth_header := call_context.state.get("headers", {}).get("authorization", None): + _scheme, header_token = get_authorization_scheme_param(auth_header) + return pydantic.Secret(header_token) if header_token else None + + @override + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: RequestContext): + super().handle_incoming_message(message, run_context, request_context) + # we assume that request context id is the same ID as the platform context id + # if different IDs are passed, api requests to platform using this token will fail + self.context_id = request_context.context_id + + self._metadata_from_client = self._metadata_from_client or PlatformApiExtensionMetadata() + data = self._metadata_from_client + data.base_url = data.base_url or HttpUrl(os.getenv("PLATFORM_URL", "http://127.0.0.1:8333")) + data.auth_token = data.auth_token or self._get_header_token(request_context) + + if not data.auth_token: + raise ExtensionError(self.spec, "Platform extension metadata was not provided") @asynccontextmanager async def use_client(self) -> AsyncIterator[PlatformClient]: - if not self.data: + if not self.data or not self.data.auth_token: raise ExtensionError(self.spec, "Platform extension metadata was not provided") - auth_token = self.data.auth_token.get_secret_value() async with use_platform_client( context_id=self.context_id, base_url=str(self.data.base_url), - auth_token=auth_token, + auth_token=self.data.auth_token.get_secret_value(), ) as client: yield client async def create_httpx_auth(self) -> BearerAuth: - if not self.data: + if not self.data or not self.data.auth_token: raise ExtensionError(self.spec, "Platform extension metadata was not provided") return BearerAuth(token=self.data.auth_token.get_secret_value()) diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/canvas.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/canvas.py index 9ba6736ea..160a68393 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/canvas.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/canvas.py @@ -6,8 +6,10 @@ from typing import TYPE_CHECKING import pydantic +from a2a.server.agent_execution.context import RequestContext from a2a.types import Artifact, TextPart from a2a.types import Message as A2AMessage +from typing_extensions import override if TYPE_CHECKING: from agentstack_sdk.server.context import RunContext @@ -37,12 +39,13 @@ class CanvasExtensionSpec(NoParamsBaseExtensionSpec): class CanvasExtensionServer(BaseExtensionServer[CanvasExtensionSpec, CanvasEditRequestMetadata]): - def handle_incoming_message(self, message: A2AMessage, context: RunContext): + @override + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: 
RequestContext): if message.metadata and self.spec.URI in message.metadata and message.parts: message.parts = [part for part in message.parts if not isinstance(part.root, TextPart)] - super().handle_incoming_message(message, context) - self.context = context + super().handle_incoming_message(message, run_context, request_context) + self.context = run_context async def parse_canvas_edit_request(self, *, message: A2AMessage) -> CanvasEditRequest | None: if not message or not message.metadata or not (data := message.metadata.get(self.spec.URI)): diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/error.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/error.py index 71c38bf78..056124287 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/error.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/error.py @@ -18,7 +18,8 @@ BaseExtensionServer, BaseExtensionSpec, ) -from agentstack_sdk.a2a.types import AgentMessage, JsonDict, Metadata +from agentstack_sdk.a2a.types import AgentMessage, Metadata +from agentstack_sdk.types import JsonValue from agentstack_sdk.util import resource_context logger = logging.getLogger(__name__) @@ -68,7 +69,7 @@ class ErrorMetadata(pydantic.BaseModel): error: Error | ErrorGroup stack_trace: str | None = None - context: JsonDict | None = None + context: JsonValue | None = None class ErrorExtensionParams(pydantic.BaseModel): @@ -133,7 +134,7 @@ async def lifespan(self) -> AsyncIterator[None]: yield @property - def context(self) -> JsonDict: + def context(self) -> JsonValue: """Get the current request's error context.""" try: return get_error_extension_context().context @@ -214,7 +215,7 @@ class ErrorExtensionClient(BaseExtensionClient[ErrorExtensionSpec, ErrorMetadata class ErrorContext(pydantic.BaseModel, arbitrary_types_allowed=True): server: ErrorExtensionServer = pydantic.Field(default=DEFAULT_ERROR_EXTENSION) - context: JsonDict = pydantic.Field(default_factory=dict) + context: JsonValue = pydantic.Field(default_factory=dict) get_error_extension_context, use_error_extension_context = resource_context( diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/form_request.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/form_request.py index acc0eda1e..4ad216866 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/form_request.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/extensions/ui/form_request.py @@ -5,8 +5,10 @@ from typing import TYPE_CHECKING, TypeVar, cast +from a2a.server.agent_execution.context import RequestContext from a2a.types import Message as A2AMessage from pydantic import TypeAdapter +from typing_extensions import override from agentstack_sdk.a2a.extensions.base import ( BaseExtensionClient, @@ -27,9 +29,10 @@ class FormRequestExtensionSpec(NoParamsBaseExtensionSpec): class FormRequestExtensionServer(BaseExtensionServer[FormRequestExtensionSpec, FormResponse]): - def handle_incoming_message(self, message: A2AMessage, context: RunContext): - super().handle_incoming_message(message, context) - self.context = context + @override + def handle_incoming_message(self, message: A2AMessage, run_context: RunContext, request_context: RequestContext): + super().handle_incoming_message(message, run_context, request_context) + self.context = run_context async def request_form(self, *, form: FormRender, model: type[T] = FormResponse) -> T | None: message = await self.context.yield_async( diff --git 
a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/types.py b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/types.py index 99ecb6aa7..a27278be2 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/types.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/a2a/types.py @@ -1,7 +1,7 @@ # Copyright 2025 © BeeAI a Series of LF Projects, LLC # SPDX-License-Identifier: Apache-2.0 import uuid -from typing import TYPE_CHECKING, Literal, TypeAlias +from typing import Literal, TypeAlias from a2a.types import ( Artifact, @@ -20,16 +20,7 @@ ) from pydantic import Field, model_validator -if TYPE_CHECKING: - JsonValue: TypeAlias = list["JsonValue"] | dict[str, "JsonValue"] | str | bool | int | float | None - JsonDict: TypeAlias = dict[str, JsonValue] -else: - from typing import Union - - from typing_extensions import TypeAliasType - - JsonValue = TypeAliasType("JsonValue", "Union[dict[str, JsonValue], list[JsonValue], str, int, float, bool, None]") # noqa: UP007 - JsonDict = TypeAliasType("JsonDict", "dict[str, JsonValue]") +from agentstack_sdk.types import JsonDict, JsonValue class Metadata(dict[str, JsonValue]): ... diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/platform/client.py b/apps/agentstack-sdk-py/src/agentstack_sdk/platform/client.py index 6faa0e9a5..8988ec05f 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/platform/client.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/platform/client.py @@ -5,7 +5,7 @@ import os import ssl import typing -from collections.abc import AsyncIterator +from collections.abc import AsyncIterator, Mapping from types import TracebackType import httpx @@ -14,6 +14,7 @@ from httpx._config import DEFAULT_LIMITS, DEFAULT_MAX_REDIRECTS, Limits from httpx._types import AuthTypes, CertTypes, CookieTypes, HeaderTypes, ProxyTypes, QueryParamTypes, TimeoutTypes from pydantic import Secret +from typing_extensions import override from agentstack_sdk.util import resource_context @@ -26,7 +27,7 @@ class PlatformClient(httpx.AsyncClient): def __init__( self, context_id: str | None = None, # Enter context scope - auth_token: str | Secret | None = None, + auth_token: str | Secret[str] | None = None, *, auth: AuthTypes | None = None, params: QueryParamTypes | None = None, @@ -37,12 +38,12 @@ def __init__( http1: bool = True, http2: bool = False, proxy: ProxyTypes | None = None, - mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None, + mounts: None | (Mapping[str, AsyncBaseTransport | None]) = None, timeout: TimeoutTypes = DEFAULT_SDK_TIMEOUT, follow_redirects: bool = False, limits: Limits = DEFAULT_LIMITS, max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, + event_hooks: None | (Mapping[str, list[EventHook]]) = None, base_url: URL | str = "", transport: AsyncBaseTransport | None = None, trust_env: bool = True, @@ -74,16 +75,18 @@ def __init__( self.context_id = context_id if auth_token: self.headers["Authorization"] = f"Bearer {auth_token}" - self._ref_count = 0 - self._context_manager_lock = asyncio.Lock() + self._ref_count: int = 0 + self._context_manager_lock: asyncio.Lock = asyncio.Lock() + @override async def __aenter__(self) -> typing.Self: async with self._context_manager_lock: self._ref_count += 1 if self._ref_count == 1: - await super().__aenter__() + _ = await super().__aenter__() return self + @override async def __aexit__( self, exc_type: type[BaseException] | None = None, @@ -94,7 +97,6 @@ async def __aexit__( self._ref_count -= 1 if self._ref_count == 0: await 
super().__aexit__(exc_type, exc_value, traceback) - self._resource = None get_platform_client, set_platform_client = resource_context(factory=PlatformClient, default_factory=PlatformClient) diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/platform/context.py b/apps/agentstack-sdk-py/src/agentstack_sdk/platform/context.py index 7762c4758..b178055d5 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/platform/context.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/platform/context.py @@ -13,6 +13,7 @@ from agentstack_sdk.platform.client import PlatformClient, get_platform_client from agentstack_sdk.platform.common import PaginatedResult +from agentstack_sdk.platform.provider import Provider from agentstack_sdk.platform.types import Metadata, MetadataPatch from agentstack_sdk.util.utils import filter_dict @@ -40,7 +41,7 @@ class ContextPermissions(pydantic.BaseModel): class Permissions(ContextPermissions): llm: set[Literal["*"] | str] = set() embeddings: set[Literal["*"] | str] = set() - a2a_proxy: set[Literal["*"]] = set() + a2a_proxy: set[Literal["*"] | str] = set() model_providers: set[Literal["read", "write", "*"]] = set() variables: SerializeAsAny[set[Literal["read", "write", "*"]]] = set() @@ -179,6 +180,7 @@ async def delete( async def generate_token( self: Context | str, *, + providers: list[str] | list[Provider] | None = None, client: PlatformClient | None = None, grant_global_permissions: Permissions | None = None, grant_context_permissions: ContextPermissions | None = None, @@ -193,6 +195,18 @@ async def generate_token( context_id = self if isinstance(self, str) else self.id grant_global_permissions = grant_global_permissions or Permissions() grant_context_permissions = grant_context_permissions or Permissions() + + if isinstance(self, Context) and self.metadata and (provider_id := self.metadata.get("provider_id", None)): + providers = providers or [provider_id] + + if "*" not in grant_global_permissions.a2a_proxy and not grant_global_permissions.a2a_proxy: + if not providers: + raise ValueError( + "Invalid audience: You must specify providers or use '*' in grant_global_permissions.a2a_proxy." 
+ ) + + grant_global_permissions.a2a_proxy |= {p.id if isinstance(p, Provider) else p for p in providers} + async with client or get_platform_client() as client: token_response = ( ( diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/server/agent.py b/apps/agentstack-sdk-py/src/agentstack_sdk/server/agent.py index def433da5..2a896a17c 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/server/agent.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/server/agent.py @@ -46,7 +46,7 @@ from agentstack_sdk.a2a.types import ArtifactChunk, Metadata, RunYield, RunYieldResume from agentstack_sdk.server.constants import _IMPLICIT_DEPENDENCY_PREFIX from agentstack_sdk.server.context import RunContext -from agentstack_sdk.server.dependencies import Depends, extract_dependencies +from agentstack_sdk.server.dependencies import Dependency, Depends, extract_dependencies from agentstack_sdk.server.store.context_store import ContextStore from agentstack_sdk.server.utils import cancel_task from agentstack_sdk.util.logging import logger @@ -234,6 +234,7 @@ def __init__(self, agent: Agent, context_store: ContextStore, on_finish: Callabl self.last_invocation: datetime = datetime.now() self.resume_queue: asyncio.Queue[RunYieldResume] = asyncio.Queue() self._run_context: RunContext | None = None + self._request_context: RequestContext | None = None self._task_updater: TaskUpdater | None = None self._context_store: ContextStore = context_store self._lock: asyncio.Lock = asyncio.Lock() @@ -246,6 +247,12 @@ def run_context(self) -> RunContext: raise RuntimeError("Accessing run context for run that has not been started") return self._run_context + @property + def request_context(self) -> RequestContext: + if not self._request_context: + raise RuntimeError("Accessing request context for run that has not been started") + return self._request_context + @property def task_updater(self) -> TaskUpdater: if not self._task_updater: @@ -261,7 +268,6 @@ def _handle_finish(self) -> None: self._on_finish() async def start(self, request_context: RequestContext, event_queue: EventQueue): - # These are incorrectly typed in a2a async with self._lock: if self._working or self.done: raise RuntimeError("Attempting to start a run that is already executing or done") @@ -274,6 +280,7 @@ async def start(self, request_context: RequestContext, event_queue: EventQueue): current_task=request_context.current_task, related_tasks=request_context.related_tasks, ) + self._request_context = request_context self._task_updater = TaskUpdater(event_queue, task_id, context_id) if not request_context.current_task: await self._task_updater.submit() @@ -288,11 +295,12 @@ async def resume(self, request_context: RequestContext, event_queue: EventQueue) raise RuntimeError("Attempting to resume a run that is already executing or done") task_id, context_id, message = request_context.task_id, request_context.context_id, request_context.message assert task_id and context_id and message + self._request_context = request_context self._task_updater = TaskUpdater(event_queue, task_id, context_id) for dependency in self._agent.dependencies.values(): if dependency.extension: - dependency.extension.handle_incoming_message(message, self.run_context) + dependency.extension.handle_incoming_message(message, self.run_context, request_context) self._working = True await self.resume_queue.put(message) @@ -311,15 +319,15 @@ async def cancel(self, request_context: RequestContext, event_queue: EventQueue) await cancel_task(self._task) @asynccontextmanager - async def 
_dependencies_lifespan(self, message: Message) -> AsyncIterator[dict[str, Depends]]: + async def _dependencies_lifespan(self, message: Message) -> AsyncIterator[dict[str, Dependency]]: async with AsyncExitStack() as stack: - dependency_args: dict[str, Depends] = {} + dependency_args: dict[str, Dependency] = {} initialize_deps_exceptions: list[Exception] = [] for pname, depends in self._agent.dependencies.items(): # call dependencies with the first message and initialize their lifespan try: dependency_args[pname] = await stack.enter_async_context( - depends(message, self.run_context, dependency_args) + depends(message, self.run_context, self.request_context, dependency_args) ) except Exception as e: initialize_deps_exceptions.append(e) @@ -524,6 +532,8 @@ async def execute(self, context: RequestContext, event_queue: EventQueue) -> Non match await tapped_queue.dequeue_event(): case TaskStatusUpdateEvent(final=True): break + case _: + pass except CancelledError: if agent_run: @@ -571,14 +581,14 @@ async def cleanup_fn(): event = await queue.dequeue_event(no_wait=True) if not isinstance(event, TaskStatusUpdateEvent) or event.status.state != TaskState.canceled: raise RuntimeError(f"Something strange occured during scheduled cancel, event: {event}") - _ = await manager.save_task_event(event) + await manager.save_task_event(event) break await asyncio.sleep(2) except Exception as ex: logger.error("Error when cleaning up task", exc_info=ex) finally: - _ = self._running_tasks.pop(task_id, None) - _ = self._scheduled_cleanups.pop(task_id, None) + self._running_tasks.pop(task_id, None) + self._scheduled_cleanups.pop(task_id, None) self._scheduled_cleanups[task_id] = asyncio.create_task(cleanup_fn()) self._scheduled_cleanups[task_id].add_done_callback(lambda _: ...) 
diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/server/app.py b/apps/agentstack-sdk-py/src/agentstack_sdk/server/app.py index 15fbfe32b..070f69641 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/server/app.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/server/app.py @@ -18,6 +18,8 @@ from a2a.types import AgentInterface, TransportProtocol from fastapi import APIRouter, Depends, FastAPI from fastapi.applications import AppType +from starlette.authentication import AuthenticationBackend +from starlette.middleware.authentication import AuthenticationMiddleware from starlette.types import Lifespan from agentstack_sdk.server.agent import Agent, Executor @@ -37,6 +39,7 @@ def create_app( dependencies: list[Depends] | None = None, # pyright: ignore [reportGeneralTypeIssues] override_interfaces: bool = True, task_timeout: timedelta = timedelta(minutes=10), + auth_backend: AuthenticationBackend | None = None, **kwargs, ) -> FastAPI: queue_manager = queue_manager or InMemoryQueueManager() @@ -75,6 +78,10 @@ def create_app( **kwargs, ) + if auth_backend: + rest_app.add_middleware(AuthenticationMiddleware, backend=auth_backend) + jsonrpc_app.add_middleware(AuthenticationMiddleware, backend=auth_backend) + rest_app.mount("/jsonrpc", jsonrpc_app) rest_app.include_router(APIRouter(lifespan=lifespan)) return rest_app diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/server/dependencies.py b/apps/agentstack-sdk-py/src/agentstack_sdk/server/dependencies.py index 944a06c40..5e459b214 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/server/dependencies.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/server/dependencies.py @@ -1,6 +1,8 @@ # Copyright 2025 © BeeAI a Series of LF Projects, LLC # SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + import inspect from collections import Counter from collections.abc import AsyncIterator, Callable @@ -8,6 +10,7 @@ from inspect import isclass from typing import Annotated, Any, TypeAlias, Unpack, get_args, get_origin +from a2a.server.agent_execution.context import RequestContext from a2a.types import Message from typing_extensions import Doc @@ -15,7 +18,9 @@ from agentstack_sdk.a2a.extensions.base import BaseExtensionServer from agentstack_sdk.server.context import RunContext -Dependency: TypeAlias = Callable[[Message, RunContext, dict[str, "Dependency"]], Any] | BaseExtensionServer[Any, Any] +Dependency: TypeAlias = ( + Callable[[Message, RunContext, RequestContext, dict[str, "Dependency"]], Any] | BaseExtensionServer[Any, Any] +) # Inspired by fastapi.Depends @@ -34,17 +39,17 @@ def __init__( ), ], ): - self._dependency_callable = dependency + self._dependency_callable: Dependency = dependency if isinstance(dependency, BaseExtensionServer): self.extension = dependency def __call__( - self, message: Message, context: RunContext, dependencies: dict[str, Any] - ) -> AbstractAsyncContextManager[Any]: - instance = self._dependency_callable(message, context, dependencies) + self, message: Message, context: RunContext, request_context: RequestContext, dependencies: dict[str, Any] + ) -> AbstractAsyncContextManager[Dependency]: + instance = self._dependency_callable(message, context, request_context, dependencies) @asynccontextmanager - async def lifespan() -> AsyncIterator[Any]: + async def lifespan() -> AsyncIterator[Dependency]: if self.extension or hasattr(instance, "lifespan"): async with instance.lifespan(): yield instance @@ -80,10 +85,10 @@ def process_args(name: str, args: tuple[Any, ...]) -> None: elif 
inspect.isclass(param.annotation): # message: Message if param.annotation == Message: - dependencies[name] = Depends(lambda message, _context, _dependencies: message) + dependencies[name] = Depends(lambda message, _run_context, _request_context, _dependencies: message) # context: Context elif param.annotation == RunContext: - dependencies[name] = Depends(lambda _message, context, _dependencies: context) + dependencies[name] = Depends(lambda _message, run_context, _request_context, _dependencies: run_context) # extension: BaseExtensionServer = BaseExtensionSpec() # TODO: this does not get past linters, should we enable it or somehow fix the typing? # elif issubclass(param.annotation, BaseExtensionServer) and isinstance(param.default, BaseExtensionSpec): diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/server/exceptions.py b/apps/agentstack-sdk-py/src/agentstack_sdk/server/exceptions.py new file mode 100644 index 000000000..32f1f133b --- /dev/null +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/server/exceptions.py @@ -0,0 +1,3 @@ +# Copyright 2025 © BeeAI a Series of LF Projects, LLC +# SPDX-License-Identifier: Apache-2.0 + diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/server/middleware/__init__.py b/apps/agentstack-sdk-py/src/agentstack_sdk/server/middleware/__init__.py new file mode 100644 index 000000000..76c1d2392 --- /dev/null +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/server/middleware/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2026 © BeeAI a Series of LF Projects, LLC +# SPDX-License-Identifier: Apache-2.0 + diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/server/middleware/platform_auth_backend.py b/apps/agentstack-sdk-py/src/agentstack_sdk/server/middleware/platform_auth_backend.py new file mode 100644 index 000000000..aa316f110 --- /dev/null +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/server/middleware/platform_auth_backend.py @@ -0,0 +1,131 @@ +# Copyright 2025 © BeeAI a Series of LF Projects, LLC +# SPDX-License-Identifier: Apache-2.0 + +import logging +import os +from datetime import timedelta +from urllib.parse import urljoin + +from a2a.auth.user import User +from async_lru import alru_cache +from authlib.jose import JsonWebKey, JWTClaims, KeySet, jwt +from authlib.jose.errors import JoseError +from fastapi import Request +from fastapi.security import HTTPBearer +from pydantic import Secret +from starlette.authentication import ( + AuthCredentials, + AuthenticationBackend, + AuthenticationError, + BaseUser, +) +from starlette.requests import HTTPConnection +from typing_extensions import override + +from agentstack_sdk.platform import use_platform_client +from agentstack_sdk.types import JsonValue + +logger = logging.getLogger(__name__) + + +class PlatformAuthenticatedUser(User, BaseUser): + def __init__(self, claims: dict[str, JsonValue], auth_token: str): + self.claims: dict[str, JsonValue] = claims + self.auth_token: Secret[str] = Secret(auth_token) + + @property + @override + def is_authenticated(self) -> bool: + return True + + @property + @override + def user_name(self) -> str: + sub = self.claims.get("sub", None) + assert sub and isinstance(sub, str) + return sub + + @property + @override + def display_name(self) -> str: + name = self.claims.get("name", None) + assert name and isinstance(name, str) + return name + + @property + @override + def identity(self) -> str: + return self.user_name + + +@alru_cache(ttl=timedelta(minutes=15).seconds) +async def discover_jwks() -> KeySet: + try: + async with use_platform_client() as client: + response = 
await client.get("/.well-known/jwks") + return JsonWebKey.import_key_set(response.raise_for_status().json()) # pyright: ignore[reportAny] + except Exception as e: + url = "{platform_url}/.well-known/jwks" + logger.warning(f"JWKS discovery failed for url {url}: {e}") + raise RuntimeError(f"JWKS discovery failed for url {url}") from e + + +class PlatformAuthBackend(AuthenticationBackend): + def __init__(self, public_url: str | None = None, skip_audience_validation: bool | None = None) -> None: + self.skip_audience_validation: bool = ( + skip_audience_validation + if skip_audience_validation is not None + else os.getenv("PLATFORM_AUTH__SKIP_AUDIENCE_VALIDATION", "false").lower() in ("true", "1") + ) + self._audience: str | None = public_url or os.getenv("PLATFORM_AUTH__PUBLIC_URL", None) + if not self.skip_audience_validation and not self._audience: + logger.warning( + "Public URL is not provided and audience validation is enabled. Proceeding to check audience from the request target URL. " + + "This may not work when requests to agents are proxied. (hint: set PLATFORM_AUTH__PUBLIC_URL env variable)" + ) + + self.security: HTTPBearer = HTTPBearer(auto_error=False) + + @override + async def authenticate(self, conn: HTTPConnection) -> tuple[AuthCredentials, BaseUser] | None: + # We construct a Request object from the scope for compatibility with HTTPBearer and logging + request = Request(scope=conn.scope) + + if request.url.path in ["/healthcheck", "/.well-known/agent-card.json"]: + return None + + if not (auth := await self.security(request)): + raise AuthenticationError("Missing Authorization header") + + audiences: list[str] = [] + if not self.skip_audience_validation: + if self._audience: + audiences = [urljoin(self._audience, path) for path in ["/", "/jsonrpc"]] + else: + audiences = [str(request.url.replace(path=path)) for path in ["/", "/jsonrpc"]] + + try: + # check only hostname urljoin("http://host:port/a/b", "/") -> "http://host:port/" + jwks = await discover_jwks() + + # Verify signature + claims: JWTClaims = jwt.decode( + auth.credentials, + jwks, + claims_options={ + "sub": {"essential": True}, + "exp": {"essential": True}, + # "iss": {"essential": True}, # Issuer validation might be tricky if internal/external URLs differ + } + | ({"aud": {"essential": True, "values": audiences}} if not self.skip_audience_validation else {}), + ) + claims.validate() + + return AuthCredentials(["authenticated"]), PlatformAuthenticatedUser(claims, auth.credentials) + + except (ValueError, JoseError) as e: + logger.warning(f"Authentication failed: {e}") + raise AuthenticationError("Invalid token") from e + except Exception as e: + logger.error(f"Authentication error: {e}") + raise AuthenticationError(f"Authentication failed: {e}") from e diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/server/server.py b/apps/agentstack-sdk-py/src/agentstack_sdk/server/server.py index 3165ee4dd..af6c4b84f 100644 --- a/apps/agentstack-sdk-py/src/agentstack_sdk/server/server.py +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/server/server.py @@ -21,16 +21,15 @@ from a2a.types import AgentExtension from fastapi import FastAPI from fastapi.applications import AppType +from fastapi.responses import PlainTextResponse from httpx import HTTPError, HTTPStatusError from pydantic import AnyUrl +from starlette.authentication import AuthenticationBackend, AuthenticationError +from starlette.middleware.authentication import AuthenticationMiddleware +from starlette.requests import HTTPConnection from starlette.types import 
Lifespan from tenacity import AsyncRetrying, retry_if_exception_type, stop_after_attempt, wait_exponential -from agentstack_sdk.a2a.extensions import AgentDetail, AgentDetailExtensionSpec -from agentstack_sdk.a2a.extensions.services.platform import ( - _PlatformSelfRegistrationExtensionParams, - _PlatformSelfRegistrationExtensionSpec, -) from agentstack_sdk.platform import get_platform_client from agentstack_sdk.platform.client import PlatformClient from agentstack_sdk.platform.provider import Provider @@ -132,6 +131,7 @@ async def serve( factory: bool = False, h11_max_incomplete_event_size: int | None = None, self_registration_client_factory: Callable[[], PlatformClient] | None = None, + auth_backend: AuthenticationBackend | None = None, ) -> None: if self.server: raise RuntimeError("The server is already running") @@ -179,6 +179,11 @@ async def _lifespan_fn(app: FastAPI) -> AsyncGenerator[None, None]: self._agent.card.url = f"http://{host}:{port}" if self_registration: + from agentstack_sdk.a2a.extensions.services.platform import ( + _PlatformSelfRegistrationExtensionParams, + _PlatformSelfRegistrationExtensionSpec, + ) + self._agent.card.capabilities.extensions = [ *(self._agent.card.capabilities.extensions or []), *_PlatformSelfRegistrationExtensionSpec( @@ -198,6 +203,13 @@ async def _lifespan_fn(app: FastAPI) -> AsyncGenerator[None, None]: request_context_builder=request_context_builder, ) + if auth_backend: + + def on_error(connection: HTTPConnection, error: AuthenticationError) -> PlainTextResponse: + return PlainTextResponse("Unauthorized", status_code=401) + + app.add_middleware(AuthenticationMiddleware, backend=auth_backend, on_error=on_error) + if configure_logger: configure_logger_func(log_level) @@ -286,6 +298,8 @@ async def _reload_variables_periodically(self): await self._load_variables() async def _load_variables(self, first_run: bool = False) -> None: + from agentstack_sdk.a2a.extensions import AgentDetail, AgentDetailExtensionSpec + assert self.server and self._agent if not self._provider_id: return diff --git a/apps/agentstack-sdk-py/src/agentstack_sdk/types.py b/apps/agentstack-sdk-py/src/agentstack_sdk/types.py new file mode 100644 index 000000000..1c44b41b3 --- /dev/null +++ b/apps/agentstack-sdk-py/src/agentstack_sdk/types.py @@ -0,0 +1,15 @@ +# Copyright 2025 © BeeAI a Series of LF Projects, LLC +# SPDX-License-Identifier: Apache-2.0 + +from typing import TYPE_CHECKING, TypeAlias + +if TYPE_CHECKING: + JsonValue: TypeAlias = list["JsonValue"] | dict[str, "JsonValue"] | str | bool | int | float | None + JsonDict: TypeAlias = dict[str, JsonValue] +else: + from typing import Union + + from typing_extensions import TypeAliasType + + JsonValue = TypeAliasType("JsonValue", "Union[dict[str, JsonValue], list[JsonValue], str, int, float, bool, None]") # noqa: UP007 + JsonDict = TypeAliasType("JsonDict", "dict[str, JsonValue]") diff --git a/apps/agentstack-sdk-py/uv.lock b/apps/agentstack-sdk-py/uv.lock index 384682cbe..52c7efdec 100644 --- a/apps/agentstack-sdk-py/uv.lock +++ b/apps/agentstack-sdk-py/uv.lock @@ -29,7 +29,9 @@ source = { editable = "." 
} dependencies = [ { name = "a2a-sdk" }, { name = "anyio" }, + { name = "async-lru" }, { name = "asyncclick" }, + { name = "authlib" }, { name = "fastapi" }, { name = "httpx" }, { name = "janus" }, @@ -59,7 +61,9 @@ dev = [ requires-dist = [ { name = "a2a-sdk", specifier = "==0.3.21" }, { name = "anyio", specifier = ">=4.9.0" }, + { name = "async-lru", specifier = ">=2.0.4" }, { name = "asyncclick", specifier = ">=8.1.8" }, + { name = "authlib", specifier = ">=1.3.0" }, { name = "fastapi", specifier = ">=0.116.1" }, { name = "httpx" }, { name = "janus", specifier = ">=2.0.0" }, @@ -224,6 +228,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/be/317c2c55b8bbec407257d45f5c8d1b6867abc76d12043f2d3d58c538a4ea/asgiref-3.11.0-py3-none-any.whl", hash = "sha256:1db9021efadb0d9512ce8ffaf72fcef601c7b73a8807a1bb2ef143dc6b14846d", size = 24096, upload-time = "2025-11-19T15:32:19.004Z" }, ] +[[package]] +name = "async-lru" +version = "2.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = "2025-03-16T17:25:35.422Z" }, +] + [[package]] name = "asyncclick" version = "8.3.0.7" @@ -245,6 +258,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] +[[package]] +name = "authlib" +version = "1.6.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bb/9b/b1661026ff24bc641b76b78c5222d614776b0c085bcfdac9bd15a1cb4b35/authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e", size = 164894, upload-time = "2025-12-12T08:01:41.464Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" }, +] + [[package]] name = "beeai-framework" version = "0.1.73" diff --git a/apps/agentstack-sdk-ts/src/client/a2a/create-authenticated-fetch.ts b/apps/agentstack-sdk-ts/src/client/a2a/create-authenticated-fetch.ts new file mode 100644 index 000000000..4ebad1c26 --- /dev/null +++ b/apps/agentstack-sdk-ts/src/client/a2a/create-authenticated-fetch.ts @@ -0,0 +1,21 @@ +/** + * Copyright 2025 © BeeAI a Series of LF Projects, LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +export function createAuthenticatedFetch(token: string, baseFetch?: typeof fetch): typeof fetch { + const fetchImpl = baseFetch ?? (typeof globalThis.fetch !== 'undefined' ? globalThis.fetch : undefined); + + if (!fetchImpl) { + throw new Error( + 'fetch is not available. 
In Node.js < 18 or environments without global fetch, ' + + 'provide a fetch implementation via the baseFetch parameter.', + ); + } + + return async (input: RequestInfo | URL, init?: RequestInit) => { + const headers = new Headers(init?.headers); + headers.set('Authorization', `Bearer ${token}`); + return fetchImpl(input, { ...init, headers }); + }; +} diff --git a/apps/agentstack-sdk-ts/src/client/a2a/extensions/handle-agent-card.ts b/apps/agentstack-sdk-ts/src/client/a2a/extensions/handle-agent-card.ts index 940610394..6a788af10 100644 --- a/apps/agentstack-sdk-ts/src/client/a2a/extensions/handle-agent-card.ts +++ b/apps/agentstack-sdk-ts/src/client/a2a/extensions/handle-agent-card.ts @@ -33,6 +33,9 @@ export interface Fulfillments { secrets: (demand: SecretDemands) => Promise; form: (demand: FormDemands) => Promise; oauthRedirectUri: () => string | null; + /** + * @deprecated - keeping this for backwards compatibility, context token is now passed via A2A client headers + */ getContextToken: () => ContextToken; } diff --git a/apps/agentstack-sdk-ts/src/client/api/build-api-client.ts b/apps/agentstack-sdk-ts/src/client/api/build-api-client.ts index 5601d4255..4f19f6687 100644 --- a/apps/agentstack-sdk-ts/src/client/api/build-api-client.ts +++ b/apps/agentstack-sdk-ts/src/client/api/build-api-client.ts @@ -6,7 +6,14 @@ import type { z } from 'zod'; import type { ContextPermissionsGrant, GlobalPermissionsGrant, ModelCapability } from './types'; -import { contextSchema, contextTokenSchema, listConnectorsResponseSchema, modelProviderMatchSchema } from './types'; +import { + contextPermissionsGrantSchema, + contextSchema, + contextTokenSchema, + globalPermissionsGrantSchema, + listConnectorsResponseSchema, + modelProviderMatchSchema, +} from './types'; export interface MatchProvidersParams { suggestedModels: string[] | null; @@ -79,12 +86,15 @@ export const buildApiClient = ( await callApi('POST', '/api/v1/contexts', { metadata: {}, provider_id: providerId }, contextSchema); const createContextToken = async ({ contextId, globalPermissions, contextPermissions }: CreateContextTokenParams) => { + const validatedGlobalPerms = globalPermissionsGrantSchema.parse(globalPermissions); + const validatedContextPerms = contextPermissionsGrantSchema.parse(contextPermissions); + const token = await callApi( 'POST', `/api/v1/contexts/${contextId}/token`, { - grant_global_permissions: globalPermissions, - grant_context_permissions: contextPermissions, + grant_global_permissions: validatedGlobalPerms, + grant_context_permissions: validatedContextPerms, }, contextTokenSchema, ); diff --git a/apps/agentstack-sdk-ts/src/client/api/types.ts b/apps/agentstack-sdk-ts/src/client/api/types.ts index d678b2a5f..b2b8640e3 100644 --- a/apps/agentstack-sdk-ts/src/client/api/types.ts +++ b/apps/agentstack-sdk-ts/src/client/api/types.ts @@ -58,26 +58,50 @@ export const contextPermissionsGrantSchema = z.object({ export type ContextPermissionsGrant = z.infer; -export const globalPermissionsGrantSchema = contextPermissionsGrantSchema.extend({ - feedback: z.array(z.literal('write')).optional(), - - llm: z.array(z.union([z.literal('*'), resourceIdPermissionSchema])).optional(), - embeddings: z.array(z.union([z.literal('*'), resourceIdPermissionSchema])).optional(), - model_providers: z.array(z.literal(['read', 'write', '*'])).optional(), - - a2a_proxy: z.array(z.literal('*')).optional(), - - providers: z.array(z.literal(['read', 'write', '*'])).optional(), - provider_variables: z.array(z.literal(['read', 'write', 
'*'])).optional(), - - contexts: z.array(z.literal(['read', 'write', '*'])).optional(), - - mcp_providers: z.array(z.literal(['read', 'write', '*'])).optional(), - mcp_tools: z.array(z.literal(['read', '*'])).optional(), - mcp_proxy: z.array(z.literal('*')).optional(), - - connectors: z.array(z.literal(['read', 'write', 'proxy', '*'])).optional(), -}); +export const globalPermissionsGrantSchema = contextPermissionsGrantSchema + .extend({ + feedback: z.array(z.literal('write')).optional(), + + llm: z.array(z.union([z.literal('*'), resourceIdPermissionSchema])).optional(), + embeddings: z.array(z.union([z.literal('*'), resourceIdPermissionSchema])).optional(), + model_providers: z.array(z.literal(['read', 'write', '*'])).optional(), + + a2a_proxy: z.array(z.union([z.literal('*'), z.string()])).optional(), + + providers: z.array(z.literal(['read', 'write', '*'])).optional(), + provider_variables: z.array(z.literal(['read', 'write', '*'])).optional(), + + contexts: z.array(z.literal(['read', 'write', '*'])).optional(), + + mcp_providers: z.array(z.literal(['read', 'write', '*'])).optional(), + mcp_tools: z.array(z.literal(['read', '*'])).optional(), + mcp_proxy: z.array(z.literal('*')).optional(), + + connectors: z.array(z.literal(['read', 'write', 'proxy', '*'])).optional(), + }) + .superRefine((val, ctx) => { + if (!val.a2a_proxy) return; + + if (val.a2a_proxy.length === 0) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: 'a2a_proxy cannot be empty array', + path: ['a2a_proxy'], + }); + return; + } + + const hasWildcard = val.a2a_proxy.includes('*'); + const hasOthers = val.a2a_proxy.some((v) => v !== '*'); + + if (hasWildcard && hasOthers) { + ctx.addIssue({ + code: z.ZodIssueCode.custom, + message: "a2a_proxy cannot mix '*' with specific providers", + path: ['a2a_proxy'], + }); + } + }); export type GlobalPermissionsGrant = z.infer; diff --git a/apps/agentstack-sdk-ts/src/index.ts b/apps/agentstack-sdk-ts/src/index.ts index 1ef9a528d..838c48f99 100644 --- a/apps/agentstack-sdk-ts/src/index.ts +++ b/apps/agentstack-sdk-ts/src/index.ts @@ -3,6 +3,7 @@ * SPDX-License-Identifier: Apache-2.0 */ +export { createAuthenticatedFetch } from './client/a2a/create-authenticated-fetch'; export * from './client/a2a/extensions/common/form'; export * from './client/a2a/extensions/fulfillment-resolvers/build-llm-extension-fulfillment-resolver'; export { type Fulfillments, handleAgentCard } from './client/a2a/extensions/handle-agent-card'; diff --git a/apps/agentstack-server/src/agentstack_server/api/auth/auth.py b/apps/agentstack-server/src/agentstack_server/api/auth/auth.py index 2c7af26a3..deeee1c87 100644 --- a/apps/agentstack-server/src/agentstack_server/api/auth/auth.py +++ b/apps/agentstack-server/src/agentstack_server/api/auth/auth.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 import logging -from datetime import timedelta +from datetime import UTC, datetime, timedelta from typing import Any from uuid import UUID @@ -102,34 +102,38 @@ def issue_internal_jwt( global_permissions: Permissions, context_permissions: Permissions, configuration: Configuration, + audience: list[str] | None = None, + expires_at: AwareDatetime | None = None, ) -> tuple[str, AwareDatetime]: - assert configuration.auth.jwt_secret_key - secret_key = configuration.auth.jwt_secret_key.get_secret_value() + assert configuration.auth.jwt_private_key + secret_key = configuration.auth.jwt_private_key.get_secret_value() now = utc_now() - expires_at = now + timedelta(minutes=20) - header = {"alg": "HS256"} + if 
expires_at is None: + expires_at = now + timedelta(minutes=20) + header = {"alg": "RS256"} + payload = { "context_id": str(context_id), "sub": str(user_id), "exp": expires_at, "iat": now, "iss": "agentstack-server", - "aud": "agentstack-server", # the token is for ourselves, noone else should consume it + "aud": [*(audience or []), "agentstack-server"], "resource": [f"context:{context_id}"], "scope": { "global": global_permissions.model_dump(mode="json"), "context": context_permissions.model_dump(mode="json"), }, } - return jwt.encode(header, payload, key=secret_key), expires_at + return jwt.encode(header, payload, key=secret_key).decode("utf-8"), expires_at def verify_internal_jwt(token: str, configuration: Configuration) -> ParsedToken: - assert configuration.auth.jwt_secret_key - secret_key = configuration.auth.jwt_secret_key.get_secret_value() - payload = jwt.decode( + assert configuration.auth.jwt_public_key + public_key = configuration.auth.jwt_public_key.get_secret_value() + claims: JWTClaims = jwt.decode( token, - key=secret_key, + key=public_key, claims_options={ "sub": {"essential": True}, "exp": {"essential": True}, @@ -137,14 +141,31 @@ def verify_internal_jwt(token: str, configuration: Configuration) -> ParsedToken "aud": {"essential": True, "value": "agentstack-server"}, }, ) - context_id = UUID(payload["resource"][0].replace("context:", "")) + claims.validate() + context_id = UUID(claims["resource"][0].replace("context:", "")) # pyright: ignore[reportAny] return ParsedToken( - global_permissions=Permissions.model_validate(payload["scope"]["global"]), - context_permissions=Permissions.model_validate(payload["scope"]["context"]), + global_permissions=Permissions.model_validate(claims["scope"]["global"]), + context_permissions=Permissions.model_validate(claims["scope"]["context"]), context_id=context_id, - user_id=UUID(payload["sub"]), - iat=payload["iat"], - raw=payload, + user_id=UUID(claims["sub"]), # pyright: ignore[reportAny] + iat=claims["iat"], # pyright: ignore[reportAny] + raw=claims, + ) + + +def exchange_internal_jwt( + token: str, configuration: Configuration, audience: list[str] | None = None +) -> tuple[str, AwareDatetime]: + parsed_token = verify_internal_jwt(token, configuration) + expires_at = datetime.fromtimestamp(parsed_token.raw["exp"], UTC) + return issue_internal_jwt( + user_id=parsed_token.user_id, + context_id=parsed_token.context_id, + global_permissions=parsed_token.global_permissions, + context_permissions=parsed_token.context_permissions, + configuration=configuration, + audience=audience, + expires_at=expires_at, ) @@ -216,7 +237,7 @@ def extract_oauth_token( async def validate_jwt(token: str, *, provider: OidcProvider, aud: str | None = None) -> JWTClaims | Exception: keyset = await discover_jwks(provider) try: - claims = jwt.decode( + claims = jwt.decode( # pyright: ignore[reportUnknownMemberType] token, key=keyset, claims_options={ @@ -226,7 +247,7 @@ async def validate_jwt(token: str, *, provider: OidcProvider, aud: str | None = } | ({"aud": {"essential": True, "value": aud}} if aud is not None else {}), ) - claims.validate() + claims.validate() # pyright: ignore[reportUnknownMemberType] return claims except Exception as e: return e # Cache exception response diff --git a/apps/agentstack-server/src/agentstack_server/api/routes/a2a.py b/apps/agentstack-server/src/agentstack_server/api/routes/a2a.py index 45fbb1e7f..0c7ecd3d0 100644 --- a/apps/agentstack-server/src/agentstack_server/api/routes/a2a.py +++ 
b/apps/agentstack-server/src/agentstack_server/api/routes/a2a.py @@ -18,6 +18,7 @@ ConfigurationDependency, ProviderServiceDependency, RequiresPermissions, + authorized_user, ) from agentstack_server.configuration import Configuration from agentstack_server.domain.models.permissions import AuthorizedUser @@ -71,8 +72,13 @@ async def get_agent_card( request: Request, provider_service: ProviderServiceDependency, configuration: ConfigurationDependency, - _: Annotated[AuthorizedUser, Depends(RequiresPermissions(providers={"read"}))], + user: Annotated[AuthorizedUser, Depends(authorized_user)], ) -> AgentCard: + try: + user = RequiresPermissions(providers={"read"})(user) # try provider read permissions + except HTTPException: + user = RequiresPermissions(a2a_proxy={provider_id})(user) # try a2a proxy permissions + provider = await provider_service.get_provider(provider_id=provider_id) return create_proxy_agent_card( provider.agent_card, provider_id=provider.id, request=request, configuration=configuration @@ -87,8 +93,10 @@ async def a2a_proxy_jsonrpc_transport( a2a_proxy: A2AProxyServiceDependency, provider_service: ProviderServiceDependency, configuration: ConfigurationDependency, - user: Annotated[AuthorizedUser, Depends(RequiresPermissions(a2a_proxy={"*"}))], + user: Annotated[AuthorizedUser, Depends(authorized_user)], ): + user = RequiresPermissions(a2a_proxy={provider_id})(user) + provider = await provider_service.get_provider(provider_id=provider_id) agent_card = create_proxy_agent_card( provider.agent_card, provider_id=provider.id, request=request, configuration=configuration @@ -109,9 +117,10 @@ async def a2a_proxy_http_transport( a2a_proxy: A2AProxyServiceDependency, provider_service: ProviderServiceDependency, configuration: ConfigurationDependency, - user: Annotated[AuthorizedUser, Depends(RequiresPermissions(a2a_proxy={"*"}))], + user: Annotated[AuthorizedUser, Depends(authorized_user)], path: str = "", ): + user = RequiresPermissions(a2a_proxy={provider_id})(user) provider = await provider_service.get_provider(provider_id=provider_id) agent_card = create_proxy_agent_card( provider.agent_card, provider_id=provider.id, request=request, configuration=configuration diff --git a/apps/agentstack-server/src/agentstack_server/api/routes/auth.py b/apps/agentstack-server/src/agentstack_server/api/routes/auth.py index 29fb40010..2876a0093 100644 --- a/apps/agentstack-server/src/agentstack_server/api/routes/auth.py +++ b/apps/agentstack-server/src/agentstack_server/api/routes/auth.py @@ -2,10 +2,12 @@ # SPDX-License-Identifier: Apache-2.0 import logging +from authlib.jose.rfc7517 import JsonWebKey from fastapi import APIRouter, Request from agentstack_server.api.auth.utils import create_resource_uri from agentstack_server.api.dependencies import AuthServiceDependency +from agentstack_server.configuration import get_configuration logger = logging.getLogger(__name__) @@ -19,3 +21,10 @@ def protected_resource_metadata( resource: str = "", ): return auth_service.protected_resource_metadata(resource=create_resource_uri(request.url.replace(path=resource))) + + +@well_known_router.get("/jwks") +def jwks(): + config = get_configuration() + key = JsonWebKey.import_key(config.auth.jwt_public_key.get_secret_value(), {"use": "sig", "alg": "RS256"}) + return {"keys": [key.as_dict()]} diff --git a/apps/agentstack-server/src/agentstack_server/api/schema/contexts.py b/apps/agentstack-server/src/agentstack_server/api/schema/contexts.py index 00aa765d1..db17ee125 100644 --- 
a/apps/agentstack-server/src/agentstack_server/api/schema/contexts.py +++ b/apps/agentstack-server/src/agentstack_server/api/schema/contexts.py @@ -3,7 +3,7 @@ from typing import Literal from uuid import UUID -from pydantic import AwareDatetime, BaseModel, Field, RootModel +from pydantic import AwareDatetime, BaseModel, Field, RootModel, field_validator from agentstack_server.api.schema.common import PaginationQuery from agentstack_server.domain.models.common import Metadata, MetadataPatch @@ -50,7 +50,7 @@ class GlobalPermissionGrant(BaseModel): embeddings: list[Literal["*"] | str] = Field(default_factory=list) model_providers: list[Literal["read", "write", "*"]] = Field(default_factory=list) - a2a_proxy: list[Literal["*"]] = Field(default_factory=list) + a2a_proxy: list[Literal["*"]] | list[UUID] = Field(default_factory=list) # agent providers providers: list[Literal["read", "write", "*"]] = Field( @@ -67,6 +67,13 @@ class GlobalPermissionGrant(BaseModel): connectors: list[Literal["read", "write", "proxy", "*"]] = Field(default_factory=list) + @field_validator("a2a_proxy", mode="after") + @classmethod + def validate_a2a_proxy(cls, v: list[Literal["*"]] | list[UUID]) -> list[Literal["*"]] | list[UUID]: + if "*" in v and len(v) > 1: + raise ValueError("a2a_proxy cannot be a mix of * and UUIDs") + return v + class ContextTokenCreateRequest(BaseModel): grant_global_permissions: GlobalPermissionGrant = Field( diff --git a/apps/agentstack-server/src/agentstack_server/configuration.py b/apps/agentstack-server/src/agentstack_server/configuration.py index 275064cb6..48468a841 100644 --- a/apps/agentstack-server/src/agentstack_server/configuration.py +++ b/apps/agentstack-server/src/agentstack_server/configuration.py @@ -11,6 +11,7 @@ from textwrap import dedent from typing import Any, Literal, cast +from authlib.jose import jwt from limits import RateLimitItem, parse_many from pydantic import AnyUrl, BaseModel, Field, Secret, ValidationError, field_validator, model_validator from pydantic_core.core_schema import ValidationInfo @@ -118,7 +119,8 @@ def validate_auth(self): class AuthConfiguration(BaseModel): - jwt_secret_key: Secret[str] = Secret("dummy") + jwt_private_key: Secret[str] = Secret("dummy") + jwt_public_key: Secret[str] = Secret("dummy") disable_auth: bool = False oidc: OidcConfiguration = Field(default_factory=OidcConfiguration) basic: BasicAuthConfiguration = Field(default_factory=BasicAuthConfiguration) @@ -129,8 +131,24 @@ def validate_auth(self): return self if not self.basic.enabled and not self.oidc.enabled: raise ValueError("If auth is enabled, either basic or oidc must be enabled") - if self.jwt_secret_key.get_secret_value() == "dummy": - raise ValueError("JWT secret key must be provided if authentication is enabled") + return self + + @model_validator(mode="after") + def set_default_jwt_keys(self): + if self.jwt_private_key.get_secret_value() == "dummy" or self.jwt_public_key.get_secret_value() == "dummy": + logger.warning("JWT private and public keys are not set. 
Generating default keys.") + from authlib.jose import JsonWebKey + + key = JsonWebKey.generate_key("RSA", 4096, is_private=True) + self.jwt_private_key = Secret(key.as_pem(is_private=True).decode("utf-8")) + self.jwt_public_key = Secret(key.as_pem(is_private=False).decode("utf-8")) + else: + try: + # Verify that the keys are matching + token = jwt.encode({"alg": "RS256"}, {"test": "payload"}, self.jwt_private_key.get_secret_value()) + jwt.decode(token, self.jwt_public_key.get_secret_value()) + except Exception as e: + raise ValueError(f"JWT private and public keys do not match or are invalid: {e}") from e return self diff --git a/apps/agentstack-server/src/agentstack_server/domain/models/permissions.py b/apps/agentstack-server/src/agentstack_server/domain/models/permissions.py index 4876afe0a..59bae3cae 100644 --- a/apps/agentstack-server/src/agentstack_server/domain/models/permissions.py +++ b/apps/agentstack-server/src/agentstack_server/domain/models/permissions.py @@ -30,7 +30,7 @@ class Permissions(BaseModel): llm: SerializeAsAny[set[Literal["*"] | str]] = set() embeddings: SerializeAsAny[set[Literal["*"] | str]] = set() - a2a_proxy: SerializeAsAny[set[Literal["*"]]] = set() + a2a_proxy: SerializeAsAny[set[Literal["*"] | UUID]] = set() # agent providers providers: SerializeAsAny[set[Literal["read", "write", "*"]]] = set() # write includes "show logs" permission diff --git a/apps/agentstack-server/src/agentstack_server/exceptions.py b/apps/agentstack-server/src/agentstack_server/exceptions.py index 07770c08d..01efcbb44 100644 --- a/apps/agentstack-server/src/agentstack_server/exceptions.py +++ b/apps/agentstack-server/src/agentstack_server/exceptions.py @@ -32,8 +32,6 @@ def __init__( class ManifestLoadError(PlatformError): - location: "ProviderLocation" - def __init__( self, location: "ProviderLocation", message: str | None = None, status_code: int = status.HTTP_404_NOT_FOUND ): @@ -98,7 +96,7 @@ class ModelLoadFailedError(PlatformError): def __init__( self, provider: ModelProvider, exception: HTTPError, status_code: int = status.HTTP_424_FAILED_DEPENDENCY ): - from agentstack_server.application import extract_messages + from agentstack_server.utils.utils import extract_messages super().__init__( f"Failed to load models from {provider.type} provider ({provider.base_url}): {extract_messages(exception)}", diff --git a/apps/agentstack-server/src/agentstack_server/infrastructure/kubernetes/provider_deployment_manager.py b/apps/agentstack-server/src/agentstack_server/infrastructure/kubernetes/provider_deployment_manager.py index 19cb1a865..458793be7 100644 --- a/apps/agentstack-server/src/agentstack_server/infrastructure/kubernetes/provider_deployment_manager.py +++ b/apps/agentstack-server/src/agentstack_server/infrastructure/kubernetes/provider_deployment_manager.py @@ -96,7 +96,10 @@ async def create_or_replace(self, *, provider: Provider, env: dict[str, str] | N ), api=api, ) - env = {**(env or {}), **global_provider_variables()} + env = { + **(env or {}), + **global_provider_variables(provider_url=await self.get_provider_url(provider_id=provider.id)), + } secret = Secret( await self._render_template( TemplateKind.SECRET, diff --git a/apps/agentstack-server/src/agentstack_server/service_layer/deployment_manager.py b/apps/agentstack-server/src/agentstack_server/service_layer/deployment_manager.py index f41ed1f74..4b72a6131 100644 --- a/apps/agentstack-server/src/agentstack_server/service_layer/deployment_manager.py +++ 
b/apps/agentstack-server/src/agentstack_server/service_layer/deployment_manager.py @@ -14,12 +14,13 @@ @inject -def global_provider_variables(configuration: Configuration): +def global_provider_variables(configuration: Configuration, provider_url: HttpUrl): return { "PORT": "8000", "HOST": "0.0.0.0", "OTEL_EXPORTER_OTLP_ENDPOINT": str(configuration.telemetry.collector_url), "PLATFORM_URL": f"http://{configuration.platform_service_url}", + "PLATFORM_AUTH__PUBLIC_URL": str(provider_url), } diff --git a/apps/agentstack-server/src/agentstack_server/service_layer/services/a2a.py b/apps/agentstack-server/src/agentstack_server/service_layer/services/a2a.py index cc26d262f..baf68f0c2 100644 --- a/apps/agentstack-server/src/agentstack_server/service_layer/services/a2a.py +++ b/apps/agentstack-server/src/agentstack_server/service_layer/services/a2a.py @@ -42,8 +42,11 @@ from kink import inject from opentelemetry import trace from pydantic import HttpUrl +from starlette.datastructures import URL from structlog.contextvars import bind_contextvars, unbind_contextvars +from agentstack_server.api.auth.auth import exchange_internal_jwt +from agentstack_server.api.auth.utils import create_resource_uri from agentstack_server.configuration import Configuration from agentstack_server.domain.models.provider import ( NetworkProviderLocation, @@ -149,9 +152,11 @@ def __init__( # Calling the factory have side-effects, such as rotating the agent agent_card_factory: Callable[[], Awaitable[AgentCard]] | None = None, agent_card: AgentCard | None = None, + configuration: Configuration, ): if agent_card_factory is None and agent_card is None: raise ValueError("One of agent_card_factory or agent_card must be provided") + self._configuration = configuration self._agent_card_factory = agent_card_factory self._agent_card = agent_card self._provider_id = provider_id @@ -159,12 +164,28 @@ def __init__( self._uow = uow @asynccontextmanager - async def _client_transport(self) -> AsyncIterator[ClientTransport]: + async def _client_transport(self, context: ServerCallContext | None = None) -> AsyncIterator[ClientTransport]: + from fastapi.security.utils import get_authorization_scheme_param + if self._agent_card is None: assert self._agent_card_factory is not None self._agent_card = await self._agent_card_factory() - async with httpx.AsyncClient(follow_redirects=True, timeout=timedelta(hours=1).total_seconds()) as httpx_client: + headers = {} if not context else context.state.get("headers", {}) + headers.pop("host", None) + headers.pop("content-length", None) + if auth_header := headers.get("authorization", None): + _scheme, header_token = get_authorization_scheme_param(auth_header) + try: + audience = create_resource_uri(URL(self._agent_card.url)) + token, _ = exchange_internal_jwt(header_token, self._configuration, audience=[audience]) + headers["authorization"] = f"Bearer {token}" + except Exception: + headers.pop("authorization", None) # forward header only if it's a valid context token + + async with httpx.AsyncClient( + follow_redirects=True, timeout=timedelta(hours=1).total_seconds(), headers=headers + ) as httpx_client: client: BaseClient = cast( BaseClient, ClientFactory(config=ClientConfig(httpx_client=httpx_client)).create(card=self._agent_card), @@ -207,13 +228,13 @@ def _forward_context(self, context: ServerCallContext | None = None) -> ClientCa @_handle_exception async def on_get_task(self, params: TaskQueryParams, context: ServerCallContext | None = None) -> Task | None: await self._check_task(params.id) - async 
with self._client_transport() as transport: + async with self._client_transport(context) as transport: return await transport.get_task(params, context=self._forward_context(context)) @_handle_exception async def on_cancel_task(self, params: TaskIdParams, context: ServerCallContext | None = None) -> Task | None: await self._check_task(params.id) - async with self._client_transport() as transport: + async with self._client_transport(context) as transport: return await transport.cancel_task(params, context=self._forward_context(context)) @_handle_exception @@ -226,7 +247,7 @@ async def on_message_send( params.message.context_id = params.message.context_id or str(uuid.uuid4()) await self._check_and_record_request(params.message.task_id, params.message.context_id, trace_id=trace_id) - async with self._client_transport() as transport: + async with self._client_transport(context) as transport: response = await transport.send_message(params, context=self._forward_context(context)) match response: case Task(id=task_id) | Message(task_id=task_id): @@ -248,7 +269,7 @@ async def on_message_send_stream( seen_tasks = {params.message.task_id} if params.message.task_id else set() - async with self._client_transport() as transport: + async with self._client_transport(context) as transport: async for event in transport.send_message_streaming(params, context=self._forward_context(context)): match event: case ( @@ -276,7 +297,7 @@ async def on_set_task_push_notification_config( context: ServerCallContext | None = None, ) -> TaskPushNotificationConfig: await self._check_task(params.task_id) - async with self._client_transport() as transport: + async with self._client_transport(context) as transport: return await transport.set_task_callback(params) @_handle_exception @@ -286,7 +307,7 @@ async def on_get_task_push_notification_config( context: ServerCallContext | None = None, ) -> TaskPushNotificationConfig: await self._check_task(params.id) - async with self._client_transport() as transport: + async with self._client_transport(context) as transport: if isinstance(params, TaskIdParams): params = GetTaskPushNotificationConfigParams(id=params.id, metadata=params.metadata) return await transport.get_task_callback(params, context=self._forward_context(context)) @@ -296,7 +317,7 @@ async def on_resubscribe_to_task( self, params: TaskIdParams, context: ServerCallContext | None = None ) -> AsyncGenerator[Event]: await self._check_task(params.id) - async with self._client_transport() as transport: + async with self._client_transport(context) as transport: async for event in transport.resubscribe(params): yield event @@ -345,6 +366,7 @@ async def agent_card_factory() -> AgentCard: provider_id=provider.id, uow=self._uow, user=user, + configuration=self._config, ) async def expire_requests(self) -> dict[str, int]: diff --git a/apps/agentstack-server/tasks.toml b/apps/agentstack-server/tasks.toml index d18ef284b..97ad7a529 100644 --- a/apps/agentstack-server/tasks.toml +++ b/apps/agentstack-server/tasks.toml @@ -260,8 +260,11 @@ run = """ #!/bin/bash VM_NAME=e2e-test-run -{{ mise_bin }} run agentstack:stop-all -{{ mise_bin }} run agentstack:delete --vm-name=${VM_NAME} +NO_CLEAN='{{flag(name="no-clean")}}' +if [ "$NO_CLEAN" != "true" ]; then + {{ mise_bin }} run agentstack:stop-all + {{ mise_bin }} run agentstack:delete --vm-name=${VM_NAME} +fi curl http://localhost:8333 >/dev/null 2>&1 && echo "Another instance at localhost:8333 is already running" && exit 2 {{ mise_bin }} run agentstack:start \ @@ -271,10 +274,7 @@ curl 
http://localhost:8333 >/dev/null 2>&1 && echo "Another instance at localhos --set auth.enabled="true" \ --set auth.basic.enabled="true" \ --set auth.basic.adminPassword="test-password" \ - --set auth.jwtSecretKey="test-secret-key" \ --set docling.enabled=true \ - --set providerBuilds.enabled=true \ - --set localDockerRegistry.enabled=true \ --set connector.presets[0].url=mcp+stdio://test \ --set connector.presets[0].stdio.image=mcp/aws-documentation \ --set connector.presets[0].metadata.name="Test MCP Server" @@ -282,6 +282,14 @@ curl http://localhost:8333 >/dev/null 2>&1 && echo "Another instance at localhos eval "$( {{ mise_bin }} run agentstack:shell --vm-name="$VM_NAME" )" +if [ -z "${TEST_AGENT_IMAGE}" ]; then + echo "Building test agent..." + # Build chat agent and push to local registry + TEST_AGENT_IMAGE=agentstack-registry-svc.default:5001/chat-test:latest + {{ mise_bin }} run agentstack-cli:run -- client-side-build -v "${PWD}/../.." --vm-name=${VM_NAME} --dockerfile "${PWD}/../../agents/chat/Dockerfile" --tag ${TEST_AGENT_IMAGE} +fi +export TEST_AGENT_IMAGE + export SERVER_URL="http://localhost:8333" export DB_URL="postgresql+asyncpg://agentstack-user:password@localhost:5432/agentstack" export LLM_API_BASE="${LLM_API_BASE:-http://host.docker.internal:11434/v1}" @@ -302,7 +310,12 @@ if [ $result -ne 0 ]; then kubectl get event fi -{{ mise_bin }} run agentstack-cli:run -- platform delete --vm-name=${VM_NAME} +if [ "$NO_CLEAN" != "true" ]; then + {{ mise_bin }} run agentstack-cli:run -- platform delete --vm-name=${VM_NAME} +else + {{ mise_bin }} run agentstack-cli:run -- platform stop --vm-name=${VM_NAME} +fi + kill %1 exit $result """ diff --git a/apps/agentstack-server/tests/conftest.py b/apps/agentstack-server/tests/conftest.py index 0036df008..b92cc1a8f 100644 --- a/apps/agentstack-server/tests/conftest.py +++ b/apps/agentstack-server/tests/conftest.py @@ -26,7 +26,7 @@ class TestConfiguration(BaseSettings): llm_api_base: Secret[str] = Secret("http://localhost:11434/v1") llm_model: str = "other:llama3.1:8b" llm_api_key: Secret[str] = Secret("dummy") - test_agent_image: str = "ghcr.io/i-am-bee/agentstack/agents/chat:0.4.2-rc9" + test_agent_image: str = "agentstack-registry-svc.default:5001/chat-test:latest" test_agent_build_repo: str = "https://github.com/i-am-bee/agentstack-starter" server_url: str = "http://agentstack-server-svc:8333" db_url: str = "postgresql+asyncpg://agentstack-user:password@postgresql:5432/agentstack" diff --git a/apps/agentstack-server/tests/e2e/agents/conftest.py b/apps/agentstack-server/tests/e2e/agents/conftest.py index 655207860..4ec71b6c6 100644 --- a/apps/agentstack-server/tests/e2e/agents/conftest.py +++ b/apps/agentstack-server/tests/e2e/agents/conftest.py @@ -10,6 +10,7 @@ from a2a.client import Client from a2a.types import AgentCard from agentstack_sdk.platform import PlatformClient, Provider +from agentstack_sdk.platform.context import ContextToken from agentstack_sdk.server import Server from agentstack_sdk.server.store.context_store import ContextStore from tenacity import AsyncRetrying, stop_after_attempt, wait_fixed @@ -21,7 +22,8 @@ async def run_server( server: Server, port: int, - a2a_client_factory: Callable[[AgentCard | dict[str, Any]], AsyncIterator[Client]], + a2a_client_factory: Callable[[AgentCard | dict[str, Any], ContextToken], AsyncIterator[Client]], + context_token: ContextToken, context_store: ContextStore | None = None, ) -> AsyncGenerator[tuple[Server, Client]]: async with asyncio.TaskGroup() as tg: @@ -41,7 +43,7 @@ async 
def run_server( raise ConnectionError("Server hasn't started yet") providers = [p for p in await Provider.list() if f":{port}" in p.source] assert len(providers) == 1, "Provider not registered" - async with a2a_client_factory(providers[0].agent_card) as client: + async with a2a_client_factory(providers[0].agent_card, context_token=context_token) as client: yield server, client finally: server.should_exit = True @@ -57,11 +59,19 @@ def create_server_with_agent( """Factory fixture that creates a server with the given agent function.""" @asynccontextmanager - async def _create_server(agent_fn, context_store: ContextStore | None = None): + async def _create_server( + agent_fn, + context_token: ContextToken, + context_store: ContextStore | None = None, + ): server = Server() server.agent()(agent_fn) async with run_server( - server, free_port, a2a_client_factory=a2a_client_factory, context_store=context_store + server, + free_port, + a2a_client_factory=a2a_client_factory, + context_store=context_store, + context_token=context_token, ) as (server, client): yield server, client diff --git a/apps/agentstack-server/tests/e2e/agents/test_agent_builds.py b/apps/agentstack-server/tests/e2e/agents/test_agent_builds.py index 6c124d565..bfb22375c 100644 --- a/apps/agentstack-server/tests/e2e/agents/test_agent_builds.py +++ b/apps/agentstack-server/tests/e2e/agents/test_agent_builds.py @@ -10,6 +10,7 @@ TaskState, ) from agentstack_sdk.platform import AddProvider, BuildState, Provider, ProviderBuild +from agentstack_sdk.platform.context import Context pytestmark = pytest.mark.e2e @@ -41,12 +42,16 @@ async def test_remote_agent_build_and_start( with subtests.test("run example agent"): providers = await Provider.list() assert len(providers) == 1 - assert providers[0].source == build.destination - assert providers[0].id == build.provider_id - assert providers[0].agent_card - assert test_configuration.test_agent_build_repo in providers[0].origin + provider = providers[0] + assert provider.source == build.destination + assert provider.id == build.provider_id + assert provider.agent_card + assert test_configuration.test_agent_build_repo in provider.origin - async with a2a_client_factory(providers[0].agent_card) as a2a_client: + context = await Context.create() + context_token = await context.generate_token(providers={provider.id}) + + async with a2a_client_factory(provider.agent_card, context_token) as a2a_client: message = create_text_message_object(content="test of sirens") task = await get_final_task_from_stream(a2a_client.send_message(message)) diff --git a/apps/agentstack-server/tests/e2e/agents/test_agent_starts.py b/apps/agentstack-server/tests/e2e/agents/test_agent_starts.py index 8ccd78a01..3edc14ee0 100644 --- a/apps/agentstack-server/tests/e2e/agents/test_agent_starts.py +++ b/apps/agentstack-server/tests/e2e/agents/test_agent_starts.py @@ -2,23 +2,18 @@ # SPDX-License-Identifier: Apache-2.0 import asyncio +import uuid +from textwrap import dedent +import kr8s import pytest +from a2a.client import A2AClientHTTPError from a2a.client.helpers import create_text_message_object -from a2a.types import ( - Role, - Task, - TaskState, -) -from agentstack_sdk.a2a.extensions import ( - LLMFulfillment, - LLMServiceExtensionClient, - LLMServiceExtensionSpec, - PlatformApiExtensionClient, - PlatformApiExtensionSpec, -) +from a2a.types import Role, Task, TaskState +from agentstack_sdk.a2a.extensions import LLMFulfillment, LLMServiceExtensionClient, LLMServiceExtensionSpec from agentstack_sdk.platform import 
ModelProvider, Provider from agentstack_sdk.platform.context import Context, ContextPermissions, Permissions +from kr8s.asyncio.objects import Deployment pytestmark = pytest.mark.e2e @@ -29,13 +24,16 @@ def extract_agent_text_from_stream(task: Task) -> str: @pytest.mark.usefixtures("clean_up", "setup_real_llm", "setup_platform_client") -async def test_remote_agent(subtests, a2a_client_factory, get_final_task_from_stream, test_configuration): +async def test_imported_agent( + subtests, a2a_client_factory, get_final_task_from_stream, test_configuration, kr8s_client: kr8s.Api +): agent_image = test_configuration.test_agent_image with subtests.test("add chat agent"): _ = await Provider.create(location=agent_image) providers = await Provider.list() context = await Context.create() context_token = await context.generate_token( + providers=providers, grant_global_permissions=Permissions(llm={"*"}), grant_context_permissions=ContextPermissions(context_data={"*"}), ) @@ -43,47 +41,98 @@ async def test_remote_agent(subtests, a2a_client_factory, get_final_task_from_st assert providers[0].source == agent_image assert providers[0].agent_card - async with a2a_client_factory(providers[0].agent_card) as a2a_client: - with subtests.test("run chat agent for the first time"): - num_parallel = 3 - message = create_text_message_object( - content=( - "How do you say informal hello in italian in 4 letters? " - "DO NOT SEARCH THE INTERNET FOR THIS, ANSWER DIRECTLY FROM YOUR KNOWLEDGE. " - "ANSWER ONLY THOSE FOUR LETTERS." - ) - ) - spec = LLMServiceExtensionSpec.from_agent_card(providers[0].agent_card) - platform_api_spec = PlatformApiExtensionSpec.from_agent_card(providers[0].agent_card) - message.metadata = LLMServiceExtensionClient(spec).fulfillment_metadata( - llm_fulfillments={ - "default": LLMFulfillment( - api_key=context_token.token.get_secret_value(), - api_model=(await ModelProvider.match())[0].model_id, - api_base="{platform_url}/api/v1/openai/", - ) - } - ) | PlatformApiExtensionClient(platform_api_spec).api_auth_metadata( - auth_token=context_token.token, - expires_at=context_token.expires_at, + async with a2a_client_factory(providers[0].agent_card, context_token) as a2a_client: + with subtests.test("run chat agent for the first time"): + num_parallel = 3 + message = create_text_message_object( + content=( + "How do you say informal hello in italian in 4 letters? " + "DO NOT SEARCH THE INTERNET FOR THIS, ANSWER DIRECTLY FROM YOUR KNOWLEDGE. " + "ANSWER ONLY THOSE FOUR LETTERS." 
) - message.context_id = context.id - task = await get_final_task_from_stream(a2a_client.send_message(message)) + ) + spec = LLMServiceExtensionSpec.from_agent_card(providers[0].agent_card) + message.metadata = LLMServiceExtensionClient(spec).fulfillment_metadata( + llm_fulfillments={ + "default": LLMFulfillment( + api_key=context_token.token.get_secret_value(), + api_model=(await ModelProvider.match())[0].model_id, + api_base="{platform_url}/api/v1/openai/", + ) + } + ) + message.context_id = context.id + task = await get_final_task_from_stream(a2a_client.send_message(message)) - # Verify response + # Verify response + assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}" + assert "ciao" in extract_agent_text_from_stream(task).lower() + + # Run 3 requests in parallel (test that each request waits) + run_results = await asyncio.gather( + *(get_final_task_from_stream(a2a_client.send_message(message)) for _ in range(num_parallel)) + ) + + for task in run_results: assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}" assert "ciao" in extract_agent_text_from_stream(task).lower() - # Run 3 requests in parallel (test that each request waits) - run_results = await asyncio.gather( - *(get_final_task_from_stream(a2a_client.send_message(message)) for _ in range(num_parallel)) - ) + with subtests.test("run chat agent for the second time"): + task = await get_final_task_from_stream(a2a_client.send_message(message)) + assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}" + assert "ciao" in extract_agent_text_from_stream(task).lower() - for task in run_results: - assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}" - assert "ciao" in extract_agent_text_from_stream(task).lower() + with subtests.test("the context token will not work with direct call to agent (server exchange is required)"): + deployment = await Deployment.get("agentstack-server", api=kr8s_client) + script = dedent( + f"""\ + import asyncio + import sys + import httpx - with subtests.test("run chat agent for the second time"): - task = await get_final_task_from_stream(a2a_client.send_message(message)) - assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}" - assert "ciao" in extract_agent_text_from_stream(task).lower() + async def main(): + url = "http://agentstack-provider-{providers[0].id}-svc:8000/jsonrpc/" + print(f"Connecting to {{url}}...") + try: + async with httpx.AsyncClient(timeout=None, headers={{"Authorization": "Bearer {context_token.token.get_secret_value()}"}}) as httpx_client: + response = await httpx_client.post(url, json={{ + "jsonrpc": "2.0", + "id": "1", + "method": "message/send", + "params": {{ + "message": {{ + "role": "agent", + "parts": [{{"kind": "text", "text": "Hello"}}], + "messageId": "1", + "kind": "message", + }} + }} + }}) + response.raise_for_status() + except Exception as e: + if "401" in str(e): + print("Success: Request failed as expected with 401") + sys.exit(0) + print(f"Error: {{e}}") + sys.exit(1) + + print("Error: Request succeeded unexpectedly") + sys.exit(1) + + asyncio.run(main()) + """ + ) + resp = await deployment.exec(["python", "-c", script], check=False) + assert resp.returncode == 0, resp.stdout.decode("utf-8") + "\n" + resp.stderr.decode("utf-8") + + invalid_context_token = await context.generate_token( + providers=[str(uuid.uuid4())], # different target provider + 
grant_global_permissions=Permissions(llm={"*"}), + grant_context_permissions=ContextPermissions(context_data={"*"}), + ) + async with a2a_client_factory(providers[0].agent_card, invalid_context_token) as a2a_client: + with ( + subtests.test("run chat agent with invalid token"), + pytest.raises(A2AClientHTTPError, match="403 Forbidden"), + ): + await get_final_task_from_stream(a2a_client.send_message(message)) diff --git a/apps/agentstack-server/tests/e2e/agents/test_context_store.py b/apps/agentstack-server/tests/e2e/agents/test_context_store.py index da475462d..8789d6eaa 100644 --- a/apps/agentstack-server/tests/e2e/agents/test_context_store.py +++ b/apps/agentstack-server/tests/e2e/agents/test_context_store.py @@ -8,7 +8,7 @@ from a2a.types import Message, Role, Task from agentstack_sdk.a2a.extensions.services.platform import PlatformApiExtensionClient, PlatformApiExtensionSpec from agentstack_sdk.a2a.types import RunYield -from agentstack_sdk.platform.context import Context, ContextPermissions, ContextToken +from agentstack_sdk.platform.context import Context, ContextPermissions, ContextToken, Permissions from agentstack_sdk.server import Server from agentstack_sdk.server.context import RunContext from agentstack_sdk.server.store.platform_context_store import PlatformContextStore @@ -38,7 +38,13 @@ async def history_agent(input: Message, context: RunContext) -> AsyncGenerator[R yield message await context.store(message) - async with create_server_with_agent(history_agent, context_store=PlatformContextStore()) as (server, client): + context = await Context.create() + token = await context.generate_token(grant_global_permissions=Permissions(a2a_proxy={"*"})) + async with create_server_with_agent( + history_agent, + context_token=token, + context_store=PlatformContextStore(), + ) as (server, client): yield server, client @@ -56,7 +62,10 @@ async def test_agent_history(history_agent, subtests): with subtests.test("history repeats itself"): context1 = await Context.create() - token = await context1.generate_token(grant_context_permissions=ContextPermissions(context_data={"*"})) + token = await context1.generate_token( + grant_context_permissions=ContextPermissions(context_data={"*"}), + grant_global_permissions=Permissions(a2a_proxy={"*"}), + ) final_task = await get_final_task_from_stream(client.send_message(create_message(token, "first message"))) agent_messages = [msg.parts[0].root.text for msg in final_task.history] @@ -89,7 +98,10 @@ async def test_agent_history(history_agent, subtests): with subtests.test("other context id does not mix history"): context2 = await Context.create() - token = await context2.generate_token(grant_context_permissions=ContextPermissions(context_data={"*"})) + token = await context2.generate_token( + grant_context_permissions=ContextPermissions(context_data={"*"}), + grant_global_permissions=Permissions(a2a_proxy={"*"}), + ) final_task = await get_final_task_from_stream(client.send_message(create_message(token, "first message"))) agent_messages = [msg.parts[0].root.text for msg in final_task.history] assert agent_messages == ["first message"] diff --git a/apps/agentstack-server/tests/e2e/agents/test_platform_extensions.py b/apps/agentstack-server/tests/e2e/agents/test_platform_extensions.py index 91f223e65..abaa55cfb 100644 --- a/apps/agentstack-server/tests/e2e/agents/test_platform_extensions.py +++ b/apps/agentstack-server/tests/e2e/agents/test_platform_extensions.py @@ -1,7 +1,8 @@ # Copyright 2025 © BeeAI a Series of LF Projects, LLC # 
SPDX-License-Identifier: Apache-2.0 import os -from collections.abc import AsyncGenerator, AsyncIterator +from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable +from contextlib import asynccontextmanager from typing import Annotated from uuid import uuid4 @@ -15,7 +16,7 @@ ) from agentstack_sdk.a2a.types import RunYield from agentstack_sdk.platform import File, Provider -from agentstack_sdk.platform.context import Context, ContextPermissions +from agentstack_sdk.platform.context import Context, ContextPermissions, ContextToken, Permissions from agentstack_sdk.server import Server from agentstack_sdk.util.file import load_file from tenacity import AsyncRetrying, stop_after_delay, wait_fixed @@ -24,23 +25,29 @@ @pytest.fixture -async def file_reader_writer(create_server_with_agent) -> AsyncGenerator[tuple[Server, Client]]: - async def file_reader_writer( - message: Message, - _: Annotated[PlatformApiExtensionServer, PlatformApiExtensionSpec()], - ) -> AsyncIterator[RunYield]: - for part in message.parts: - match part.root: - case FilePart() as fp: - async with load_file(fp, stream=True) as open_file: - async for chunk in open_file.aiter_text(chunk_size=5): - yield chunk - - file = await File.create(filename="1.txt", content=message.context_id.encode(), content_type="text/plain") - yield file.to_file_part() - - async with create_server_with_agent(file_reader_writer) as (server, test_client): - yield server, test_client +async def file_reader_writer_factory( + create_server_with_agent, +) -> Callable[[ContextToken], Awaitable[AsyncGenerator[tuple[Server, Client]]]]: + @asynccontextmanager + async def _file_reader_writer_factory(context_token: ContextToken) -> AsyncGenerator[tuple[Server, Client]]: + async def file_reader_writer( + message: Message, + _: Annotated[PlatformApiExtensionServer, PlatformApiExtensionSpec()], + ) -> AsyncIterator[RunYield]: + for part in message.parts: + match part.root: + case FilePart() as fp: + async with load_file(fp, stream=True) as open_file: + async for chunk in open_file.aiter_text(chunk_size=5): + yield chunk + + file = await File.create(filename="1.txt", content=message.context_id.encode(), content_type="text/plain") + yield file.to_file_part() + + async with create_server_with_agent(file_reader_writer, context_token=context_token) as (server, test_client): + yield server, test_client + + return _file_reader_writer_factory @pytest.mark.parametrize( @@ -51,46 +58,48 @@ async def file_reader_writer( ], ) @pytest.mark.usefixtures("clean_up", "setup_platform_client") -async def test_platform_api_extension(file_reader_writer, permissions, should_fail, get_final_task_from_stream): - _, client = file_reader_writer - +async def test_platform_api_extension(file_reader_writer_factory, permissions, should_fail, get_final_task_from_stream): # create context and token context = await Context.create() - token = await context.generate_token(grant_context_permissions=permissions) - - # upload test file - file = await File.create(filename="f.txt", content=b"0123456789", content_type="text/plain", context_id=context.id) + token = await context.generate_token( + grant_context_permissions=permissions, grant_global_permissions=Permissions(a2a_proxy={"*"}) + ) + async with file_reader_writer_factory(token) as (_, client): + # upload test file + file = await File.create( + filename="f.txt", content=b"0123456789", content_type="text/plain", context_id=context.id + ) - # create message with auth credentials - api_extension_client = 
PlatformApiExtensionClient(PlatformApiExtensionSpec()) + # create message with auth credentials + api_extension_client = PlatformApiExtensionClient(PlatformApiExtensionSpec()) - message = Message( - role=Role.user, - parts=[file.to_file_part()], - message_id=str(uuid4()), - context_id=context.id, - metadata=api_extension_client.api_auth_metadata(auth_token=token.token, expires_at=token.expires_at), - ) + message = Message( + role=Role.user, + parts=[file.to_file_part()], + message_id=str(uuid4()), + context_id=context.id, + metadata=api_extension_client.api_auth_metadata(auth_token=token.token, expires_at=token.expires_at), + ) - # send message - task = await get_final_task_from_stream(client.send_message(message)) + # send message + task = await get_final_task_from_stream(client.send_message(message)) - if should_fail: - assert task.status.state == TaskState.failed - assert "403 Forbidden" in task.status.message.parts[0].root.text - else: - assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}" + if should_fail: + assert task.status.state == TaskState.failed + assert "403 Forbidden" in task.status.message.parts[0].root.text + else: + assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}" - # check that first message is the content of the first_file - first_message_text = task.history[0].parts[0].root.text - assert first_message_text == "01234" + # check that first message is the content of the first_file + first_message_text = task.history[0].parts[0].root.text + assert first_message_text == "01234" - second_message_text = task.history[1].parts[0].root.text - assert second_message_text == "56789" + second_message_text = task.history[1].parts[0].root.text + assert second_message_text == "56789" - # check that the agent uploaded a new file with correct context_id as content - async with load_file(task.history[2].parts[0].root) as file: - assert file.text == context.id + # check that the agent uploaded a new file with correct context_id as content + async with load_file(task.history[2].parts[0].root) as file: + assert file.text == context.id SELF_REGISTRATION_TEST_VAR_NAME = "_SELF_REGISTRATION_TEST_VAR" @@ -101,7 +110,9 @@ async def self_registration_agent(create_server_with_agent) -> AsyncGenerator[tu async def self_registration_agent() -> AsyncIterator[RunYield]: yield os.environ.get(SELF_REGISTRATION_TEST_VAR_NAME, "empty") - async with create_server_with_agent(self_registration_agent) as (server, test_client): + context = await Context.create() + token = await context.generate_token(grant_global_permissions=Permissions(a2a_proxy={"*"})) + async with create_server_with_agent(self_registration_agent, context_token=token) as (server, test_client): yield server, test_client diff --git a/apps/agentstack-server/tests/e2e/conftest.py b/apps/agentstack-server/tests/e2e/conftest.py index 43e089bb6..be238cba0 100644 --- a/apps/agentstack-server/tests/e2e/conftest.py +++ b/apps/agentstack-server/tests/e2e/conftest.py @@ -12,6 +12,7 @@ from a2a.client import Client, ClientConfig, ClientEvent, ClientFactory from a2a.types import AgentCard, Message, Task from agentstack_sdk.platform import ModelProvider, SystemConfiguration, use_platform_client +from agentstack_sdk.platform.context import ContextToken logger = logging.getLogger(__name__) @@ -33,10 +34,11 @@ async def fn(stream: AsyncIterator[ClientEvent | Message]) -> Task: @pytest.fixture() -async def a2a_client_factory() -> Callable[[AgentCard | dict[str, Any]], 
AsyncIterator[Client]]: +async def a2a_client_factory() -> Callable[[AgentCard | dict[str, Any], ContextToken], AsyncIterator[Client]]: @asynccontextmanager - async def a2a_client_factory(agent_card: AgentCard | dict) -> AsyncIterator[Client]: - async with httpx.AsyncClient(timeout=None, auth=("admin", "test-password")) as client: + async def a2a_client_factory(agent_card: AgentCard | dict, context_token: ContextToken) -> AsyncIterator[Client]: + token = context_token.token.get_secret_value() + async with httpx.AsyncClient(timeout=None, headers={"Authorization": f"Bearer {token}"}) as client: yield ClientFactory(ClientConfig(httpx_client=client)).create(card=agent_card) return a2a_client_factory diff --git a/apps/agentstack-server/tests/e2e/routes/test_files.py b/apps/agentstack-server/tests/e2e/routes/test_files.py index 2f96c35bf..2bb995189 100644 --- a/apps/agentstack-server/tests/e2e/routes/test_files.py +++ b/apps/agentstack-server/tests/e2e/routes/test_files.py @@ -9,7 +9,7 @@ import pytest from agentstack_sdk.platform import use_platform_client from agentstack_sdk.platform.client import PlatformClient -from agentstack_sdk.platform.context import Context, ContextPermissions +from agentstack_sdk.platform.context import Context, ContextPermissions, Permissions from agentstack_sdk.platform.file import File from httpx import AsyncClient from tenacity import AsyncRetrying, stop_after_delay, wait_fixed @@ -204,14 +204,19 @@ async def test_text_extraction_plain_text_workflow(subtests): async def test_context_scoped_file_access(subtests): """Test that files are properly scoped to contexts and users cannot access files from other contexts.""" + global_permissions = Permissions(a2a_proxy={"*"}) with subtests.test("create two different contexts"): ctx1 = await Context.create() ctx2 = await Context.create() with subtests.test("generate context tokens"): - permissions = ContextPermissions(files={"read", "write", "extract"}) - token_1 = await ctx1.generate_token(grant_context_permissions=permissions) - token_2 = await ctx2.generate_token(grant_context_permissions=permissions) + ctx_permissions = ContextPermissions(files={"read", "write", "extract"}) + token_1 = await ctx1.generate_token( + grant_context_permissions=ctx_permissions, grant_global_permissions=global_permissions + ) + token_2 = await ctx2.generate_token( + grant_context_permissions=ctx_permissions, grant_global_permissions=global_permissions + ) # Create platform clients with context tokens async with ( @@ -305,9 +310,14 @@ async def test_file_extraction_context_isolation(subtests, test_configuration): ctx1 = await Context.create() ctx2 = await Context.create() - permissions = ContextPermissions(files={"read", "write", "extract"}) - token_1 = await ctx1.generate_token(grant_context_permissions=permissions) - token_2 = await ctx2.generate_token(grant_context_permissions=permissions) + ctx_permissions = ContextPermissions(files={"read", "write", "extract"}) + global_permissions = Permissions(a2a_proxy={"*"}) + token_1 = await ctx1.generate_token( + grant_context_permissions=ctx_permissions, grant_global_permissions=global_permissions + ) + token_2 = await ctx2.generate_token( + grant_context_permissions=ctx_permissions, grant_global_permissions=global_permissions + ) # Create platform clients with context tokens async with ( @@ -507,9 +517,14 @@ async def test_files_list(subtests): ctx2 = await Context.create() # Generate context tokens - permissions = ContextPermissions(files={"read", "write"}) - token_1 = await 
ctx1.generate_token(grant_context_permissions=permissions) - token_2 = await ctx2.generate_token(grant_context_permissions=permissions) + ctx_permissions = ContextPermissions(files={"read", "write"}) + global_permissions = Permissions(a2a_proxy={"*"}) + token_1 = await ctx1.generate_token( + grant_context_permissions=ctx_permissions, grant_global_permissions=global_permissions + ) + token_2 = await ctx2.generate_token( + grant_context_permissions=ctx_permissions, grant_global_permissions=global_permissions + ) # Create platform clients with context tokens async with ( @@ -589,9 +604,14 @@ async def test_files_list_user_global_and_context_scoped(subtests): ctx2 = await Context.create() with subtests.test("generate context tokens"): - permissions = ContextPermissions(files={"read", "write"}) - token_1 = await ctx1.generate_token(grant_context_permissions=permissions) - token_2 = await ctx2.generate_token(grant_context_permissions=permissions) + ctx_permissions = ContextPermissions(files={"read", "write"}) + global_permissions = Permissions(a2a_proxy={"*"}) + token_1 = await ctx1.generate_token( + grant_context_permissions=ctx_permissions, grant_global_permissions=global_permissions + ) + token_2 = await ctx2.generate_token( + grant_context_permissions=ctx_permissions, grant_global_permissions=global_permissions + ) async with ( PlatformClient(context_id=ctx1.id, auth_token=token_1.token.get_secret_value()) as client_1, diff --git a/apps/agentstack-server/tests/e2e/routes/test_openai.py b/apps/agentstack-server/tests/e2e/routes/test_openai.py index 3a00cac06..1d4f33bc0 100644 --- a/apps/agentstack-server/tests/e2e/routes/test_openai.py +++ b/apps/agentstack-server/tests/e2e/routes/test_openai.py @@ -20,14 +20,14 @@ async def test_llm_permission_enforcement_with_context_token(subtests, test_conf with subtests.test("LLM request denied with insufficient global permissions"): ctx = await Context.create() - token = await ctx.generate_token(grant_global_permissions=Permissions(files={"read"})) + token = await ctx.generate_token(grant_global_permissions=Permissions(files={"read"}, a2a_proxy={"*"})) openai_client = openai.AsyncOpenAI(api_key=token.token.get_secret_value(), base_url=openai_base_url) with pytest.raises(openai.PermissionDeniedError): resp = await openai_client.chat.completions.create(**test_message, model=test_configuration.llm_model) # Test with sufficient global permissions with subtests.test("LLM request succeeds with sufficient global permissions"): - token = await ctx.generate_token(grant_global_permissions=Permissions(llm={"*"})) + token = await ctx.generate_token(grant_global_permissions=Permissions(llm={"*"}, a2a_proxy={"*"})) openai_client = openai.AsyncOpenAI(api_key=token.token.get_secret_value(), base_url=openai_base_url) resp = await openai_client.chat.completions.create(**test_message, model=test_configuration.llm_model) assert resp.choices[0].message.content @@ -42,7 +42,9 @@ async def test_models_endpoint(subtests, test_configuration): with subtests.test("models endpoint returns default model"): # Create a context with LLM permissions to access models ctx = await Context.create() - token = await ctx.generate_token(grant_global_permissions=Permissions(llm={"*"}, model_providers={"read"})) + token = await ctx.generate_token( + grant_global_permissions=Permissions(llm={"*"}, model_providers={"read"}, a2a_proxy={"*"}) + ) openai_client = openai.AsyncOpenAI(api_key=token.token.get_secret_value(), base_url=openai_base_url) # Get available models diff --git 
a/apps/agentstack-server/uv.lock b/apps/agentstack-server/uv.lock index dea6fa22f..f7e6d6b4d 100644 --- a/apps/agentstack-server/uv.lock +++ b/apps/agentstack-server/uv.lock @@ -25,7 +25,9 @@ source = { editable = "../agentstack-sdk-py" } dependencies = [ { name = "a2a-sdk" }, { name = "anyio" }, + { name = "async-lru" }, { name = "asyncclick" }, + { name = "authlib" }, { name = "fastapi" }, { name = "httpx" }, { name = "janus" }, @@ -45,7 +47,9 @@ dependencies = [ requires-dist = [ { name = "a2a-sdk", specifier = "==0.3.21" }, { name = "anyio", specifier = ">=4.9.0" }, + { name = "async-lru", specifier = ">=2.0.4" }, { name = "asyncclick", specifier = ">=8.1.8" }, + { name = "authlib", specifier = ">=1.3.0" }, { name = "fastapi", specifier = ">=0.116.1" }, { name = "httpx" }, { name = "janus", specifier = ">=2.0.0" }, diff --git a/apps/agentstack-ui/src/api/a2a/agent-card.ts b/apps/agentstack-ui/src/api/a2a/agent-card.ts index ac88217eb..76cdd758a 100644 --- a/apps/agentstack-ui/src/api/a2a/agent-card.ts +++ b/apps/agentstack-ui/src/api/a2a/agent-card.ts @@ -4,14 +4,16 @@ */ import { A2AClient } from '@a2a-js/sdk/client'; +import { createAuthenticatedFetch } from 'agentstack-sdk'; import { UnauthenticatedError } from '#api/errors.ts'; import { getBaseUrl } from '#utils/api/getBaseUrl.ts'; -export async function getAgentClient(providerId: string) { +export async function getAgentClient(providerId: string, token?: string) { const agentCardUrl = `${getBaseUrl()}/api/v1/a2a/${providerId}/.well-known/agent-card.json`; - return await A2AClient.fromCardUrl(agentCardUrl, { fetchImpl: clientFetch }); + const fetchImpl = token ? createAuthenticatedFetch(token, clientFetch) : clientFetch; + return await A2AClient.fromCardUrl(agentCardUrl, { fetchImpl }); } export async function clientFetch(input: RequestInfo, init?: RequestInit) { diff --git a/apps/agentstack-ui/src/api/a2a/client.ts b/apps/agentstack-ui/src/api/a2a/client.ts index 845041b7f..4d88abf84 100644 --- a/apps/agentstack-ui/src/api/a2a/client.ts +++ b/apps/agentstack-ui/src/api/a2a/client.ts @@ -72,13 +72,17 @@ function handleArtifactUpdate(event: TaskArtifactUpdateEvent): UIMessagePart[] { export interface CreateA2AClientParams { providerId: string; onStatusUpdate?: (event: TaskStatusUpdateEvent) => UIGenericPart[]; + authToken?: { token: string } | string | null | undefined; } export const buildA2AClient = async ({ providerId, onStatusUpdate, + authToken: contextToken, }: CreateA2AClientParams) => { - const client = await getAgentClient(providerId); + const tokenData = contextToken; + const token = typeof tokenData === 'string' ? tokenData : tokenData?.token; + const client = await getAgentClient(providerId, token ?? 
undefined); const card = await client.getAgentCard(); const { resolveMetadata: resolveAgentCardMetadata, demands } = handleAgentCard(card); diff --git a/apps/agentstack-ui/src/api/schema.d.ts b/apps/agentstack-ui/src/api/schema.d.ts index 978710278..11737fca9 100644 --- a/apps/agentstack-ui/src/api/schema.d.ts +++ b/apps/agentstack-ui/src/api/schema.d.ts @@ -9,6 +9,23 @@ */ export interface paths { + '/.well-known/jwks': { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** Jwks */ + get: operations['jwks__well_known_jwks_get']; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; '/.well-known/oauth-protected-resource/{resource}': { parameters: { query?: never; @@ -85,19 +102,19 @@ export interface paths { cookie?: never; }; /** A2A Proxy Http Transport */ - get: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head']; + get: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get']; /** A2A Proxy Http Transport */ - put: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head']; + put: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get']; /** A2A Proxy Http Transport */ - post: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head']; + post: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get']; /** A2A Proxy Http Transport */ - delete: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head']; + delete: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get']; /** A2A Proxy Http Transport */ - options: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head']; + options: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get']; /** A2A Proxy Http Transport */ - head: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head']; + head: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get']; /** A2A Proxy Http Transport */ - patch: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head']; + patch: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get']; trace?: never; }; '/api/v1/a2a/{provider_id}/http/{path}': { @@ -108,19 +125,19 @@ export interface paths { cookie?: never; }; /** A2A Proxy Http Transport */ - get: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head']; + get: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get']; /** A2A Proxy Http Transport */ - put: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head']; + put: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get']; /** A2A Proxy Http Transport */ - post: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head']; + post: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get']; /** A2A Proxy Http Transport */ - delete: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head']; + delete: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get']; /** A2A Proxy Http Transport */ - options: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head']; + options: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get']; /** A2A Proxy Http Transport */ - head: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head']; + head: 
operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get']; /** A2A Proxy Http Transport */ - patch: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head']; + patch: operations['a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get']; trace?: never; }; '/api/v1/configurations/system': { @@ -359,7 +376,8 @@ export interface paths { path?: never; cookie?: never; }; - get?: never; + /** List Files */ + get: operations['list_files_api_v1_files_get']; put?: never; /** Upload File */ post: operations['upload_file_api_v1_files_post']; @@ -1454,6 +1472,64 @@ export interface components { } & { [key: string]: unknown; }; + /** ProviderWithStateResponse */ + agentstack_server__api__schema__common__EntityModel____class_getitem_____locals___ModelOutput__3: { + agent_card: components['schemas']['AgentCard-Output']; + /** + * Auto Stop Timeout + * Format: duration + * @default PT20M + */ + auto_stop_timeout: string; + /** + * Created At + * Format: date-time + */ + created_at?: string; + /** + * Created By + * Format: uuid + */ + created_by: string; + /** Env */ + readonly env: components['schemas']['EnvVar'][]; + /** + * Id + * Format: uuid + */ + id: string; + /** + * Last Active At + * Format: date-time + */ + last_active_at?: string; + last_error?: components['schemas']['ProviderErrorMessage'] | null; + /** Managed */ + readonly managed: boolean; + /** Missing Configuration */ + missing_configuration?: components['schemas']['EnvVar'][]; + /** Origin */ + origin: string; + /** Registry */ + registry?: + | components['schemas']['GithubRegistryLocation'] + | components['schemas']['NetworkRegistryLocation'] + | components['schemas']['FileSystemRegistryLocation'] + | null; + /** Source */ + source: components['schemas']['DockerImageProviderLocation'] | components['schemas']['NetworkProviderLocation']; + /** State */ + state: components['schemas']['ProviderDeploymentState'] | components['schemas']['UnmanagedState']; + readonly type: components['schemas']['ProviderType']; + /** + * Updated At + * Format: date-time + */ + updated_at?: string; + version_info?: components['schemas']['VersionInfo']; + } & { + [key: string]: unknown; + }; /** ModelProviderResponse */ agentstack_server__api__schema__common__EntityModel____class_getitem_____locals___ModelOutput__4: { /** @@ -1645,8 +1721,8 @@ export interface components { created_at?: string; /** Error Message */ error_message?: string | null; - /** Extracted File Id */ - extracted_file_id?: string | null; + /** Extracted Files */ + extracted_files?: components['schemas']['ExtractedFileInfo'][]; extraction_metadata?: components['schemas']['ExtractionMetadata'] | null; /** * File Id @@ -1676,8 +1752,8 @@ export interface components { created_at?: string; /** Error Message */ error_message?: string | null; - /** Extracted File Id */ - extracted_file_id?: string | null; + /** Extracted Files */ + extracted_files?: components['schemas']['ExtractedFileInfo'][]; extraction_metadata?: components['schemas']['ExtractionMetadata'] | null; /** * File Id @@ -1699,40 +1775,6 @@ export interface components { status: components['schemas']['ExtractionStatus']; }; /** ContextResponse */ - agentstack_server__api__schema__common__EntityModel____class_getitem_____locals___ModelOutput__12: { - /** - * Created At - * Format: date-time - */ - created_at?: string; - /** - * Created By - * Format: uuid - */ - created_by: string; - /** - * Id - * Format: uuid - */ - id: string; - /** - * Last Active At - * Format: date-time - */ - 
last_active_at?: string; - /** Metadata */ - metadata?: { - [key: string]: string; - } | null; - /** Provider Id */ - provider_id?: string | null; - /** - * Updated At - * Format: date-time - */ - updated_at?: string; - }; - /** ContextResponse */ agentstack_server__api__schema__common__EntityModel____class_getitem_____locals___ModelOutput__13: { /** * Created At @@ -1898,15 +1940,8 @@ export interface components { name?: string | null; stats?: components['schemas']['VectorStoreStats'] | null; }; - /** ProviderWithStateResponse */ + /** ContextResponse */ 'agentstack_server__api__schema__common__EntityModel____class_getitem______ModelOutput': { - agent_card: components['schemas']['AgentCard-Output']; - /** - * Auto Stop Timeout - * Format: duration - * @default PT20M - */ - auto_stop_timeout: string; /** * Created At * Format: date-time @@ -1917,8 +1952,6 @@ export interface components { * Format: uuid */ created_by: string; - /** Env */ - readonly env: components['schemas']['EnvVar'][]; /** * Id * Format: uuid @@ -1929,32 +1962,17 @@ export interface components { * Format: date-time */ last_active_at?: string; - last_error?: components['schemas']['ProviderErrorMessage'] | null; - /** Managed */ - readonly managed: boolean; - /** Missing Configuration */ - missing_configuration?: components['schemas']['EnvVar'][]; - /** Origin */ - origin: string; - /** Registry */ - registry?: - | components['schemas']['GithubRegistryLocation'] - | components['schemas']['NetworkRegistryLocation'] - | components['schemas']['FileSystemRegistryLocation'] - | null; - /** Source */ - source: components['schemas']['DockerImageProviderLocation'] | components['schemas']['NetworkProviderLocation']; - /** State */ - state: components['schemas']['ProviderDeploymentState'] | components['schemas']['UnmanagedState']; - readonly type: components['schemas']['ProviderType']; + /** Metadata */ + metadata?: { + [key: string]: string; + } | null; + /** Provider Id */ + provider_id?: string | null; /** * Updated At * Format: date-time */ updated_at?: string; - version_info?: components['schemas']['VersionInfo']; - } & { - [key: string]: unknown; }; /** * APIKeySecurityScheme @@ -2013,7 +2031,11 @@ export interface components { /** Parts */ parts: components['schemas']['Part-Output'][]; }; - /** Audio */ + /** + * Audio + * @description Data about a previous audio response from the model. + * [Learn more](https://platform.openai.com/docs/guides/audio). + */ Audio: { /** Id */ id: string; @@ -2068,7 +2090,10 @@ export interface components { * @enum {string} */ BuildState: 'missing' | 'in_progress' | 'build_completed' | 'completed' | 'failed'; - /** ChatCompletionAllowedToolChoiceParam */ + /** + * ChatCompletionAllowedToolChoiceParam + * @description Constrains the tools available to the model to a pre-defined set. + */ ChatCompletionAllowedToolChoiceParam: { allowed_tools: components['schemas']['ChatCompletionAllowedToolsParam']; /** @@ -2077,7 +2102,10 @@ export interface components { */ type: 'allowed_tools'; }; - /** ChatCompletionAllowedToolsParam */ + /** + * ChatCompletionAllowedToolsParam + * @description Constrains the tools available to the model to a pre-defined set. + */ ChatCompletionAllowedToolsParam: { /** * Mode @@ -2089,7 +2117,10 @@ export interface components { [key: string]: unknown; }[]; }; - /** ChatCompletionAssistantMessageParam */ + /** + * ChatCompletionAssistantMessageParam + * @description Messages sent by the model in response to user messages. 
+ */ ChatCompletionAssistantMessageParam: { audio?: components['schemas']['Audio'] | null; /** Content */ @@ -2116,7 +2147,13 @@ export interface components { | components['schemas']['ChatCompletionMessageCustomToolCallParam'] )[]; }; - /** ChatCompletionAudioParam */ + /** + * ChatCompletionAudioParam + * @description Parameters for audio output. + * + * Required when audio output is requested with + * `modalities: ["audio"]`. [Learn more](https://platform.openai.com/docs/guides/audio). + */ ChatCompletionAudioParam: { /** * Format @@ -2128,7 +2165,10 @@ export interface components { | string | ('alloy' | 'ash' | 'ballad' | 'coral' | 'echo' | 'sage' | 'shimmer' | 'verse' | 'marin' | 'cedar'); }; - /** ChatCompletionContentPartImageParam */ + /** + * ChatCompletionContentPartImageParam + * @description Learn about [image inputs](https://platform.openai.com/docs/guides/vision). + */ ChatCompletionContentPartImageParam: { image_url: components['schemas']['ImageURL']; /** @@ -2137,7 +2177,10 @@ export interface components { */ type: 'image_url'; }; - /** ChatCompletionContentPartInputAudioParam */ + /** + * ChatCompletionContentPartInputAudioParam + * @description Learn about [audio inputs](https://platform.openai.com/docs/guides/audio). + */ ChatCompletionContentPartInputAudioParam: { input_audio: components['schemas']['InputAudio']; /** @@ -2156,7 +2199,10 @@ export interface components { */ type: 'refusal'; }; - /** ChatCompletionContentPartTextParam */ + /** + * ChatCompletionContentPartTextParam + * @description Learn about [text inputs](https://platform.openai.com/docs/guides/text-generation). + */ ChatCompletionContentPartTextParam: { /** Text */ text: string; @@ -2166,7 +2212,12 @@ export interface components { */ type: 'text'; }; - /** ChatCompletionDeveloperMessageParam */ + /** + * ChatCompletionDeveloperMessageParam + * @description Developer-provided instructions that the model should follow, regardless of + * messages sent by the user. With o1 models and newer, `developer` messages + * replace the previous `system` messages. + */ ChatCompletionDeveloperMessageParam: { /** Content */ content: string | components['schemas']['ChatCompletionContentPartTextParam'][]; @@ -2178,7 +2229,10 @@ export interface components { */ role: 'developer'; }; - /** ChatCompletionFunctionCallOptionParam */ + /** + * ChatCompletionFunctionCallOptionParam + * @description Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. + */ ChatCompletionFunctionCallOptionParam: { /** Name */ name: string; @@ -2195,7 +2249,10 @@ export interface components { */ role: 'function'; }; - /** ChatCompletionFunctionToolParam */ + /** + * ChatCompletionFunctionToolParam + * @description A function tool that can be used to generate a response. + */ ChatCompletionFunctionToolParam: { function: components['schemas']['FunctionDefinition']; /** @@ -2204,7 +2261,10 @@ export interface components { */ type: 'function'; }; - /** ChatCompletionMessageCustomToolCallParam */ + /** + * ChatCompletionMessageCustomToolCallParam + * @description A call to a custom tool created by the model. 
+ */ ChatCompletionMessageCustomToolCallParam: { custom: components['schemas']['openai__types__chat__chat_completion_message_custom_tool_call_param__Custom']; /** Id */ @@ -2215,7 +2275,10 @@ export interface components { */ type: 'custom'; }; - /** ChatCompletionMessageFunctionToolCallParam */ + /** + * ChatCompletionMessageFunctionToolCallParam + * @description A call to a function tool created by the model. + */ ChatCompletionMessageFunctionToolCallParam: { function: components['schemas']['openai__types__chat__chat_completion_message_function_tool_call_param__Function']; /** Id */ @@ -2226,7 +2289,12 @@ export interface components { */ type: 'function'; }; - /** ChatCompletionNamedToolChoiceCustomParam */ + /** + * ChatCompletionNamedToolChoiceCustomParam + * @description Specifies a tool the model should use. + * + * Use to force the model to call a specific custom tool. + */ ChatCompletionNamedToolChoiceCustomParam: { custom: components['schemas']['openai__types__chat__chat_completion_named_tool_choice_custom_param__Custom']; /** @@ -2235,7 +2303,12 @@ export interface components { */ type: 'custom'; }; - /** ChatCompletionNamedToolChoiceParam */ + /** + * ChatCompletionNamedToolChoiceParam + * @description Specifies a tool the model should use. + * + * Use to force the model to call a specific function. + */ ChatCompletionNamedToolChoiceParam: { function: components['schemas']['openai__types__chat__chat_completion_named_tool_choice_param__Function']; /** @@ -2244,7 +2317,11 @@ export interface components { */ type: 'function'; }; - /** ChatCompletionPredictionContentParam */ + /** + * ChatCompletionPredictionContentParam + * @description Static predicted output content, such as the content of a text file that is + * being regenerated. + */ ChatCompletionPredictionContentParam: { /** Content */ content: string | components['schemas']['ChatCompletionContentPartTextParam'][]; @@ -2295,6 +2372,16 @@ export interface components { model: | string | ( + | 'gpt-5.2' + | 'gpt-5.2-2025-12-11' + | 'gpt-5.2-chat-latest' + | 'gpt-5.2-pro' + | 'gpt-5.2-pro-2025-12-11' + | 'gpt-5.1' + | 'gpt-5.1-2025-11-13' + | 'gpt-5.1-codex' + | 'gpt-5.1-mini' + | 'gpt-5.1-chat-latest' | 'gpt-5' | 'gpt-5-mini' | 'gpt-5-nano' @@ -2366,7 +2453,7 @@ export interface components { /** Presence Penalty */ presence_penalty?: number | null; /** Reasoning Effort */ - reasoning_effort?: ('minimal' | 'low' | 'medium' | 'high') | null; + reasoning_effort?: ('none' | 'minimal' | 'low' | 'medium' | 'high' | 'xhigh') | null; /** Response Format */ response_format?: | components['schemas']['ResponseFormatText'] @@ -2403,14 +2490,22 @@ export interface components { user?: string | null; web_search_options?: components['schemas']['WebSearchOptions'] | null; }; - /** ChatCompletionStreamOptionsParam */ + /** + * ChatCompletionStreamOptionsParam + * @description Options for streaming response. Only set this when you set `stream: true`. + */ ChatCompletionStreamOptionsParam: { /** Include Obfuscation */ include_obfuscation?: boolean; /** Include Usage */ include_usage?: boolean; }; - /** ChatCompletionSystemMessageParam */ + /** + * ChatCompletionSystemMessageParam + * @description Developer-provided instructions that the model should follow, regardless of + * messages sent by the user. With o1 models and newer, use `developer` messages + * for this purpose instead. 
+ */ ChatCompletionSystemMessageParam: { /** Content */ content: string | components['schemas']['ChatCompletionContentPartTextParam'][]; @@ -2434,7 +2529,11 @@ export interface components { /** Tool Call Id */ tool_call_id: string; }; - /** ChatCompletionUserMessageParam */ + /** + * ChatCompletionUserMessageParam + * @description Messages sent by an end user, containing prompts or additional context + * information. + */ ChatCompletionUserMessageParam: { /** Content */ content: @@ -2443,7 +2542,7 @@ export interface components { | components['schemas']['ChatCompletionContentPartTextParam'] | components['schemas']['ChatCompletionContentPartImageParam'] | components['schemas']['ChatCompletionContentPartInputAudioParam'] - | components['schemas']['File'] + | components['schemas']['File-Input'] )[]; /** Name */ name?: string; @@ -2469,6 +2568,8 @@ export interface components { }; /** ConnectorConnectRequest */ ConnectorConnectRequest: { + /** Access Token */ + access_token?: string | null; /** Redirect Url */ redirect_url?: string | null; }; @@ -2788,10 +2889,28 @@ export interface components { } & { [key: string]: unknown; }; + /** + * ExtractedFileInfo + * @description Information about an extracted file. + */ + ExtractedFileInfo: { + /** + * File Id + * Format: uuid + */ + file_id: string; + format?: components['schemas']['ExtractionFormat'] | null; + }; + /** + * ExtractionFormat + * @enum {string} + */ + ExtractionFormat: 'markdown' | 'vendor_specific_json'; /** ExtractionMetadata */ ExtractionMetadata: { /** Backend */ - backend: string; + backend?: string | null; + settings?: components['schemas']['TextExtractionSettings'] | null; } & { [key: string]: unknown; }; @@ -2800,8 +2919,11 @@ export interface components { * @enum {string} */ ExtractionStatus: 'pending' | 'in_progress' | 'completed' | 'failed' | 'cancelled'; - /** File */ - File: { + /** + * File + * @description Learn about [file inputs](https://platform.openai.com/docs/guides/text) for text generation. + */ + 'File-Input': { file: components['schemas']['FileFile']; /** * Type @@ -2809,6 +2931,36 @@ export interface components { */ type: 'file'; }; + /** File */ + 'File-Output': { + /** Content Type */ + content_type: string; + /** Context Id */ + context_id?: string | null; + /** + * Created At + * Format: date-time + */ + created_at?: string; + /** + * Created By + * Format: uuid + */ + created_by: string; + /** File Size Bytes */ + file_size_bytes?: number | null; + /** @default user_upload */ + file_type: components['schemas']['FileType']; + /** Filename */ + filename: string; + /** + * Id + * Format: uuid + */ + id?: string; + /** Parent File Id */ + parent_file_id?: string | null; + }; /** FileFile */ FileFile: { /** File Data */ @@ -2818,6 +2970,27 @@ export interface components { /** Filename */ filename?: string; }; + /** + * FileListQuery + * @description Query schema for listing files. + */ + FileListQuery: { + /** Content Type */ + content_type?: string | null; + /** + * Filename Search + * @description Case-insensitive partial match search on filename (e.g., 'doc' matches 'my_document.pdf') + */ + filename_search?: string | null; + /** Limit */ + limit?: number; + /** Order */ + order?: string; + /** Order By */ + order_by?: string; + /** Page Token */ + page_token?: string | null; + }; /** * FilePart * @description Represents a file segment within a message or artifact. 
The file content can be @@ -2871,7 +3044,12 @@ export interface components { /** Uri */ uri: string; }; - /** FunctionCall */ + /** + * FunctionCall + * @description Deprecated and replaced by `tool_calls`. + * + * The name and arguments of a function that should be called, as generated by the model. + */ FunctionCall: { /** Arguments */ arguments: string; @@ -2903,7 +3081,7 @@ export interface components { /** GlobalPermissionGrant */ GlobalPermissionGrant: { /** A2A Proxy */ - a2a_proxy?: '*'[]; + a2a_proxy?: ('*' | string)[]; /** Connectors */ connectors?: ('read' | 'write' | 'proxy' | '*')[]; /** Context Data */ @@ -2911,13 +3089,13 @@ export interface components { /** Contexts */ contexts?: ('read' | 'write' | '*')[]; /** Embeddings */ - embeddings?: ('*' | components['schemas']['ResourceIdPermission'])[]; + embeddings?: ('*' | string)[]; /** Feedback */ feedback?: 'write'[]; /** Files */ files?: ('read' | 'write' | 'extract' | '*')[]; /** Llm */ - llm?: ('*' | components['schemas']['ResourceIdPermission'])[]; + llm?: ('*' | string)[]; /** Mcp Providers */ mcp_providers?: ('read' | 'write' | '*')[]; /** Mcp Proxy */ @@ -3028,7 +3206,10 @@ export interface components { */ task_id: string; }; - /** JSONSchema */ + /** + * JSONSchema + * @description Structured Outputs configuration options, including a JSON Schema. + */ JSONSchema: { /** Description */ description?: string; @@ -3325,14 +3506,20 @@ export interface components { | components['schemas']['AddProvider'] | components['schemas']['UpdateProvider'] | components['schemas']['NoAction']; - /** Custom */ + /** + * Custom + * @description The custom tool that the model called. + */ openai__types__chat__chat_completion_message_custom_tool_call_param__Custom: { /** Input */ input: string; /** Name */ name: string; }; - /** Function */ + /** + * Function + * @description The function that the model called. + */ openai__types__chat__chat_completion_message_function_tool_call_param__Function: { /** Arguments */ arguments: string; @@ -3457,6 +3644,20 @@ export interface components { /** Total Count */ total_count: number; }; + /** PaginatedResult[File] */ + PaginatedResult_File_: { + /** + * Has More + * @default false + */ + has_more: boolean; + /** Items */ + items: components['schemas']['File-Output'][]; + /** Next Page Token */ + readonly next_page_token: string | null; + /** Total Count */ + total_count: number; + }; /** PaginatedResult[ModelProvider] */ PaginatedResult_ModelProvider_: { /** @@ -3715,12 +3916,15 @@ export interface components { version: string; version_type: components['schemas']['GithubVersionType']; }; - /** ResourceIdPermission */ - ResourceIdPermission: { - /** Id */ - id: string; - }; - /** ResponseFormatJSONObject */ + /** + * ResponseFormatJSONObject + * @description JSON object response format. + * + * An older method of generating JSON responses. + * Using `json_schema` is recommended for models that support it. Note that the + * model will not generate JSON without a system or user message instructing it + * to do so. + */ ResponseFormatJSONObject: { /** * Type @@ -3728,7 +3932,13 @@ export interface components { */ type: 'json_object'; }; - /** ResponseFormatJSONSchema */ + /** + * ResponseFormatJSONSchema + * @description JSON Schema response format. + * + * Used to generate structured JSON responses. + * Learn more about [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs). 
+ */ ResponseFormatJSONSchema: { json_schema: components['schemas']['JSONSchema']; /** @@ -3737,7 +3947,10 @@ export interface components { */ type: 'json_schema'; }; - /** ResponseFormatText */ + /** + * ResponseFormatText + * @description Default response format. Used to generate text responses. + */ ResponseFormatText: { /** * Type @@ -3782,6 +3995,19 @@ export interface components { | components['schemas']['OAuth2SecurityScheme-Output'] | components['schemas']['OpenIdConnectSecurityScheme'] | components['schemas']['MutualTLSSecurityScheme']; + /** + * TextExtractionRequest + * @description Request schema for text extraction. + */ + TextExtractionRequest: { + /** @description Additional options for text extraction */ + settings?: components['schemas']['TextExtractionSettings'] | null; + }; + /** TextExtractionSettings */ + TextExtractionSettings: { + /** Formats */ + formats?: components['schemas']['ExtractionFormat'][]; + }; /** * TextPart * @description Represents a text segment within a message or artifact. @@ -3959,7 +4185,11 @@ export interface components { docker?: components['schemas']['ResolvedDockerImageID'] | null; github?: components['schemas']['ResolvedGithubUrl'] | null; }; - /** WebSearchOptions */ + /** + * WebSearchOptions + * @description This tool searches the web for relevant results to use in a response. + * Learn more about the [web search tool](https://platform.openai.com/docs/guides/tools-web-search?api-mode=chat). + */ WebSearchOptions: { /** * Search Context Size @@ -3968,7 +4198,10 @@ export interface components { search_context_size?: 'low' | 'medium' | 'high'; user_location?: components['schemas']['WebSearchOptionsUserLocation'] | null; }; - /** WebSearchOptionsUserLocation */ + /** + * WebSearchOptionsUserLocation + * @description Approximate location parameters for the search. + */ WebSearchOptionsUserLocation: { approximate: components['schemas']['WebSearchOptionsUserLocationApproximate']; /** @@ -3977,7 +4210,10 @@ export interface components { */ type: 'approximate'; }; - /** WebSearchOptionsUserLocationApproximate */ + /** + * WebSearchOptionsUserLocationApproximate + * @description Approximate location parameters for the search. 
+ */ WebSearchOptionsUserLocationApproximate: { /** City */ city?: string; @@ -3997,6 +4233,26 @@ export interface components { } export type $defs = Record; export interface operations { + jwks__well_known_jwks_get: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': unknown; + }; + }; + }; + }; protected_resource_metadata__well_known_oauth_protected_resource__resource__get: { parameters: { query?: never; @@ -4121,7 +4377,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get: { parameters: { query?: { path?: string; @@ -4154,7 +4410,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get: { parameters: { query?: { path?: string; @@ -4187,7 +4443,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get: { parameters: { query?: { path?: string; @@ -4220,7 +4476,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get: { parameters: { query?: { path?: string; @@ -4253,7 +4509,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get: { parameters: { query?: { path?: string; @@ -4286,7 +4542,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get: { parameters: { query?: { path?: string; @@ -4319,7 +4575,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http_head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http_get: { parameters: { query?: { path?: string; @@ -4352,7 +4608,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get: { parameters: { query?: never; header?: never; @@ -4384,7 +4640,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get: { parameters: { query?: never; header?: never; @@ -4416,7 +4672,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get: { parameters: { query?: never; header?: never; @@ -4448,7 +4704,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get: { parameters: { query?: never; header?: never; @@ -4480,7 +4736,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get: { parameters: { query?: never; header?: never; @@ -4512,7 +4768,7 @@ export interface operations { }; }; }; - 
a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get: { parameters: { query?: never; header?: never; @@ -4544,7 +4800,7 @@ export interface operations { }; }; }; - a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__head: { + a2a_proxy_http_transport_api_v1_a2a__provider_id__http__path__get: { parameters: { query?: never; header?: never; @@ -4978,7 +5234,7 @@ export interface operations { [name: string]: unknown; }; content: { - 'application/json': components['schemas']['agentstack_server__api__schema__common__EntityModel____class_getitem_____locals___ModelOutput__12']; + 'application/json': components['schemas']['agentstack_server__api__schema__common__EntityModel____class_getitem______ModelOutput']; }; }; /** @description Validation Error */ @@ -5228,6 +5484,38 @@ export interface operations { }; }; }; + list_files_api_v1_files_get: { + parameters: { + query: { + context_id?: string | null; + query: components['schemas']['FileListQuery']; + }; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['PaginatedResult_File_']; + }; + }; + /** @description Validation Error */ + 422: { + headers: { + [name: string]: unknown; + }; + content: { + 'application/json': components['schemas']['HTTPValidationError']; + }; + }; + }; + }; upload_file_api_v1_files_post: { parameters: { query?: { @@ -5404,7 +5692,11 @@ export interface operations { }; cookie?: never; }; - requestBody?: never; + requestBody?: { + content: { + 'application/json': components['schemas']['TextExtractionRequest'] | null; + }; + }; responses: { /** @description Successful Response */ 201: { @@ -6488,7 +6780,7 @@ export interface operations { [name: string]: unknown; }; content: { - 'application/json': components['schemas']['agentstack_server__api__schema__common__EntityModel____class_getitem______ModelOutput']; + 'application/json': components['schemas']['agentstack_server__api__schema__common__EntityModel____class_getitem_____locals___ModelOutput__3']; }; }; /** @description Validation Error */ diff --git a/apps/agentstack-ui/src/app/api/[...path]/route.ts b/apps/agentstack-ui/src/app/api/[...path]/route.ts index a2ad9a105..41525e820 100644 --- a/apps/agentstack-ui/src/app/api/[...path]/route.ts +++ b/apps/agentstack-ui/src/app/api/[...path]/route.ts @@ -19,6 +19,8 @@ type RouteContext = { }>; }; +const isA2AEndpoint = (path: string[]) => path[0] === 'v1' && path[1] === 'a2a'; + async function handler(request: NextRequest, context: RouteContext) { const { isAuthEnabled } = runtimeConfig; const { method, headers, body, nextUrl } = request; @@ -31,8 +33,12 @@ async function handler(request: NextRequest, context: RouteContext) { targetUrl += '/'; } targetUrl += search; + if (isAuthEnabled && !isA2AEndpoint(path)) { + if (isA2AEndpoint(path)) { + // Skip JWT auth for A2A endpoints - they use context tokens passed via A2A client + return; + } - if (isAuthEnabled) { const token = await ensureToken(request); if (!token?.accessToken) { diff --git a/apps/agentstack-ui/src/modules/platform-context/api/keys.ts b/apps/agentstack-ui/src/modules/platform-context/api/keys.ts index 04ae83372..b4833e55a 100644 --- a/apps/agentstack-ui/src/modules/platform-context/api/keys.ts +++ b/apps/agentstack-ui/src/modules/platform-context/api/keys.ts @@ -12,4 +12,6 @@ 
export const contextKeys = { histories: () => [...contextKeys.all(), 'history'] as const, history: ({ contextId, query = {} }: ListContextHistoryParams) => [...contextKeys.histories(), contextId, query] as const, + tokens: () => [...contextKeys.all(), 'token'] as const, + token: (contextId: string, providerId: string) => [...contextKeys.tokens(), contextId, providerId] as const, }; diff --git a/apps/agentstack-ui/src/modules/platform-context/api/queries/useContextToken.ts b/apps/agentstack-ui/src/modules/platform-context/api/queries/useContextToken.ts new file mode 100644 index 000000000..0f5ef7c07 --- /dev/null +++ b/apps/agentstack-ui/src/modules/platform-context/api/queries/useContextToken.ts @@ -0,0 +1,44 @@ +/** + * Copyright 2025 © BeeAI a Series of LF Projects, LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { useQuery } from '@tanstack/react-query'; +import type { ContextToken } from 'agentstack-sdk'; + +import { useApp } from '#contexts/App/index.ts'; +import type { Agent } from '#modules/agents/api/types.ts'; + +import { usePlatformContext } from '../../contexts'; +import { createContextToken } from '..'; +import { contextKeys } from '../keys'; + +export function useContextToken(agent: Agent) { + const { + config: { contextTokenPermissions }, + } = useApp(); + const { contextId } = usePlatformContext(); + + return useQuery({ + queryKey: contextKeys.token(contextId ?? '', agent.provider.id), + queryFn: async () => { + if (!contextId) { + throw new Error('Context ID is not set.'); + } + + const token = await createContextToken({ + contextId, + contextPermissions: contextTokenPermissions.grant_context_permissions ?? {}, + globalPermissions: contextTokenPermissions.grant_global_permissions ?? {}, + }); + + if (!token) { + throw new Error('Could not generate context token'); + } + + return token; + }, + enabled: !!contextId, + staleTime: Infinity, + }); +} diff --git a/apps/agentstack-ui/src/modules/platform-context/constants.ts b/apps/agentstack-ui/src/modules/platform-context/constants.ts index a3b32b432..0eeeff6ea 100644 --- a/apps/agentstack-ui/src/modules/platform-context/constants.ts +++ b/apps/agentstack-ui/src/modules/platform-context/constants.ts @@ -15,7 +15,7 @@ export const contextTokenPermissionsDefaults: DeepRequired = Omit, 'provider providerId?: string; }; -export function useBuildA2AClient({ providerId = '', onStatusUpdate }: Props) { +export function useBuildA2AClient({ + providerId = '', + onStatusUpdate, + authToken, +}: Props) { const { data: agentClient } = useQuery({ - queryKey: runKeys.client(providerId), + queryKey: runKeys.client(`${providerId}${Boolean(authToken)}`), queryFn: async () => buildA2AClient({ providerId, onStatusUpdate, + authToken, }), enabled: Boolean(providerId), staleTime: Infinity, diff --git a/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/A2AClientProvider.tsx b/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/A2AClientProvider.tsx new file mode 100644 index 000000000..e2e43ff54 --- /dev/null +++ b/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/A2AClientProvider.tsx @@ -0,0 +1,43 @@ +/** + * Copyright 2025 © BeeAI a Series of LF Projects, LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +'use client'; +import type { PropsWithChildren } from 'react'; +import { useMemo } from 'react'; + +import type { Agent } from '#modules/agents/api/types.ts'; +import { useContextToken } from '#modules/platform-context/api/queries/useContextToken.ts'; +import { useBuildA2AClient } from 
'#modules/runs/api/queries/useBuildA2AClient.ts'; + +import { A2AClientContext } from './a2a-client-context'; + +interface Props { + agent: Agent; +} + +export function A2AClientProvider({ agent, children }: PropsWithChildren) { + const { data: contextToken } = useContextToken(agent); + const { agentClient } = useBuildA2AClient({ + providerId: agent.provider.id, + authToken: contextToken, + }); + + const contextValue = useMemo(() => { + if (!contextToken || !agentClient) { + return null; + } + + return { + contextToken, + agentClient, + }; + }, [contextToken, agentClient]); + + if (!contextValue) { + return null; + } + + return {children}; +} diff --git a/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/a2a-client-context.ts b/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/a2a-client-context.ts new file mode 100644 index 000000000..4d2d532b4 --- /dev/null +++ b/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/a2a-client-context.ts @@ -0,0 +1,16 @@ +/** + * Copyright 2025 © BeeAI a Series of LF Projects, LLC + * SPDX-License-Identifier: Apache-2.0 + */ +'use client'; +import type { ContextToken } from 'agentstack-sdk'; +import { createContext } from 'react'; + +import type { AgentA2AClient } from '#api/a2a/types.ts'; + +export const A2AClientContext = createContext(null); + +export interface A2AClientContextValue { + contextToken: ContextToken; + agentClient: AgentA2AClient; +} diff --git a/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/index.ts b/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/index.ts new file mode 100644 index 000000000..4d2b6fe76 --- /dev/null +++ b/apps/agentstack-ui/src/modules/runs/contexts/a2a-client/index.ts @@ -0,0 +1,20 @@ +/** + * Copyright 2025 © BeeAI a Series of LF Projects, LLC + * SPDX-License-Identifier: Apache-2.0 + */ + +import { use } from 'react'; + +import { A2AClientContext } from './a2a-client-context'; + +export function useA2AClient() { + const context = use(A2AClientContext); + + if (!context) { + throw new Error('useA2AClient must be used within A2AClientProvider'); + } + + return context; +} + +export { A2AClientProvider } from './A2AClientProvider'; diff --git a/apps/agentstack-ui/src/modules/runs/contexts/agent-demands/AgentDemandsProvider.tsx b/apps/agentstack-ui/src/modules/runs/contexts/agent-demands/AgentDemandsProvider.tsx index cba967840..2ecd11bc2 100644 --- a/apps/agentstack-ui/src/modules/runs/contexts/agent-demands/AgentDemandsProvider.tsx +++ b/apps/agentstack-ui/src/modules/runs/contexts/agent-demands/AgentDemandsProvider.tsx @@ -6,28 +6,19 @@ import { type AgentSettings, type FormFulfillments, ModelCapability } from 'agentstack-sdk'; import { type PropsWithChildren, useCallback, useRef, useState } from 'react'; -import type { AgentA2AClient } from '#api/a2a/types.ts'; -import { useApp } from '#contexts/App/index.ts'; import { useListConnectors } from '#modules/connectors/api/queries/useListConnectors.ts'; import type { RunFormValues } from '#modules/form/types.ts'; -import { useCreateContextToken } from '#modules/platform-context/api/mutations/useCreateContextToken.ts'; import { useMatchProviders } from '#modules/platform-context/api/mutations/useMatchProviders.ts'; -import { usePlatformContext } from '#modules/platform-context/contexts/index.ts'; import { getSettingsDemandsDefaultValues } from '#modules/runs/settings/utils.ts'; +import { useA2AClient } from '../a2a-client'; import { useAgentSecrets } from '../agent-secrets'; import type { FulfillmentsContext } from 
'./agent-demands-context'; import { AgentDemandsContext } from './agent-demands-context'; import { buildFulfillments } from './build-fulfillments'; -interface Props { - agentClient: AgentA2AClient; -} - -export function AgentDemandsProvider({ - agentClient, - children, -}: PropsWithChildren>) { +export function AgentDemandsProvider({ children }: PropsWithChildren) { + const { agentClient, contextToken } = useA2AClient(); const { demandedSecrets } = useAgentSecrets(); const [selectedEmbeddingProviders, setSelectedEmbeddingProviders] = useState>({}); @@ -38,13 +29,6 @@ export function AgentDemandsProvider({ getSettingsDemandsDefaultValues(agentClient.demands.settingsDemands ?? { fields: [] }), ); - const { - config: { contextTokenPermissions }, - } = useApp(); - const { contextId } = usePlatformContext(); - - const { mutateAsync: createContextToken } = useCreateContextToken(); - const onUpdateSettings = useCallback((value: AgentSettings) => { setSelectedSettings(value); }, []); @@ -115,28 +99,8 @@ export function AgentDemandsProvider({ const { data: connectorsData } = useListConnectors(); - const getContextToken = useCallback(async () => { - if (contextId === null) { - throw new Error('Illegal State - Context ID is not set.'); - } - - const contextToken = await createContextToken({ - contextId, - contextPermissions: contextTokenPermissions.grant_context_permissions ?? {}, - globalPermissions: contextTokenPermissions.grant_global_permissions ?? {}, - }); - - if (!contextToken) { - throw new Error('Could not generate context token'); - } - - return contextToken; - }, [contextId, contextTokenPermissions, createContextToken]); - const getFulfillments = useCallback( async (fulfillmentsContext: FulfillmentsContext) => { - const contextToken = await getContextToken(); - const providedSecrets = demandedSecrets.reduce((memo, secret) => { if (secret.isReady) { memo[secret.key] = secret.value; @@ -157,14 +121,7 @@ export function AgentDemandsProvider({ connectors: connectorsData?.items ?? 
[], }); }, - [ - getContextToken, - selectedLLMProviders, - selectedEmbeddingProviders, - selectedSettings, - demandedSecrets, - connectorsData, - ], + [contextToken, selectedLLMProviders, selectedEmbeddingProviders, selectedSettings, demandedSecrets, connectorsData], ); return ( diff --git a/apps/agentstack-ui/src/modules/runs/contexts/agent-demands/build-fulfillments.ts b/apps/agentstack-ui/src/modules/runs/contexts/agent-demands/build-fulfillments.ts index 46aeb231a..8d8b90462 100644 --- a/apps/agentstack-ui/src/modules/runs/contexts/agent-demands/build-fulfillments.ts +++ b/apps/agentstack-ui/src/modules/runs/contexts/agent-demands/build-fulfillments.ts @@ -38,6 +38,7 @@ export const buildFulfillments = ({ connectors, }: BuildFulfillmentsParams): Fulfillments => { return { + // @deprecated - token now passed via A2A client headers getContextToken: () => contextToken, settings: async () => { diff --git a/apps/agentstack-ui/src/modules/runs/contexts/agent-run/AgentRunProvider.tsx b/apps/agentstack-ui/src/modules/runs/contexts/agent-run/AgentRunProvider.tsx index 0bfbfdda5..f54714374 100644 --- a/apps/agentstack-ui/src/modules/runs/contexts/agent-run/AgentRunProvider.tsx +++ b/apps/agentstack-ui/src/modules/runs/contexts/agent-run/AgentRunProvider.tsx @@ -10,7 +10,7 @@ import type { PropsWithChildren } from 'react'; import { useCallback, useMemo, useRef, useState } from 'react'; import { v4 as uuid } from 'uuid'; -import type { AgentA2AClient, ChatRun } from '#api/a2a/types.ts'; +import type { ChatRun } from '#api/a2a/types.ts'; import { createTextPart } from '#api/a2a/utils.ts'; import { getErrorCode } from '#api/utils.ts'; import { useHandleError } from '#hooks/useHandleError.ts'; @@ -30,7 +30,6 @@ import { addMessagePart, isAgentMessage } from '#modules/messages/utils.ts'; import { contextKeys } from '#modules/platform-context/api/keys.ts'; import { usePlatformContext } from '#modules/platform-context/contexts/index.ts'; import { useEnsurePlatformContext } from '#modules/platform-context/hooks/useEnsurePlatformContext.ts'; -import { useBuildA2AClient } from '#modules/runs/api/queries/useBuildA2AClient.ts'; import { useStartOAuth } from '#modules/runs/hooks/useStartOAuth.ts'; import type { RunStats } from '#modules/runs/types.ts'; import { SourcesProvider } from '#modules/sources/contexts/SourcesProvider.tsx'; @@ -38,6 +37,7 @@ import { getMessagesSourcesMap } from '#modules/sources/utils.ts'; import type { TaskId } from '#modules/tasks/api/types.ts'; import { isNotNull } from '#utils/helpers.ts'; +import { A2AClientProvider, useA2AClient } from '../a2a-client'; import { useAgentDemands } from '../agent-demands'; import type { FulfillmentsContext } from '../agent-demands/agent-demands-context'; import { AgentDemandsProvider } from '../agent-demands/AgentDemandsProvider'; @@ -50,40 +50,29 @@ interface Props { } export function AgentRunProviders({ agent, children }: PropsWithChildren) { - const { agentClient } = useBuildA2AClient({ - providerId: agent.provider.id, - }); - useEnsurePlatformContext(agent); - if (!agentClient) { - return null; - } - return ( - - - - - - - {children} - - - - - - + + + + + + + {children} + + + + + + ); } -interface AgentRunProviderProps extends Props { - agentClient?: AgentA2AClient; -} - -function AgentRunProvider({ agent, agentClient, children }: PropsWithChildren) { +function AgentRunProvider({ agent, children }: PropsWithChildren) { const queryClient = useQueryClient(); const errorHandler = useHandleError(); + const { agentClient } = useA2AClient(); const 
{ messages, getMessages, setMessages } = useMessages(); diff --git a/apps/agentstack-ui/src/modules/runs/contexts/agent-secrets/AgentSecretsProvider.tsx b/apps/agentstack-ui/src/modules/runs/contexts/agent-secrets/AgentSecretsProvider.tsx index ce5b6dfb1..c144f4bce 100644 --- a/apps/agentstack-ui/src/modules/runs/contexts/agent-secrets/AgentSecretsProvider.tsx +++ b/apps/agentstack-ui/src/modules/runs/contexts/agent-secrets/AgentSecretsProvider.tsx @@ -8,17 +8,16 @@ import { useCallback, useMemo } from 'react'; import { useLocalStorage } from 'usehooks-ts'; import z from 'zod'; -import type { AgentA2AClient } from '#api/a2a/types.ts'; import type { Agent } from '#modules/agents/api/types.ts'; import { useListVariables } from '#modules/variables/api/queries/useListVariables.ts'; import { AGENT_SECRETS_SETTINGS_STORAGE_KEY } from '#utils/constants.ts'; +import { useA2AClient } from '../a2a-client'; import { AgentSecretsContext } from './agent-secrets-context'; import type { NonReadySecretDemand, ReadySecretDemand } from './types'; interface Props { agent: Agent; - agentClient?: AgentA2AClient; } const secretsSchema = z.record( @@ -41,7 +40,8 @@ const secretsLocalStorageOptions = { }, }; -export function AgentSecretsProvider({ agent, agentClient, children }: PropsWithChildren) { +export function AgentSecretsProvider({ agent, children }: PropsWithChildren) { + const { agentClient } = useA2AClient(); const [agentSecrets, setAgentSecrets] = useLocalStorage( AGENT_SECRETS_SETTINGS_STORAGE_KEY, {}, @@ -56,7 +56,7 @@ export function AgentSecretsProvider({ agent, agentClient, children }: PropsWith }, [agentSecrets, agent.provider.id]); const secretDemands = useMemo(() => { - return agentClient?.demands.secretDemands ?? null; + return agentClient.demands.secretDemands ?? null; }, [agentClient]); const markModalAsSeen = useCallback(() => { diff --git a/docs/development/custom-ui/permissions-and-tokens.mdx b/docs/development/custom-ui/permissions-and-tokens.mdx index 378747ac2..7b0223ef7 100644 --- a/docs/development/custom-ui/permissions-and-tokens.mdx +++ b/docs/development/custom-ui/permissions-and-tokens.mdx @@ -222,19 +222,21 @@ Here is an example how you can create a context and generate a custom token with from agentstack_sdk.platform.context import Context, Permissions, ContextPermissions # Create a context + providers = await Provider.list() context = await Context.create() # Generate a token with specific permissions token = await context.generate_token( - grant_global_permissions=Permissions( - files={"read", "write"}, - llm={"*"}, - embeddings={"*"}, - ), - grant_context_permissions=ContextPermissions( - files={"read", "write"}, - context_data={"read", "write"}, - ), + providers=[providers[0]], + grant_global_permissions=Permissions( + files={"read", "write"}, + llm={"*"}, + embeddings={"*"}, + ), + grant_context_permissions=ContextPermissions( + files={"read", "write"}, + context_data={"read", "write"}, + ), ) # Use the token @@ -246,8 +248,8 @@ Here is an example how you can create a context and generate a custom token with ```bash REST API # 1. Create a context curl -X POST "https://api.agentstack.example.com/api/v1/contexts" \ - -H "Authorization: Bearer YOUR_USER_TOKEN" \ - -H "Content-Type: application/json" + -H "Authorization: Bearer YOUR_USER_TOKEN" \ + -H "Content-Type: application/json" # Response { @@ -257,19 +259,20 @@ Here is an example how you can create a context and generate a custom token with # 2. 
Generate a context token curl -X POST "https://api.agentstack.example.com/api/v1/contexts/{context_id}/token" \ - -H "Authorization: Bearer YOUR_USER_TOKEN" \ - -H "Content-Type: application/json" \ - -d '{ - "grant_global_permissions": { - "files": ["read", "write"], - "llm": ["*"], - "embeddings": ["*"] -}, - "grant_context_permissions": { - "files": ["read", "write"], - "context_data": ["read", "write"] -} -}' + -H "Authorization: Bearer YOUR_USER_TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "grant_global_permissions": { + "files": ["read", "write"], + "llm": ["*"], + "embeddings": ["*"], + "a2a_proxy": [""] + }, + "grant_context_permissions": { + "files": ["read", "write"], + "context_data": ["read", "write"] + } + }' # Response { diff --git a/docs/development/deploy-agent-stack/deployment-guide.mdx b/docs/development/deploy-agent-stack/deployment-guide.mdx index 100b1f0d1..9ab3ef201 100644 --- a/docs/development/deploy-agent-stack/deployment-guide.mdx +++ b/docs/development/deploy-agent-stack/deployment-guide.mdx @@ -52,7 +52,6 @@ encryptionKey: "encryption-key-from-command" # This requires passing an admin password to certain endpoints, you can disable auth for insecure deployments auth: enabled: true - jwtSecretKey: "my-secret-key" basic: # CAUTION: this leaves most features accessible without authentication, please read the authentication section below enabled: true @@ -128,7 +127,6 @@ For production deployments, multi-user environments, or cost control, use OIDC a ```yaml auth: enabled: true - jwtSecretKey: "my-secret-key" # fill in a strong secret basic: enabled: true adminPassword: "my-admin-password" # fill in a strong admin password @@ -143,7 +141,6 @@ trustProxyHeaders: true # This is important if validate_audience is enabled auth: enabled: true - jwtSecretKey: "my-secret-key" # fill in a strong secret oidc: # Important: redirect URIs must be configured correctly in your provider: # - UI endpoint: "https://your-public-url/api/auth/callback" diff --git a/helm/templates/_validators.tpl b/helm/templates/_validators.tpl index a311f26ec..5c0800d97 100644 --- a/helm/templates/_validators.tpl +++ b/helm/templates/_validators.tpl @@ -31,11 +31,18 @@ Validate authentication configuration. */}} {{- define "agentstack.validate.authConfig" -}} {{- if .Values.auth.enabled -}} - {{- if empty .Values.auth.jwtSecretKey -}} + {{- if and (empty .Values.auth.jwtPrivateKey) (not (empty .Values.auth.jwtPublicKey)) -}} {{- fail ` -ERROR: .Values.auth.jwtSecretKey is missing. +ERROR: .Values.auth.jwtPrivateKey is missing but .Values.auth.jwtPublicKey is provided. -When authentication is enabled, you must provide a JWT secret key. +Please provide both keys or neither (to auto-generate them). +` -}} + {{- end -}} + {{- if and (not (empty .Values.auth.jwtPrivateKey)) (empty .Values.auth.jwtPublicKey) -}} + {{- fail ` +ERROR: .Values.auth.jwtPublicKey is missing but .Values.auth.jwtPrivateKey is provided. + +Please provide both keys or neither (to auto-generate them). 
` -}} {{- end -}} {{- if and .Values.auth.basic.enabled (empty .Values.auth.basic.adminPassword) -}} diff --git a/helm/templates/config/secret.yaml b/helm/templates/config/secret.yaml index 4cda70514..169604e03 100644 --- a/helm/templates/config/secret.yaml +++ b/helm/templates/config/secret.yaml @@ -1,3 +1,21 @@ +{{- $jwtPrivateKey := .Values.auth.jwtPrivateKey -}} +{{- $jwtPublicKey := .Values.auth.jwtPublicKey -}} + +{{- if and (empty $jwtPrivateKey) (empty $jwtPublicKey) -}} + {{- $secret := (lookup "v1" "Secret" .Release.Namespace "agentstack-secret") -}} + {{- if and $secret $secret.data -}} + {{- if and (hasKey $secret.data "jwtPrivateKey") (hasKey $secret.data "jwtPublicKey") -}} + {{- $jwtPrivateKey = index $secret.data "jwtPrivateKey" | b64dec -}} + {{- $jwtPublicKey = index $secret.data "jwtPublicKey" | b64dec -}} + {{- end -}} + {{- end -}} + + {{- if and (empty $jwtPrivateKey) (empty $jwtPublicKey) -}} + {{- $gen := genSelfSignedCert "agentstack-jwt" nil nil 3650 -}} + {{- $jwtPrivateKey = $gen.Key -}} + {{- $jwtPublicKey = $gen.Cert -}} + {{- end -}} +{{- end -}} apiVersion: v1 kind: Secret metadata: @@ -6,6 +24,7 @@ metadata: app: agentstack-server {{- include "agentstack.labels" . | nindent 4 }} type: Opaque + data: encryptionKey: {{ .Values.encryptionKey | b64enc | quote }} sqlConnection: {{ printf "postgresql+asyncpg://%s:%s@%s:%s/%s" @@ -27,7 +46,8 @@ data: }} {{- end }} adminPassword: {{ .Values.auth.basic.adminPassword | b64enc | quote }} - jwtSecretKey: {{ .Values.auth.jwtSecretKey | b64enc | quote }} + jwtPrivateKey: {{ $jwtPrivateKey | b64enc | quote }} + jwtPublicKey: {{ $jwtPublicKey | b64enc | quote }} s3AccessKeyId: {{ include "agentstack.s3.accessKeyID" . | b64enc | quote }} s3AccessKeySecret: {{ include "agentstack.s3.accessKeySecret" . | b64enc | quote }} {{- if .Values.github.auths }} diff --git a/helm/templates/deployment.yaml b/helm/templates/deployment.yaml index ddebe2a04..ce2a5627d 100644 --- a/helm/templates/deployment.yaml +++ b/helm/templates/deployment.yaml @@ -240,10 +240,15 @@ spec: {{- if .Values.auth.enabled }} - name: AUTH__DISABLE_AUTH value: "false" - - name: AUTH__JWT_SECRET_KEY + - name: AUTH__JWT_PRIVATE_KEY valueFrom: secretKeyRef: - key: jwtSecretKey + key: jwtPrivateKey + name: agentstack-secret + - name: AUTH__JWT_PUBLIC_KEY + valueFrom: + secretKeyRef: + key: jwtPublicKey name: agentstack-secret {{- if .Values.auth.basic.enabled}} - name: AUTH__BASIC__ENABLED diff --git a/helm/values.yaml b/helm/values.yaml index fa1fb66a8..562e3128c 100644 --- a/helm/values.yaml +++ b/helm/values.yaml @@ -70,7 +70,7 @@ contextTokenPermissions: llm: ["*"] embeddings: ["*"] model_providers: [] - a2a_proxy: [] + a2a_proxy: ["*"] providers: [] provider_variables: [] contexts: [] @@ -115,7 +115,8 @@ agentRegistrySyncPeriodCron: "*/30 * * * * *" encryptionKey: "" auth: enabled: false # Warning, disable only for local deployments - jwtSecretKey: "" + jwtPrivateKey: "" # If empty, a key pair will be auto-generated + jwtPublicKey: "" # If empty, a key pair will be auto-generated basic: enabled: false adminPassword: "" diff --git a/tasks.toml b/tasks.toml index a6a7ade33..b725ef197 100644 --- a/tasks.toml +++ b/tasks.toml @@ -471,7 +471,6 @@ images=$(helm template \ --set phoenix.enabled=true \ --set encryptionKey=dummy \ --set auth.enabled=false \ - --set auth.jwtSecretKey=kyticka \ --set providerBuilds.enabled=true \ --set localDockerRegistry.enabled=true \ --set redis.enabled=true \
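
Editor's note: the Helm and server changes above replace the shared `jwtSecretKey` with an RSA key pair (auto-generated via `genSelfSignedCert` when neither `jwtPrivateKey` nor `jwtPublicKey` is set) and expose the public key through the new `/.well-known/jwks` route added to the API schema, with `authlib` added as an SDK dependency. The sketch below shows how an external resource might validate a platform-issued bearer token against that JWKS endpoint; the `/.well-known/jwks` path comes from the schema diff, while the RS256 algorithm and the claim contents are assumptions, not confirmed by this change.

```python
# Sketch only: verify a platform-issued JWT against the JWKS endpoint.
# The signing algorithm (RS256) is an assumption; adjust to match the deployment.
import httpx
from authlib.jose import JsonWebKey, JsonWebToken


def verify_platform_token(base_url: str, bearer_token: str) -> dict:
    resp = httpx.get(f"{base_url}/.well-known/jwks")
    resp.raise_for_status()
    key_set = JsonWebKey.import_key_set(resp.json())

    jwt = JsonWebToken(["RS256"])  # assumed algorithm
    claims = jwt.decode(bearer_token, key_set)
    claims.validate()  # checks standard time-based claims such as exp/nbf
    return dict(claims)
```

In-cluster, the server itself verifies tokens with the `AUTH__JWT_PRIVATE_KEY`/`AUTH__JWT_PUBLIC_KEY` pair injected by the deployment template above; this helper is only meant to illustrate how a separate resource could rely on the published JWKS instead of sharing the key material.
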