"""
router.py — Central Router for the Unified Router for Agents system.
Implements the ESB-style router that handles all task routing, state
management, proxy file serving, access control, and agent onboarding.
Stack: FastAPI (asyncio), SQLite (WAL mode), httpx.
NOTE: For high-traffic production deployments, replace the synchronous
sqlite3 DB calls with aiosqlite to avoid blocking the event loop.
All DB helper functions are structured so the switch is straightforward:
replace `sqlite3.connect` / `conn.cursor()` with `aiosqlite.connect`
and add `await` where indicated.
"""
from __future__ import annotations
import asyncio
import importlib.util
import json
import os
import secrets
import sqlite3
import sys
import uuid
from contextlib import asynccontextmanager
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, AsyncIterator, Optional
import httpx
from fastapi import (
FastAPI,
Header,
HTTPException,
)
from fastapi.responses import FileResponse, JSONResponse, StreamingResponse
from pydantic import BaseModel
from helper import AgentInfo, OnboardRequest
# ---------------------------------------------------------------------------
# Configuration (from environment variables with defaults)
# ---------------------------------------------------------------------------
DB_PATH: str = os.environ.get("DB_PATH", "router.db")
PROXYFILE_DIR: str = os.environ.get("PROXYFILE_DIR", "proxyfiles")
_PROJECT_ROOT: str = str(Path(__file__).resolve().parent)
def _is_safe_path(path: str, allowed_roots: list[str] | None = None) -> bool:
"""Check that a resolved path is under an allowed root directory."""
resolved = Path(path).resolve()
roots = [Path(r).resolve() for r in (allowed_roots or [_PROJECT_ROOT])]
return any(resolved == root or str(resolved).startswith(str(root) + os.sep) for root in roots)
def _sanitize_task_id(task_id: str) -> str:
"""Strip path-traversal characters from task_id."""
import re
sanitized = re.sub(r'[^a-zA-Z0-9_\-]', '_', task_id)
if not sanitized:
sanitized = "unknown"
return sanitized
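# Illustrative sketch (not executed): expected behaviour of the two helpers
# above. The paths and task IDs are hypothetical examples.
#
#     _is_safe_path("/etc/passwd")                     # False: outside the project root
#     _is_safe_path(os.path.join(_PROJECT_ROOT, "x"))  # True: under the project root
#     _sanitize_task_id("../../../etc/passwd")         # "_________etc_passwd"
#     _sanitize_task_id("")                            # "unknown"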
AGENTS_DIR: str = os.environ.get("AGENTS_DIR", "agents")
GLOBAL_TIMEOUT_HOURS: int = int(os.environ.get("GLOBAL_TIMEOUT_HOURS", "1"))
MAX_DEPTH: int = int(os.environ.get("MAX_DEPTH", "10"))
MAX_WIDTH: int = int(os.environ.get("MAX_WIDTH", "50"))
MAX_PAYLOAD_BYTES: int = int(os.environ.get("MAX_PAYLOAD_BYTES", str(1 * 1024 * 1024))) # 1 MB
MAX_FILE_BYTES: int = int(os.environ.get("MAX_FILE_BYTES", str(50 * 1024 * 1024))) # 50 MB
ADMIN_TOKEN: str = os.environ.get("ADMIN_TOKEN", "")
EMBEDDED_AGENT_TIMEOUT: float = float(os.environ.get("EMBEDDED_AGENT_TIMEOUT", "300"))
TASK_RETENTION_HOURS: int = int(os.environ.get("TASK_RETENTION_HOURS", "72"))
# In-memory registry of loaded embedded agent ASGI apps.
# NOTE: For high-traffic production deployments, consider aiosqlite for DB
# and a proper service-registry if agents can be hot-loaded.
embedded_apps: dict[str, Any] = {}
# Alive agents set — agents that are reachable.
# Embedded agents are always alive. External agents are probed periodically.
# Agents that send messages are auto-added.
_alive_agents: set[str] = set()
AGENT_HEALTH_INTERVAL: int = int(os.environ.get("AGENT_HEALTH_INTERVAL", "60"))
# In-memory progress event pub/sub.
# task_id → list of subscriber asyncio.Queues.
# Cleaned up when task reaches terminal state or all subscribers disconnect.
# Thread-safety note: All access happens in the single-threaded asyncio event
# loop. Dict/list mutations between ``await`` points are atomic under
# cooperative scheduling, so no explicit lock is needed.
_progress_queues: dict[str, list[asyncio.Queue]] = {}
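# Illustrative sketch (not executed) of the pub/sub contract described above.
# The helper names are hypothetical; the real subscribe and publish sites live
# in the progress endpoint and the event-logging code further down this file.
#
#     async def _subscribe(task_id: str) -> asyncio.Queue:
#         q: asyncio.Queue = asyncio.Queue(maxsize=100)
#         _progress_queues.setdefault(task_id, []).append(q)
#         return q
#
#     def _publish(task_id: str, event: dict) -> None:
#         for q in _progress_queues.get(task_id, []):
#             try:
#                 q.put_nowait(event)
#             except asyncio.QueueFull:
#                 pass  # drop events for slow consumers rather than block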
# ---------------------------------------------------------------------------
# Database helpers
# ---------------------------------------------------------------------------
def get_db() -> sqlite3.Connection:
"""
Open and return a SQLite connection in WAL mode.
NOTE: For high-traffic production deployments, replace with aiosqlite:
async with aiosqlite.connect(DB_PATH) as conn:
conn.row_factory = aiosqlite.Row
...
Returns:
A configured sqlite3.Connection with WAL mode and row_factory set.
"""
conn = sqlite3.connect(DB_PATH, check_same_thread=False)
conn.row_factory = sqlite3.Row
conn.execute("PRAGMA journal_mode=WAL")
conn.execute("PRAGMA foreign_keys=ON")
return conn
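# Hedged sketch of the aiosqlite variant mentioned in the NOTE above; this is
# an assumption about how the swap could look, not part of the current module:
#
#     import aiosqlite
#
#     async def get_db_async() -> aiosqlite.Connection:
#         conn = await aiosqlite.connect(DB_PATH)
#         conn.row_factory = aiosqlite.Row
#         await conn.execute("PRAGMA journal_mode=WAL")
#         await conn.execute("PRAGMA foreign_keys=ON")
#         return conn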
def init_db() -> None:
"""
Initialise the database schema.
Creates all tables if they do not already exist and seeds the default
group allowlist rules (including the legacy embedded→embedded entry).
NOTE: For high-traffic production deployments, use aiosqlite and await
all execute/commit calls.
"""
conn = get_db()
try:
cursor = conn.cursor()
cursor.executescript("""
CREATE TABLE IF NOT EXISTS tasks (
task_id TEXT PRIMARY KEY,
parent_task_id TEXT,
identifier TEXT,
origin_agent_id TEXT NOT NULL,
handler_agent_id TEXT,
depth_count INTEGER NOT NULL,
width_count INTEGER DEFAULT 0,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
timeout_at DATETIME NOT NULL,
status TEXT DEFAULT 'active'
);
CREATE TABLE IF NOT EXISTS events (
event_id INTEGER PRIMARY KEY AUTOINCREMENT,
task_id TEXT NOT NULL,
agent_id TEXT NOT NULL,
destination_agent_id TEXT,
event_type TEXT NOT NULL,
status_code INTEGER,
payload TEXT,
timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(task_id) REFERENCES tasks(task_id)
);
CREATE INDEX IF NOT EXISTS idx_events_task_id ON events(task_id);
CREATE TABLE IF NOT EXISTS agents (
agent_id TEXT PRIMARY KEY,
endpoint_url TEXT,
agent_path TEXT,
auth_token TEXT NOT NULL,
inbound_groups TEXT DEFAULT '[]',
outbound_groups TEXT DEFAULT '[]',
is_embedded INTEGER DEFAULT 0,
agent_info TEXT DEFAULT '{}',
documentation_path TEXT,
registered_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS invitation_tokens (
token TEXT PRIMARY KEY,
inbound_groups TEXT DEFAULT '[]',
outbound_groups TEXT DEFAULT '[]',
expires_at DATETIME NOT NULL,
used INTEGER DEFAULT 0,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS group_allowlist (
inbound_group TEXT NOT NULL,
outbound_group TEXT NOT NULL,
PRIMARY KEY (inbound_group, outbound_group)
);
CREATE TABLE IF NOT EXISTS individual_allowlist (
agent_id TEXT NOT NULL,
destination_agent_id TEXT NOT NULL,
PRIMARY KEY (agent_id, destination_agent_id)
);
CREATE TABLE IF NOT EXISTS proxy_files (
file_key TEXT PRIMARY KEY,
file_path TEXT NOT NULL,
original_filename TEXT,
task_id TEXT,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY(task_id) REFERENCES tasks(task_id)
);
""")
# Migration: add handler_agent_id column if missing (for existing DBs).
cols = {row[1] for row in cursor.execute("PRAGMA table_info(tasks)").fetchall()}
if "handler_agent_id" not in cols:
cursor.execute("ALTER TABLE tasks ADD COLUMN handler_agent_id TEXT")
# Seed default group allowlist rules.
# These define the ACL routing policy between agent groups.
# Additional rules can be added via the admin API at runtime.
_default_rules = [
("embedded", "embedded"), # legacy compat: embedded agents can call each other
("core", "infra"), # core can call LLM
("core", "tool"), # core can call stateless tools
("core", "usertool"), # core can call user-specific tools
("core", "channel"), # core can send DMs via channel
("channel", "core"), # channel routes user messages to core
("channel", "channel"), # channel agents can talk to each other (e.g. webapp → channel for token auth)
("tool", "infra"), # tools can call LLM
("usertool", "infra"), # user-tools can call LLM
("usertool", "tool"), # user-tools can call stateless tools (e.g. kb→md_converter)
("notify", "core"), # proactive agents (reminder/cron) can reach core
("notify", "channel"), # proactive agents can reach channel for direct delivery
("bridge", "tool"), # MCP bridge can expose stateless tools
("bridge", "infra"), # MCP bridge can call LLM
("admin", "core"), # admin web UI can test core
("admin", "tool"), # admin can test tools
("admin", "usertool"), # admin can test user-tools
("admin", "infra"), # admin can test LLM
("admin", "channel"), # admin can test channel delivery
]
for outbound, inbound in _default_rules:
cursor.execute(
"INSERT OR IGNORE INTO group_allowlist (inbound_group, outbound_group) VALUES (?, ?)",
(inbound, outbound),
)
conn.commit()
finally:
conn.close()
# ---------------------------------------------------------------------------
# Pydantic request / response models
# ---------------------------------------------------------------------------
class RouteRequest(BaseModel):
"""Routing payload sent by agents to POST /route."""
agent_id: str
task_id: str
identifier: Optional[str] = None
parent_task_id: Optional[str] = None
destination_agent_id: Optional[str] = None
timestamp: str
status_code: Optional[int] = None
payload: dict[str, Any] = {}
available_destinations: Optional[dict[str, Any]] = None
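# Illustrative sketch (not executed): a routing payload an agent might POST to
# /route to open a new task. The agent IDs and content are hypothetical; the
# "new" task_id matches the sentinel value checked elsewhere in this module.
#
#     {
#         "agent_id": "assistant",
#         "task_id": "new",
#         "identifier": "user-42",
#         "parent_task_id": null,
#         "destination_agent_id": "llm",
#         "timestamp": "2024-01-01T00:00:00+00:00",
#         "status_code": null,
#         "payload": {"content": "Summarise this document."}
#     }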
class InvitationCreateRequest(BaseModel):
"""Admin request to create a new invitation token."""
inbound_groups: list[str] = []
outbound_groups: list[str] = []
expires_in_hours: int = 24
class GroupAllowlistRequest(BaseModel):
"""Admin request to add a group-level routing permission."""
inbound_group: str
outbound_group: str
class IndividualAllowlistRequest(BaseModel):
"""Admin request to add an individual agent routing permission."""
agent_id: str
destination_agent_id: str
class UpdateAgentGroupsRequest(BaseModel):
"""Admin request to update an agent's inbound/outbound group membership."""
inbound_groups: list[str]
outbound_groups: list[str]
class UpdateAgentInfoRequest(BaseModel):
"""Agent self-update of its own AgentInfo."""
agent_id: str
description: Optional[str] = None
input_schema: Optional[str] = None
output_schema: Optional[str] = None
required_input: Optional[list[str]] = None
documentation_url: Optional[str] = None
endpoint_url: Optional[str] = None
# ---------------------------------------------------------------------------
# Embedded agent loader
# ---------------------------------------------------------------------------
async def load_embedded_agents() -> None:
"""
Scan AGENTS_DIR for embedded agent packages and register them.
Each subdirectory of AGENTS_DIR is treated as a potential agent package.
A valid agent directory must contain ``agent.py`` (with a ``app: FastAPI``
attribute) and optionally a ``.env`` file and an ``AGENT_INFO: AgentInfo``
module-level attribute.
Agents not yet present in the DB are auto-registered with:
- A freshly generated auth token.
- Group assignments from the module's ``AGENT_GROUPS`` attribute.
- ``is_embedded = 1``.
The loaded ASGI app is placed in the module-level ``embedded_apps`` dict
keyed by agent_id (the subdirectory name).
"""
# Per-agent group assignments: each agent.py defines
# AGENT_GROUPS = (["inbound"], ["outbound"]).
# Agents without AGENT_GROUPS get the default ["embedded"] group.
_DEFAULT_GROUPS = (["embedded"], ["embedded"])
agents_path = Path(AGENTS_DIR)
if not agents_path.exists():
return
conn = get_db()
try:
for entry in agents_path.iterdir():
if not entry.is_dir():
continue
agent_py = entry / "agent.py"
if not agent_py.exists():
continue
# Skip agents with an ignore flag file.
if (entry / ".ignore_agent").exists():
print(f"[router] Skipping embedded agent '{entry.name}' (.ignore_agent present)")
continue
agent_id = entry.name
# Dynamically import the agent module.
# (Agents read their own config.json directly — no env injection needed.)
#
# Clear stale bare-module entries so each agent's
# `from tools import ...` resolves against its own directory,
# not a previously loaded agent's cached module. Without this,
# two agents sharing a filename (e.g. tools.py, config.py, db.py)
# collide in sys.modules and the second one gets wrong symbols.
sibling_names = [
p.stem for p in entry.iterdir()
if p.suffix == ".py" and p.stem not in ("agent", "__init__")
]
for _mod_name in sibling_names:
sys.modules.pop(_mod_name, None)
spec = importlib.util.spec_from_file_location(
f"agents.{agent_id}.agent", str(agent_py)
)
if spec is None or spec.loader is None:
continue
module = importlib.util.module_from_spec(spec)
sys.modules[f"agents.{agent_id}.agent"] = module
try:
spec.loader.exec_module(module) # type: ignore[union-attr]
except Exception as exc:
print(f"[router] Failed to load embedded agent '{agent_id}': {exc}")
continue
app = getattr(module, "app", None)
if app is None:
print(f"[router] Skipping '{agent_id}': no 'app' attribute in agent.py")
continue
embedded_apps[agent_id] = app
# Persist agent record if not already in DB.
# NOTE: For high-traffic production deployments, use aiosqlite here.
existing = conn.execute(
"SELECT agent_id FROM agents WHERE agent_id = ?", (agent_id,)
).fetchone()
if existing is None:
auth_token = secrets.token_urlsafe(32)
agent_info_obj = getattr(module, "AGENT_INFO", None)
agent_info_json: str
doc_url: Optional[str] = None
if agent_info_obj is not None:
try:
agent_info_json = agent_info_obj.model_dump_json()
doc_url = getattr(agent_info_obj, "documentation_url", None)
except Exception:
agent_info_json = "{}"
else:
agent_info_json = "{}"
# Fetch and store documentation if the agent provides a URL.
documentation_path: Optional[str] = None
if doc_url:
try:
doc_bytes = await _fetch_documentation(doc_url)
except Exception as _doc_exc:
print(f"[router] Failed to fetch documentation for '{agent_id}': {_doc_exc}")
doc_bytes = None
if doc_bytes:
documentation_path = _store_agent_documentation(
agent_id, doc_bytes, conn
)
# Resolution order: module AGENT_GROUPS → default groups.
module_groups = getattr(module, "AGENT_GROUPS", None)
if module_groups and isinstance(module_groups, (list, tuple)) and len(module_groups) == 2:
inbound_g, outbound_g = list(module_groups[0]), list(module_groups[1])
else:
inbound_g, outbound_g = _DEFAULT_GROUPS
conn.execute(
"""
INSERT INTO agents
(agent_id, agent_path, auth_token, inbound_groups,
outbound_groups, is_embedded, agent_info, documentation_path)
VALUES (?, ?, ?, ?, ?, 1, ?, ?)
""",
(
agent_id,
str(agent_py),
auth_token,
json.dumps(inbound_g),
json.dumps(outbound_g),
agent_info_json,
documentation_path,
),
)
conn.commit()
print(f"[router] Registered embedded agent '{agent_id}'")
else:
# Existing embedded agent: regenerate a fresh token so we can
# inject the raw value into the environment for the agent.
auth_token = secrets.token_urlsafe(32)
conn.execute(
"UPDATE agents SET auth_token = ? WHERE agent_id = ?",
(auth_token, agent_id),
)
conn.commit()
# Inject the raw auth token as an env var for the agent to read.
# NOTE: This runs AFTER exec_module(), so embedded agents must NOT
# read this env var at import time — only lazily on first request.
os.environ[f"{agent_id.upper()}_AUTH_TOKEN"] = auth_token
finally:
conn.close()
async def _fetch_documentation(url: str) -> Optional[bytes]:
"""
Fetch documentation content from a URL.
Supports ``http://``, ``https://``, ``file://``, and bare filesystem paths.
Args:
url: The documentation URL or local path.
Returns:
The raw bytes of the documentation, or None on failure.
"""
if not url:
return None
# file:// URI or bare filesystem path
if url.startswith("file://"):
local_path = Path(url[7:])
elif not url.startswith(("http://", "https://")):
local_path = Path(url)
else:
local_path = None
if local_path is not None:
if not _is_safe_path(str(local_path)):
print(f"[router] _fetch_documentation: rejected path outside project root: {local_path}")
return None
try:
return local_path.read_bytes()
except Exception as exc:
print(f"[router] _fetch_documentation: failed to read '{local_path}': {exc}")
return None
# HTTP(S) fetch
try:
async with httpx.AsyncClient(timeout=30.0) as http_client:
resp = await http_client.get(url)
resp.raise_for_status()
return resp.content
except Exception as exc:
print(f"[router] _fetch_documentation: failed to fetch '{url}': {exc}")
return None
def _store_agent_documentation(
agent_id: str,
doc_bytes: bytes,
conn: sqlite3.Connection,
) -> Optional[str]:
"""
Write documentation bytes to the proxy vault and register in proxy_files.
Args:
agent_id: The agent this documentation belongs to.
doc_bytes: Raw documentation content.
conn: An open database connection (caller must commit).
Returns:
The on-disk path string, or None on failure.
"""
try:
doc_dir = Path(PROXYFILE_DIR) / "agent_documentations"
doc_dir.mkdir(parents=True, exist_ok=True)
doc_filename = f"{agent_id}.md"
doc_dest = doc_dir / doc_filename
doc_dest.write_bytes(doc_bytes)
documentation_path = str(doc_dest)
# Upsert proxy_files row.
existing_pf = conn.execute(
"SELECT file_key FROM proxy_files WHERE file_path = ?",
(documentation_path,),
).fetchone()
if not existing_pf:
doc_file_key = secrets.token_urlsafe(32)
conn.execute(
"INSERT INTO proxy_files (file_key, file_path, original_filename, task_id) "
"VALUES (?, ?, ?, NULL)",
(doc_file_key, documentation_path, doc_filename),
)
return documentation_path
except Exception as exc:
print(f"[router] _store_agent_documentation: failed for '{agent_id}': {exc}")
return None
# ---------------------------------------------------------------------------
# ACL helpers
# ---------------------------------------------------------------------------
def can_route(agent_id: str, dest_id: str, conn: sqlite3.Connection) -> bool:
"""
Determine whether ``agent_id`` is permitted to send messages to ``dest_id``.
ACL resolution order:
1. If ``individual_allowlist`` has **any** entries for ``agent_id``, only
those explicit destinations are permitted (group rules are ignored).
2. Otherwise, resolve via group membership:
- Fetch the agent's ``outbound_groups``.
- Look up which ``inbound_groups`` are allowed to receive from those
outbound groups via ``group_allowlist``.
- Check if ``dest_id``'s ``inbound_groups`` intersect the allowed set.
NOTE: For high-traffic production deployments, use aiosqlite here.
Args:
agent_id: The source agent ID.
dest_id: The target agent ID.
conn: An open database connection.
Returns:
True if routing is permitted, False otherwise.
"""
# Check for any individual allowlist entries for this agent.
individual_entries = conn.execute(
"SELECT destination_agent_id FROM individual_allowlist WHERE agent_id = ?",
(agent_id,),
).fetchall()
if individual_entries:
# Individual rules fully supersede group rules.
allowed_ids = {row["destination_agent_id"] for row in individual_entries}
return dest_id in allowed_ids
# Group-based resolution.
src_row = conn.execute(
"SELECT outbound_groups FROM agents WHERE agent_id = ?", (agent_id,)
).fetchone()
if src_row is None:
return False
outbound_groups: list[str] = json.loads(src_row["outbound_groups"] or "[]")
if not outbound_groups:
return False
# Find all inbound_groups that the outbound_groups are allowed to reach.
placeholders = ",".join("?" * len(outbound_groups))
allowed_inbound_rows = conn.execute(
f"""
SELECT DISTINCT inbound_group
FROM group_allowlist
WHERE outbound_group IN ({placeholders})
""",
outbound_groups,
).fetchall()
allowed_inbound_groups = {row["inbound_group"] for row in allowed_inbound_rows}
if not allowed_inbound_groups:
return False
# Check if dest_id belongs to any of the allowed inbound groups.
dst_row = conn.execute(
"SELECT inbound_groups FROM agents WHERE agent_id = ?", (dest_id,)
).fetchone()
if dst_row is None:
return False
dest_inbound_groups: list[str] = json.loads(dst_row["inbound_groups"] or "[]")
return bool(set(dest_inbound_groups) & allowed_inbound_groups)
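# Illustrative sketch (not executed): how the resolution order above plays out
# under the default rules seeded by init_db(). The agent IDs are hypothetical;
# assume "assistant" has outbound group "core" and "llm" has inbound group "infra".
#
#     conn = get_db()
#     can_route("assistant", "llm", conn)   # True via the ("core", "infra") seed rule
#     can_route("llm", "assistant", conn)   # False: no rule lets "infra" reach "core"
#     conn.close()
#
# If individual_allowlist holds any row for "assistant", only those explicit
# destinations are permitted and the group rules are ignored entirely.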
def get_available_destinations(agent_id: str, conn: sqlite3.Connection) -> dict[str, Any]:
"""
Build the ACL-filtered map of destinations the given agent may contact.
The returned dict is keyed by agent_id and contains the destination
agent's AgentInfo fields (description, input_schema, output_schema,
required_input).
NOTE: For high-traffic production deployments, use aiosqlite here.
Args:
agent_id: The agent whose reachable destinations should be computed.
conn: An open database connection.
Returns:
A dict mapping reachable agent IDs to their AgentInfo metadata.
"""
all_agents = conn.execute(
"SELECT agent_id, agent_info, documentation_path FROM agents WHERE agent_id != ?",
(agent_id,),
).fetchall()
destinations: dict[str, Any] = {}
for row in all_agents:
dest_id: str = row["agent_id"]
if dest_id not in _alive_agents:
continue
if can_route(agent_id, dest_id, conn):
try:
info = json.loads(row["agent_info"] or "{}")
except json.JSONDecodeError:
info = {}
doc_path: Optional[str] = row["documentation_path"]
if doc_path:
doc_key_row = conn.execute(
"SELECT file_key FROM proxy_files WHERE file_path = ?",
(doc_path,),
).fetchone()
info["documentation_file"] = {
"path": f"/docs/{dest_id}",
"protocol": "router-proxy",
"key": doc_key_row["file_key"] if doc_key_row else None,
}
else:
info["documentation_file"] = None
destinations[dest_id] = info
return destinations
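# Illustrative sketch (not executed): the shape of the dict returned by
# get_available_destinations() for a single reachable destination. The field
# values are hypothetical; the AgentInfo keys mirror whatever the destination
# registered.
#
#     {
#         "md_converter": {
#             "description": "Converts documents to Markdown.",
#             "required_input": ["file"],
#             "documentation_file": {
#                 "path": "/docs/md_converter",
#                 "protocol": "router-proxy",
#                 "key": "<proxy file key>"
#             }
#         }
#     }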
# ---------------------------------------------------------------------------
# Auth dependency
# ---------------------------------------------------------------------------
def _require_admin(authorization: str) -> None:
"""
Validate an admin-level Authorization header.
Args:
authorization: The ``Authorization`` header value.
Raises:
HTTPException 401: If the header is missing or malformed.
HTTPException 403: If the token does not match ADMIN_TOKEN.
"""
if not authorization.startswith("Bearer "):
raise HTTPException(status_code=401, detail="Missing or malformed Bearer token.")
token = authorization[len("Bearer "):]
if not ADMIN_TOKEN:
raise HTTPException(
status_code=500,
detail="ADMIN_TOKEN environment variable is not configured.",
)
if not secrets.compare_digest(token, ADMIN_TOKEN):
raise HTTPException(status_code=403, detail="Invalid admin token.")
# ---------------------------------------------------------------------------
# Delivery helpers
# ---------------------------------------------------------------------------
async def deliver_to_agent(agent_id: str, payload: dict[str, Any]) -> None:
"""
Deliver a routing payload to an agent (embedded or external).
For **embedded** agents: the router makes an in-process ASGI call via
``httpx.ASGITransport`` to ``POST /receive``. A 200 response body is
expected to be a valid routing payload dict; the router processes it
inline via ``_process_route_internal``.
For **external** agents: the router POSTs the payload to the agent's
``endpoint_url`` and expects a ``202`` acknowledgment. Failure to
receive 2xx is treated as a delivery error and the task is failed.
NOTE: For high-traffic production deployments, use aiosqlite here.
Args:
agent_id: The target agent's ID.
payload: The routing payload dict to deliver.
"""
# NOTE: For high-traffic production deployments, use aiosqlite here.
conn = get_db()
try:
row = conn.execute(
"SELECT endpoint_url, is_embedded, auth_token FROM agents WHERE agent_id = ?",
(agent_id,),
).fetchone()
finally:
conn.close()
if row is None:
task_id = payload.get("task_id")
if task_id and task_id != "new":
conn2 = get_db()
try:
await _fail_task(
task_id,
agent_id,
f"Destination agent '{agent_id}' not found.",
conn2,
)
finally:
conn2.close()
return
if row["is_embedded"]:
await _deliver_embedded(agent_id, payload)
else:
await _deliver_external(agent_id, row["endpoint_url"], payload, row["auth_token"])
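# Illustrative sketch (not executed): the contract an external agent is expected
# to meet, per the deliver_to_agent() docstring above. The handler below is a
# hypothetical FastAPI endpoint; any server that acknowledges with 202 and later
# reports its result back to the router's /route endpoint would satisfy it.
#
#     @external_agent_app.post("/receive")
#     async def receive(payload: dict) -> JSONResponse:
#         # Acknowledge immediately; do the actual work in the background.
#         asyncio.create_task(handle_task_and_post_result(payload))
#         return JSONResponse({"status": "accepted"}, status_code=202)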
async def _deliver_embedded(agent_id: str, payload: dict[str, Any]) -> None:
"""
Deliver a payload to an embedded agent via in-process ASGI transport.
The embedded agent's ``POST /receive`` endpoint is called synchronously
(from the router's perspective). A 200 response body is parsed as a
routing payload and fed back into ``_process_route_internal``.
Args:
agent_id: The embedded agent's ID.
payload: The routing payload to deliver.
"""
app = embedded_apps.get(agent_id)
if app is None:
task_id = payload.get("task_id")
if task_id and task_id != "new":
conn = get_db()
try:
await _fail_task(
task_id,
agent_id,
f"Embedded agent '{agent_id}' ASGI app not loaded.",
conn,
)
finally:
conn.close()
return
try:
transport = httpx.ASGITransport(app=app)
async with httpx.AsyncClient(transport=transport, base_url="http://embedded") as client:
response = await client.post("/receive", json=payload, timeout=EMBEDDED_AGENT_TIMEOUT)
except Exception as exc:
task_id = payload.get("task_id")
if task_id and task_id != "new":
conn = get_db()
try:
await _fail_task(
task_id,
agent_id,
f"ASGI delivery to embedded agent '{agent_id}' raised: {exc}",
conn,
)
finally:
conn.close()
return
if response.status_code == 200:
try:
response_data = response.json()
except Exception:
return
if isinstance(response_data, dict):
try:
await _process_route_internal(response_data)
except Exception as exc:
print(f"[router] Error processing embedded agent '{agent_id}' response: {exc}")
resp_task_id = response_data.get("task_id")
if resp_task_id and resp_task_id != "new":
conn = get_db()
try:
await _fail_task(
resp_task_id,
agent_id,
f"Error processing response from embedded agent '{agent_id}': {exc}",
conn,
)
finally:
conn.close()
else:
task_id = payload.get("task_id")
if task_id and task_id != "new":
conn = get_db()
try:
await _fail_task(
task_id,
agent_id,
f"Embedded agent '{agent_id}' returned HTTP {response.status_code}.",
conn,
)
finally:
conn.close()
async def _deliver_external(
agent_id: str, endpoint_url: str, payload: dict[str, Any],
auth_token: str = "",
) -> None:
"""
Deliver a payload to an external agent via HTTP POST.
Expects a ``202`` acknowledgment. Any non-2xx status or connection
error causes the task to be failed.
Args:
agent_id: The external agent's ID.
endpoint_url: The URL to POST to.
payload: The routing payload to deliver.
auth_token: Raw auth token to send as Bearer header for verification.
"""
# NOTE: For high-traffic production deployments, consider a shared
# AsyncClient with connection pooling rather than per-call clients.
headers = {"Authorization": f"Bearer {auth_token}"} if auth_token else {}
try:
async with httpx.AsyncClient(timeout=30.0) as client:
response = await client.post(endpoint_url, json=payload, headers=headers)
except Exception as exc:
task_id = payload.get("task_id")
if task_id and task_id != "new":
conn = get_db()
try:
await _fail_task(
task_id,
agent_id,
f"HTTP delivery to external agent '{agent_id}' failed: {exc}",
conn,
)
finally:
conn.close()
return
if not (200 <= response.status_code < 300):
task_id = payload.get("task_id")
if task_id and task_id != "new":
conn = get_db()
try:
await _fail_task(
task_id,
agent_id,
(
f"External agent '{agent_id}' returned HTTP "
f"{response.status_code} instead of 202."
),
conn,
)
finally:
conn.close()
async def _fail_task(
task_id: str,
reporting_agent_id: str,
reason: str,
conn: sqlite3.Connection,
) -> None:
"""
Mark a task as failed, log an error event, and propagate the error to
the task's origin agent.
NOTE: For high-traffic production deployments, use aiosqlite here.
Args:
task_id: The task to fail.
reporting_agent_id: The agent or component reporting the failure.
reason: Human-readable failure description.
conn: An open database connection.
"""
now = datetime.now(timezone.utc).isoformat()
task_row = conn.execute(
"SELECT origin_agent_id, identifier, status FROM tasks WHERE task_id = ?",
(task_id,),
).fetchone()
if task_row is None:
return
if task_row["status"] in ("completed", "failed", "timeout"):
return
conn.execute(
"UPDATE tasks SET status = 'failed' WHERE task_id = ?",
(task_id,),
)
conn.execute(
"""
INSERT INTO events (task_id, agent_id, event_type, payload, timestamp)
VALUES (?, ?, 'error', ?, ?)
""",
(task_id, reporting_agent_id, json.dumps({"reason": reason}), now),
)
conn.commit()
origin_agent_id: str = task_row["origin_agent_id"]
identifier: Optional[str] = task_row["identifier"]
error_payload: dict[str, Any] = {
"agent_id": "router",
"task_id": task_id,
"identifier": identifier,
"parent_task_id": None,
"destination_agent_id": None,
"timestamp": now,
"status_code": 500,
"payload": {"content": reason, "error": reason},
}
# Notify progress subscribers that the task failed.
done_event = {
"type": "done",
"content": "",
"task_id": task_id,
"status_code": 500,
"timestamp": now,
}
for q in _progress_queues.get(task_id, []):
try:
q.put_nowait(done_event)
except asyncio.QueueFull:
pass
# Deliver error asynchronously to avoid recursive DB locks.
asyncio.create_task(deliver_to_agent(origin_agent_id, error_payload))
# ---------------------------------------------------------------------------
# Core routing logic
# ---------------------------------------------------------------------------
async def _ingest_payload_files(
payload: dict[str, Any],
task_id: str,
) -> tuple[dict[str, Any], list[tuple[str, str, str, str]]]:
"""
Scan *payload* for ProxyFile objects (protocol "http" or "localfile"),
fetch them into the router's proxy file store, and replace with
``router-proxy`` references.
Returns ``(updated_payload, db_rows)`` where *db_rows* is a list of
``(file_key, dest_path, original_filename, task_id)`` tuples to be
bulk-inserted into ``proxy_files`` by the caller inside its own DB