From af3b0c44521ee9ae66a4dd21e979a0ae3614dd86 Mon Sep 17 00:00:00 2001 From: Haili Zhang Date: Wed, 22 Oct 2025 22:06:00 +0800 Subject: [PATCH 1/2] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20refactor:=20enhance=20?= =?UTF-8?q?context=20management=20and=20middleware=20structure?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Refactor context schemas for agents, introducing `BaseAgentContext` and specialized contexts for data analysis and search capabilities. - Update `SupervisorContext` and `DeepAgentContext` to extend from the new base context, improving configuration management. - Implement `BaseMiddleware` for structured logging and debug capabilities, enhancing middleware components. - Remove deprecated middleware and streamline imports to align with the latest LangChain v1 structure. - Update tests to reflect new context and middleware structures, ensuring comprehensive coverage and functionality. This commit significantly improves the organization and maintainability of the LangGraph agent system, enhancing both context management and middleware capabilities. --- apps/sample-agent/src/sample_agent/context.py | 41 +- apps/sample-agent/src/sample_agent/graph.py | 7 +- apps/sample-agent/src/sample_agent/state.py | 29 +- .../src/sample_agent/subagents/math.py | 7 +- .../src/sample_agent/subagents/research.py | 7 +- .../src/sample_agent/tools/handoff.py | 4 +- apps/sample-agent/tests/conftest.py | 4 +- .../tests/integration/test_handoff.py | 2 +- apps/sample-agent/tests/unit/test_graph.py | 6 +- apps/sample-deep-agent/pyproject.toml | 2 +- .../src/sample_deep_agent/context.py | 50 ++- .../src/sample_deep_agent/graph.py | 7 +- .../tests/integration/test_research.py | 2 +- .../tests/unit/test_graph.py | 2 +- libs/langgraph-up-devkits/README.md | 130 ++++-- libs/langgraph-up-devkits/pyproject.toml | 9 +- .../src/langgraph_up_devkits/__init__.py | 80 +--- .../langgraph_up_devkits/context/__init__.py | 15 +- .../src/langgraph_up_devkits/context/base.py | 132 ++++++ .../langgraph_up_devkits/context/mixins.py | 84 ++++ .../langgraph_up_devkits/context/schemas.py | 148 ------ .../middleware/__init__.py | 4 +- .../langgraph_up_devkits/middleware/base.py | 90 ++++ .../middleware/filesystem_mask.py | 49 +- .../middleware/model_provider.py | 63 ++- .../middleware/summarization.py | 46 -- libs/langgraph-up-devkits/tests/__init__.py | 4 - libs/langgraph-up-devkits/tests/conftest.py | 2 +- libs/langgraph-up-devkits/tests/testing.py | 27 +- .../tests/unit/test_context.py | 101 +---- uv.lock | 420 +++++++++--------- 31 files changed, 844 insertions(+), 730 deletions(-) create mode 100644 libs/langgraph-up-devkits/src/langgraph_up_devkits/context/base.py create mode 100644 libs/langgraph-up-devkits/src/langgraph_up_devkits/context/mixins.py delete mode 100644 libs/langgraph-up-devkits/src/langgraph_up_devkits/context/schemas.py create mode 100644 libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/base.py delete mode 100644 libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/summarization.py diff --git a/apps/sample-agent/src/sample_agent/context.py b/apps/sample-agent/src/sample_agent/context.py index 6b31f7a..033b753 100644 --- a/apps/sample-agent/src/sample_agent/context.py +++ b/apps/sample-agent/src/sample_agent/context.py @@ -1,24 +1,41 @@ """Context schema for supervisor configuration.""" -from typing import Any +from __future__ import annotations -from pydantic import BaseModel, Field +from dataclasses import asdict, dataclass, field +from 
langgraph_up_devkits.context import BaseAgentContext -class SupervisorContext(BaseModel): - """Context schema for supervisor configuration.""" - model_name: str = Field(default="siliconflow:zai-org/GLM-4.5-Air", description="Default model name") - temperature: float = 0.7 - max_tokens: int | None = None - debug_mode: bool = False - recursion_limit: int = Field(default=100, description="Recursion limit for agent execution") +@dataclass(kw_only=True) +class SupervisorContext(BaseAgentContext): + """Context schema for supervisor configuration. + + Extends BaseAgentContext with supervisor-specific defaults. + Uses GLM-4.5-Air model by default for efficient coordination. + + Inherits from BaseAgentContext: + - model: LLM identifier (overridden to siliconflow:zai-org/GLM-4.5-Air) + - temperature: Sampling temperature (default 0.7) + - max_tokens: Response token cap (default None) + - recursion_limit: LangGraph recursion depth (default 100) + - debug: Enable debug logging + - user_id: Optional user identifier + """ + + # Override model default for supervisor + model: str = field( + default="siliconflow:zai-org/GLM-4.5-Air", + metadata={ + "description": "The name of the language model to use for the supervisor agent.", + }, + ) @classmethod - def default(cls) -> "SupervisorContext": + def default(cls) -> SupervisorContext: """Create default supervisor context.""" return cls() - def to_dict(self) -> dict[str, Any]: + def to_dict(self) -> dict[str, dict[str, str | float | int | bool | None]]: """Convert to dictionary for RunnableConfig.""" - return {"configurable": self.model_dump()} + return {"configurable": asdict(self)} diff --git a/apps/sample-agent/src/sample_agent/graph.py b/apps/sample-agent/src/sample_agent/graph.py index 4291c3f..1040683 100644 --- a/apps/sample-agent/src/sample_agent/graph.py +++ b/apps/sample-agent/src/sample_agent/graph.py @@ -24,11 +24,14 @@ def make_graph(config: RunnableConfig | None = None) -> CompiledStateGraph[Any, # Convert runnable config to context configurable = config.get("configurable", {}) - context_kwargs = {k: v for k, v in configurable.items() if k in SupervisorContext.model_fields} + from dataclasses import fields + + context_field_names = {f.name for f in fields(SupervisorContext)} + context_kwargs = {k: v for k, v in configurable.items() if k in context_field_names} context = SupervisorContext(**context_kwargs) # Load model based on configuration - model = load_chat_model(context.model_name) + model = load_chat_model(context.model) # Create agents with the configured model via make_graph functions math_agent = make_math_graph(config) diff --git a/apps/sample-agent/src/sample_agent/state.py b/apps/sample-agent/src/sample_agent/state.py index 28056c2..adb0aed 100644 --- a/apps/sample-agent/src/sample_agent/state.py +++ b/apps/sample-agent/src/sample_agent/state.py @@ -1,22 +1,25 @@ -"""Simple state definition for Agent1 extending MessagesState.""" +"""State definition for sample-agent extending AgentState.""" -from typing import Annotated, TypedDict +from typing import NotRequired -from langchain_core.messages import BaseMessage -from langgraph.graph.message import add_messages +from langchain.agents import AgentState as BaseAgentState -class AgentState(TypedDict): - """Simple state for sample-agent with task description support.""" +class AgentState(BaseAgentState): # type: ignore[type-arg] + """State for sample-agent with additional fields. 
- # Core message history - messages: Annotated[list[BaseMessage], add_messages] + Extends langchain.agents.AgentState which provides: + - messages: Annotated[list[BaseMessage], add_messages] + - jump_to: NotRequired[Annotated[JumpTo | None, EphemeralValue, PrivateStateAttr]] + - structured_response: NotRequired[Annotated[ResponseT, OmitFromInput]] + """ - # Required for create_react_agent + # Required by create_react_agent remaining_steps: int - # Task management - following the reference pattern - task_description: str | None + # Additional fields for supervisor pattern + task_description: NotRequired[str | None] + active_agent: NotRequired[str | None] - # Active agent tracking - active_agent: str | None + +__all__ = ["AgentState"] diff --git a/apps/sample-agent/src/sample_agent/subagents/math.py b/apps/sample-agent/src/sample_agent/subagents/math.py index a69cc9a..4bd900c 100644 --- a/apps/sample-agent/src/sample_agent/subagents/math.py +++ b/apps/sample-agent/src/sample_agent/subagents/math.py @@ -27,11 +27,14 @@ def make_graph(config: RunnableConfig | None = None) -> CompiledStateGraph[Any, # Convert runnable config to context configurable = config.get("configurable", {}) - context_kwargs = {k: v for k, v in configurable.items() if k in SupervisorContext.model_fields} + from dataclasses import fields + + context_field_names = {f.name for f in fields(SupervisorContext)} + context_kwargs = {k: v for k, v in configurable.items() if k in context_field_names} context = SupervisorContext(**context_kwargs) # Load model based on configuration - model = load_chat_model(context.model_name) + model = load_chat_model(context.model) # Create and return the math agent directly return create_agent( diff --git a/apps/sample-agent/src/sample_agent/subagents/research.py b/apps/sample-agent/src/sample_agent/subagents/research.py index 4ba5210..bc1b706 100644 --- a/apps/sample-agent/src/sample_agent/subagents/research.py +++ b/apps/sample-agent/src/sample_agent/subagents/research.py @@ -27,11 +27,14 @@ def make_graph(config: RunnableConfig | None = None) -> CompiledStateGraph[Any, # Convert runnable config to context configurable = config.get("configurable", {}) - context_kwargs = {k: v for k, v in configurable.items() if k in SupervisorContext.model_fields} + from dataclasses import fields + + context_field_names = {f.name for f in fields(SupervisorContext)} + context_kwargs = {k: v for k, v in configurable.items() if k in context_field_names} context = SupervisorContext(**context_kwargs) # Load model based on configuration - model = load_chat_model(context.model_name) + model = load_chat_model(context.model) # Create and return the research agent directly return create_agent( diff --git a/apps/sample-agent/src/sample_agent/tools/handoff.py b/apps/sample-agent/src/sample_agent/tools/handoff.py index 0051833..075aaea 100644 --- a/apps/sample-agent/src/sample_agent/tools/handoff.py +++ b/apps/sample-agent/src/sample_agent/tools/handoff.py @@ -2,8 +2,8 @@ from typing import Annotated, Any -from langchain_core.messages import ToolMessage -from langchain_core.tools import BaseTool, InjectedToolCallId, tool +from langchain.messages import ToolMessage +from langchain.tools import BaseTool, InjectedToolCallId, tool from langgraph.prebuilt import InjectedState from langgraph.types import Command from langgraph_supervisor.handoff import METADATA_KEY_HANDOFF_DESTINATION diff --git a/apps/sample-agent/tests/conftest.py b/apps/sample-agent/tests/conftest.py index 305263b..a4b43c6 100644 --- 
a/apps/sample-agent/tests/conftest.py +++ b/apps/sample-agent/tests/conftest.py @@ -7,8 +7,8 @@ from unittest.mock import patch import pytest -from langchain_core.language_models.chat_models import BaseChatModel -from langchain_core.messages import AIMessage +from langchain.chat_models import BaseChatModel +from langchain.messages import AIMessage from langchain_core.outputs import ChatGeneration, LLMResult diff --git a/apps/sample-agent/tests/integration/test_handoff.py b/apps/sample-agent/tests/integration/test_handoff.py index fd92097..dc975a3 100644 --- a/apps/sample-agent/tests/integration/test_handoff.py +++ b/apps/sample-agent/tests/integration/test_handoff.py @@ -1,7 +1,7 @@ """Integration tests for handoff functionality with real models and workflows.""" import pytest -from langchain_core.messages import HumanMessage +from langchain.messages import HumanMessage from sample_agent.graph import make_graph from sample_agent.state import AgentState from sample_agent.tools.handoff import create_custom_handoff_tool diff --git a/apps/sample-agent/tests/unit/test_graph.py b/apps/sample-agent/tests/unit/test_graph.py index dff9293..71456b7 100644 --- a/apps/sample-agent/tests/unit/test_graph.py +++ b/apps/sample-agent/tests/unit/test_graph.py @@ -3,7 +3,7 @@ from unittest.mock import Mock, patch import pytest -from langchain_core.messages import AIMessage, HumanMessage +from langchain.messages import AIMessage, HumanMessage from sample_agent.state import AgentState from sample_agent.tools.basic import add, multiply, web_search from sample_agent.tools.handoff import create_custom_handoff_tool @@ -92,7 +92,7 @@ def test_math_agent_creation(self, mock_load_model, mock_create_agent): mock_compiled_graph = Mock() mock_create_agent.return_value = mock_compiled_graph - config = {"configurable": {"model_name": "test_model"}} + config = {"configurable": {"model": "test_model"}} result = make_graph(config) mock_load_model.assert_called_once_with("test_model") @@ -115,7 +115,7 @@ def test_research_agent_creation(self, mock_load_model, mock_create_agent): mock_compiled_graph = Mock() mock_create_agent.return_value = mock_compiled_graph - config = {"configurable": {"model_name": "test_model"}} + config = {"configurable": {"model": "test_model"}} result = make_graph(config) mock_load_model.assert_called_once_with("test_model") diff --git a/apps/sample-deep-agent/pyproject.toml b/apps/sample-deep-agent/pyproject.toml index 3ce7279..204b9b9 100644 --- a/apps/sample-deep-agent/pyproject.toml +++ b/apps/sample-deep-agent/pyproject.toml @@ -9,7 +9,7 @@ readme = "README.md" license = { text = "MIT" } requires-python = ">=3.11,<4.0" dependencies = [ - "deepagents>=0.1.1", + "deepagents>=0.1.3", "langgraph-up-devkits", ] diff --git a/apps/sample-deep-agent/src/sample_deep_agent/context.py b/apps/sample-deep-agent/src/sample_deep_agent/context.py index 72fed9d..aa7cc59 100644 --- a/apps/sample-deep-agent/src/sample_deep_agent/context.py +++ b/apps/sample-deep-agent/src/sample_deep_agent/context.py @@ -1,19 +1,53 @@ """Context configuration for sample deep agent.""" -from pydantic import BaseModel, Field +from __future__ import annotations + +from dataclasses import dataclass, field + +from langgraph_up_devkits.context import BaseAgentContext # Constants MAX_TODOS = 2 # Global maximum number of TODOs allowed per session -class DeepAgentContext(BaseModel): - """Context configuration for deep agent runtime settings.""" +@dataclass(kw_only=True) +class DeepAgentContext(BaseAgentContext): + """Context configuration 
for deep agent runtime settings. + + Extends BaseAgentContext with deep agent specific configuration: + - Uses DeepSeek-V3.2-Exp model for advanced reasoning + - Higher recursion limit (1000) for complex research workflows + - Configurable max_todos for research task management + + Inherits from BaseAgentContext: + - model: LLM identifier (overridden to siliconflow:deepseek-ai/DeepSeek-V3.2-Exp) + - temperature: Sampling temperature (default 0.7) + - max_tokens: Response token cap (default None) + - recursion_limit: LangGraph recursion depth (overridden to 1000) + - debug: Enable debug logging + - user_id: Optional user identifier + """ - # Model configuration - model_name: str = Field(default="siliconflow:deepseek-ai/DeepSeek-V3.2-Exp", description="Default model name") + # Override model default for deep agent + model: str = field( + default="siliconflow:deepseek-ai/DeepSeek-V3.2-Exp", + metadata={ + "description": "The name of the language model to use for deep reasoning tasks.", + }, + ) - # Graph configuration - recursion_limit: int = Field(default=1000, description="Recursion limit for agent execution") + # Override recursion limit for deep agent + recursion_limit: int = field( + default=1000, + metadata={ + "description": "Maximum LangGraph recursion depth for complex research workflows.", + }, + ) # Research workflow settings - max_todos: int = Field(default=MAX_TODOS, description="Maximum number of TODOs to create for research tasks") + max_todos: int = field( + default=MAX_TODOS, + metadata={ + "description": "Maximum number of TODOs to create for research tasks.", + }, + ) diff --git a/apps/sample-deep-agent/src/sample_deep_agent/graph.py b/apps/sample-deep-agent/src/sample_deep_agent/graph.py index 28f774f..2ee4ba2 100644 --- a/apps/sample-deep-agent/src/sample_deep_agent/graph.py +++ b/apps/sample-deep-agent/src/sample_deep_agent/graph.py @@ -29,11 +29,14 @@ def make_graph(config: RunnableConfig | None = None) -> Any: # Convert runnable config to context configurable = config.get("configurable", {}) - context_kwargs = {k: v for k, v in configurable.items() if k in DeepAgentContext.model_fields} + from dataclasses import fields + + context_field_names = {f.name for f in fields(DeepAgentContext)} + context_kwargs = {k: v for k, v in configurable.items() if k in context_field_names} context = DeepAgentContext(**context_kwargs) # Load model based on context configuration - model = load_chat_model(context.model_name) + model = load_chat_model(context.model) # Create deep agent with research capabilities agent = create_deep_agent( diff --git a/apps/sample-deep-agent/tests/integration/test_research.py b/apps/sample-deep-agent/tests/integration/test_research.py index 548b5fc..fbe9f68 100644 --- a/apps/sample-deep-agent/tests/integration/test_research.py +++ b/apps/sample-deep-agent/tests/integration/test_research.py @@ -3,7 +3,7 @@ import os import pytest -from langchain_core.messages import AIMessage, HumanMessage +from langchain.messages import AIMessage, HumanMessage @pytest.mark.integration diff --git a/apps/sample-deep-agent/tests/unit/test_graph.py b/apps/sample-deep-agent/tests/unit/test_graph.py index ae73bda..742a6d7 100644 --- a/apps/sample-deep-agent/tests/unit/test_graph.py +++ b/apps/sample-deep-agent/tests/unit/test_graph.py @@ -55,7 +55,7 @@ def test_make_graph_with_custom_config(self, mock_load_model, mock_create_deep_a config = { "configurable": { - "model_name": "openrouter:anthropic/claude-opus-4", + "model": "openrouter:anthropic/claude-opus-4", "max_todos": 3 } } 
diff --git a/libs/langgraph-up-devkits/README.md b/libs/langgraph-up-devkits/README.md index a2f58f6..fc979be 100644 --- a/libs/langgraph-up-devkits/README.md +++ b/libs/langgraph-up-devkits/README.md @@ -13,9 +13,9 @@ A comprehensive development toolkit for LangGraph agents (LangChain v1) providin ## Version -**Current Version: 0.3.0** - LangChain v1 compatible +**Current Version: 0.4.0** - LangChain v1 compatible -This version uses the modern LangChain v1 API with `wrap_model_call` middleware pattern. +This version uses the modern LangChain v1 API with `wrap_model_call` middleware pattern and provides a minimal, explicit API surface. ## Installation @@ -58,14 +58,20 @@ Since `langchain-qwq` and `langchain-siliconflow` have not yet been upgraded to ## Quick Start ```python +from dataclasses import dataclass from langchain.agents import create_agent -from langchain_core.messages import HumanMessage -from langgraph_up_devkits import ( - DataAnalystContext, - ModelProviderMiddleware, - web_search, - create_context_aware_prompt -) +from langchain.messages import HumanMessage +from langgraph_up_devkits import load_chat_model +from langgraph_up_devkits.context import SearchContext, DataContext +from langgraph_up_devkits.middleware import ModelProviderMiddleware +from langgraph_up_devkits.tools import web_search +from langgraph_up_devkits.utils import create_context_aware_prompt + +# Create custom context by composing mixins +@dataclass(kw_only=True) +class DataAnalystContext(SearchContext, DataContext): + """Custom context for data analyst agents.""" + pass # Create agent using devkit components (LangChain v1) agent = create_agent( @@ -91,14 +97,27 @@ result = await agent.ainvoke( ### Context Schemas -- `BaseAgentContext` - Core configuration -- `SearchContext` - Search capabilities -- `DataContext` - Data analysis features -- `DataAnalystContext` - Composed context for data analysts -- `ResearchContext` - Research assistant configuration +Composable context mixins for building custom agent configurations: + +- `BaseAgentContext` - Core configuration (model, temperature, user_id, etc.) +- `SearchContext` - Search capabilities (max_search_results, enable_deepwiki, etc.) +- `DataContext` - Data analysis features (max_data_rows, enable_data_viz, etc.) 
+ +**Usage Pattern**: Compose contexts by inheriting from multiple mixins: + +```python +from dataclasses import dataclass +from langgraph_up_devkits.context import SearchContext, DataContext + +@dataclass(kw_only=True) +class MyCustomContext(SearchContext, DataContext): + """Custom context combining search and data capabilities.""" + custom_field: str = "default_value" +``` ### Middleware +- `BaseMiddleware` - Base class with structured logging support - `ModelProviderMiddleware` - Automatic model provider switching with `wrap_model_call` and `awrap_model_call` (LangChain v1) - `FileSystemMaskMiddleware` - Shadows virtual file system from model context @@ -115,10 +134,32 @@ result = await agent.ainvoke( ### Utilities +- `load_chat_model` - Unified model loading with automatic provider registration (exported at top level) - `create_context_aware_prompt` - Dynamic prompt generation - Provider registration utilities - Testing helpers and mocks +## API Structure + +The library follows an explicit import pattern for clarity: + +```python +# Top-level export (most commonly used) +from langgraph_up_devkits import load_chat_model + +# Explicit submodule imports (recommended) +from langgraph_up_devkits.context import BaseAgentContext, SearchContext, DataContext +from langgraph_up_devkits.middleware import BaseMiddleware, ModelProviderMiddleware +from langgraph_up_devkits.tools import web_search, deep_web_search, think_tool +from langgraph_up_devkits.utils import create_context_aware_prompt, AVAILABLE_PROVIDERS +``` + +This design ensures: +- ✅ Clear import paths showing where components come from +- ✅ Minimal top-level namespace pollution +- ✅ Easy discoverability through IDE autocomplete +- ✅ Better maintainability and refactoring support + ## Examples ### Basic Context Usage @@ -148,7 +189,7 @@ The `ModelProviderMiddleware` uses the modern `wrap_model_call` pattern from Lan ```python from langchain.agents import create_agent from langchain.chat_models import init_chat_model -from langchain_core.messages import HumanMessage +from langchain.messages import HumanMessage from langgraph_up_devkits.middleware import ModelProviderMiddleware from dataclasses import dataclass @@ -200,7 +241,7 @@ The `FileSystemMaskMiddleware` automatically shadows the `files` field from the from typing import Annotated from langchain.agents import create_agent, AgentState from langchain.agents.middleware import AgentMiddleware -from langchain_core.messages import HumanMessage, add_messages +from langchain.messages import HumanMessage, add_messages from langgraph_up_devkits.middleware import FileSystemMaskMiddleware # Define a middleware to extend state with files field @@ -250,13 +291,20 @@ assert result["files"]["config.json"] == '{"setting": "value"}' ### Context-Aware Tools ```python +from dataclasses import dataclass from langchain.agents import create_agent -from langchain_core.messages import HumanMessage +from langchain.messages import HumanMessage from langgraph_up_devkits.context import SearchContext from langgraph_up_devkits.tools import web_search, fetch_url, get_deepwiki_tools, get_context7_tools +# Create custom research context +@dataclass(kw_only=True) +class ResearchContext(SearchContext): + """Custom context for research tasks.""" + pass + # Create search context with specific settings -search_context = SearchContext( +search_context = ResearchContext( model="openrouter:anthropic/claude-sonnet-4", max_search_results=5, enable_deepwiki=True @@ -271,7 +319,7 @@ agent = create_agent( model="openai:gpt-4o", 
tools=[web_search, fetch_url] + deepwiki_tools + context7_tools, system_prompt="You are a research assistant with web search, GitHub documentation, and library documentation access.", - context_schema=SearchContext + context_schema=ResearchContext ) # Tools automatically respect context limits @@ -285,14 +333,20 @@ result = await agent.ainvoke( ```python import os +from dataclasses import dataclass from langchain.agents import create_agent -from langchain_core.messages import HumanMessage -from langgraph_up_devkits import ( - ResearchContext, - ModelProviderMiddleware, - web_search, - create_context_aware_prompt -) +from langchain.messages import HumanMessage +from langgraph_up_devkits.context import SearchContext +from langgraph_up_devkits.middleware import ModelProviderMiddleware +from langgraph_up_devkits.tools import web_search +from langgraph_up_devkits.utils import create_context_aware_prompt + +# Create custom research context +@dataclass(kw_only=True) +class ResearchContext(SearchContext): + """Custom context for research assistant.""" + pass + # Set up environment (ensure API keys are available) os.environ["TAVILY_API_KEY"] = "your_tavily_key" os.environ["SILICONFLOW_API_KEY"] = "your_siliconflow_key" @@ -313,8 +367,7 @@ research_context = ResearchContext( model="siliconflow:THUDM/glm-4-9b-chat", # Switch to SiliconFlow GLM max_search_results=10, enable_deepwiki=True, - user_id="researcher_001", - thread_id="research_session_123" + user_id="researcher_001" ) # Perform research task @@ -342,7 +395,7 @@ DeepWiki provides documentation and insights for GitHub repositories: ```python from langchain.agents import create_agent -from langchain_core.messages import HumanMessage +from langchain.messages import HumanMessage from langgraph_up_devkits.tools import get_deepwiki_tools # Get built-in DeepWiki tools @@ -366,7 +419,7 @@ Context7 provides up-to-date documentation for popular libraries and frameworks: ```python from langchain.agents import create_agent -from langchain_core.messages import HumanMessage +from langchain.messages import HumanMessage from langgraph_up_devkits.tools import get_context7_tools # Get built-in Context7 tools @@ -507,13 +560,18 @@ SILICONFLOW_API_KEY=your_siliconflow_api_key The library includes comprehensive testing utilities: ```python -from langgraph_up_devkits.testing import AgentTestBuilder, MockRuntime +from dataclasses import dataclass +from langgraph_up_devkits.context import SearchContext, DataContext + +# Create custom contexts for testing +@dataclass(kw_only=True) +class TestContext(SearchContext, DataContext): + """Custom test context.""" + pass -# Test your agents easily -result = await (AgentTestBuilder() - .with_context(DataAnalystContext()) - .with_messages(["Analyze quarterly sales"]) - .run_test(agent)) +# Testing utilities available in tests/ directory +# Test your agents with standard pytest patterns +# See tests/unit/ and tests/integration/ for examples ``` ## Development diff --git a/libs/langgraph-up-devkits/pyproject.toml b/libs/langgraph-up-devkits/pyproject.toml index e1b178d..787182f 100644 --- a/libs/langgraph-up-devkits/pyproject.toml +++ b/libs/langgraph-up-devkits/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "langgraph-up-devkits" -version = "0.3.0" +version = "0.4.0" description = "Development toolkit for LangGraph agents with middleware, context schemas, and provider integrations" authors = [ { name = "Haili Zhang", email = "haili.zhang@outlook.com" }, @@ -20,12 +20,13 @@ classifiers = [ "Topic :: Scientific/Engineering :: 
Artificial Intelligence", ] dependencies = [ - "langchain>=1.0.0", - "langchain-dev-utils>=1.0.0", + "langchain>=1.0.2", + "langchain-dev-utils>=1.0.2", "tavily-python>=0.7.12", "langchain-mcp-adapters>=0.1.11", - "langchain-openai>=1.0.0", + "langchain-openai>=1.0.1", "langchain-anthropic>=1.0.0", + "structlog>=25.4.0", ] [project.urls] diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/__init__.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/__init__.py index a0d16fd..529c8b0 100644 --- a/libs/langgraph-up-devkits/src/langgraph_up_devkits/__init__.py +++ b/libs/langgraph-up-devkits/src/langgraph_up_devkits/__init__.py @@ -8,81 +8,7 @@ - Development utilities and testing helpers """ -from .context import ( - BaseAgentContext, - DataAnalystContext, - DataContext, - ResearchContext, - SearchContext, -) -from .middleware import ( - FileSystemMaskMiddleware, - ModelProviderMiddleware, - SummarizationMiddleware, -) -from .prompts import ( - DATA_ANALYST_PROMPT, - RESEARCH_ASSISTANT_PROMPT, - SYSTEM_PROMPT, -) -from .tools import ( - MCP_SERVERS, - add_mcp_server, - clear_mcp_cache, - deep_web_search, - fetch_url, - get_all_mcp_tools, - get_context7_tools, - get_deepwiki_tools, - get_mcp_client, - get_mcp_tools, - remove_mcp_server, - think_tool, - web_search, -) -from .utils import ( - AVAILABLE_PROVIDERS, - create_context_aware_prompt, - load_chat_model, -) +from .utils import load_chat_model -# Provider registration is handled automatically by load_chat_model() -# No manual registration needed - just use load_chat_model("provider:model") - -__version__ = "0.1.0" -__all__ = [ - # Context schemas - "BaseAgentContext", - "SearchContext", - "DataContext", - "DataAnalystContext", - "ResearchContext", - # Middleware - "FileSystemMaskMiddleware", - "ModelProviderMiddleware", - "SummarizationMiddleware", - # HTTP and search tools - "fetch_url", - "web_search", - "deep_web_search", - "think_tool", - # MCP tools - "get_context7_tools", - "get_deepwiki_tools", - "get_mcp_tools", - "get_all_mcp_tools", - "get_mcp_client", - # MCP configuration - "MCP_SERVERS", - "add_mcp_server", - "remove_mcp_server", - "clear_mcp_cache", - # Prompts - "create_context_aware_prompt", - "SYSTEM_PROMPT", - "DATA_ANALYST_PROMPT", - "RESEARCH_ASSISTANT_PROMPT", - # Provider registration - "load_chat_model", - "AVAILABLE_PROVIDERS", -] +__version__ = "0.4.0" +__all__ = ["load_chat_model"] diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/__init__.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/__init__.py index 66afa1e..e8790e8 100644 --- a/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/__init__.py +++ b/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/__init__.py @@ -1,17 +1,10 @@ -"""Context schemas for LangGraph agents.""" +"""Context schemas for LangGraph agent configuration.""" -from .schemas import ( - BaseAgentContext, - DataAnalystContext, - DataContext, - ResearchContext, - SearchContext, -) +from .base import BaseAgentContext +from .mixins import DataContext, SearchContext __all__ = [ "BaseAgentContext", - "SearchContext", "DataContext", - "DataAnalystContext", - "ResearchContext", + "SearchContext", ] diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/base.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/base.py new file mode 100644 index 0000000..6b87f79 --- /dev/null +++ b/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/base.py @@ -0,0 +1,132 @@ +"""Base context schema for LangGraph 
agent configuration.""" + +from __future__ import annotations + +import os +from dataclasses import dataclass, field, fields + + +@dataclass(kw_only=True) +class BaseAgentContext: + """Base context schema with common configuration for all agents. + + This base class provides: + - model: LLM identifier for model loading (provider:model-name format) + - temperature: Sampling temperature for generation + - max_tokens: Optional token limit for responses + - recursion_limit: Maximum LangGraph recursion depth + - debug: Enable debug logging in middleware + - user_id: Optional user identifier for personalization + + All agent-specific contexts should extend this base context. + + The context automatically loads values from environment variables if not + explicitly set, with the following precedence: + 1. Explicitly provided values (highest priority) + 2. Environment variables (FIELD_NAME in uppercase) + 3. Default values (lowest priority) + + Example: + ```python + from langgraph_up_devkits.context import BaseAgentContext + + @dataclass(kw_only=True) + class MyAgentContext(BaseAgentContext): + custom_field: str = field( + default="value", + metadata={"description": "Custom configuration"} + ) + ``` + """ + + model: str = field( + default="siliconflow:zai-org/GLM-4.5", + metadata={ + "description": "The name of the language model to use for the agent's main interactions. " + "Should be in the form: provider:model-name (e.g., 'openrouter:anthropic/claude-sonnet-4').", + }, + ) + + temperature: float = field( + default=0.7, + metadata={ + "description": "Sampling temperature for the chat model (0.0 to 2.0).", + }, + ) + + max_tokens: int | None = field( + default=None, + metadata={ + "description": "Optional response token cap for model generation.", + }, + ) + + recursion_limit: int = field( + default=100, + metadata={ + "description": "Maximum LangGraph recursion depth for agent execution.", + }, + ) + + debug: bool = field( + default=False, + metadata={ + "description": "Enable debug logging in middleware and agent components.", + }, + ) + + user_id: str | None = field( + default=None, + metadata={ + "description": "User ID for personalization and logging.", + }, + ) + + def __post_init__(self) -> None: + """Load configuration from environment variables if not explicitly set. 
+ + This method runs after dataclass initialization and loads values from + environment variables when: + - The current value equals the default value (not explicitly set) + - An environment variable exists with the field name in uppercase + - The environment variable is not empty + + Supports automatic type conversion for: + - bool: Accepts "true", "1", "yes", "on" (case-insensitive) + - int: Converts string to integer, keeps default on failure + - str: Uses value as-is + """ + for f in fields(self): + if not f.init: + continue + + current_value = getattr(self, f.name) + default_value = f.default + env_var_name = f.name.upper() + env_value = os.environ.get(env_var_name) + + # Only override with environment variable if current value equals default + # This preserves explicit configuration from LangGraph configurable + # Skip empty environment variables + if current_value == default_value and env_value is not None and env_value.strip(): + if isinstance(default_value, bool): + # Handle boolean environment variables + env_bool_value = env_value.lower() in ("true", "1", "yes", "on") + setattr(self, f.name, env_bool_value) + elif isinstance(default_value, int): + # Handle integer environment variables + try: + setattr(self, f.name, int(env_value)) + except ValueError: + pass # Keep default if conversion fails + elif isinstance(default_value, float): + # Handle float environment variables + try: + setattr(self, f.name, float(env_value)) + except ValueError: + pass # Keep default if conversion fails + else: + setattr(self, f.name, env_value) + + +__all__ = ["BaseAgentContext"] diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/mixins.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/mixins.py new file mode 100644 index 0000000..0b3d5f2 --- /dev/null +++ b/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/mixins.py @@ -0,0 +1,84 @@ +"""Context mixins for specialized agent capabilities.""" + +from __future__ import annotations + +from dataclasses import dataclass, field + +from .base import BaseAgentContext + + +@dataclass(kw_only=True) +class SearchContext(BaseAgentContext): + """Context mixin for agents with search capabilities. + + Adds search-specific configuration: + - max_search_results: Limit on number of search results + - enable_deepwiki: Toggle for DeepWiki documentation access + - include_raw_content: Format for raw content in search results + + Example: + ```python + from langgraph_up_devkits.context import SearchContext + + @dataclass(kw_only=True) + class MySearchAgent(SearchContext): + custom_field: str = field(default="value") + ``` + """ + + max_search_results: int = field( + default=5, + metadata={ + "description": "The maximum number of search results to return for each search query.", + }, + ) + + enable_deepwiki: bool = field( + default=False, + metadata={ + "description": "Whether to enable the DeepWiki MCP tool for accessing open source project documentation.", + }, + ) + + include_raw_content: str = field( + default="markdown", + metadata={ + "description": "Format for raw content inclusion in search results. Options: 'none', 'text', 'markdown'.", + }, + ) + + +@dataclass(kw_only=True) +class DataContext(BaseAgentContext): + """Context mixin for agents with data analysis capabilities. 
+ + Adds data-specific configuration: + - max_data_rows: Limit on data rows to process + - enable_data_viz: Toggle for data visualization features + + Example: + ```python + from langgraph_up_devkits.context import DataContext + + @dataclass(kw_only=True) + class MyDataAgent(DataContext): + custom_field: str = field(default="value") + ``` + """ + + max_data_rows: int = field( + default=1000, + metadata={ + "description": "Maximum number of data rows to process at once.", + }, + ) + + enable_data_viz: bool = field( + default=True, + metadata={ + "description": "Whether to enable data visualization capabilities.", + }, + ) + + +__all__ = ["SearchContext", "DataContext"] diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/schemas.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/schemas.py deleted file mode 100644 index 984a063..0000000 --- a/libs/langgraph-up-devkits/src/langgraph_up_devkits/context/schemas.py +++ /dev/null @@ -1,148 +0,0 @@ -"""Context schemas for configuring LangGraph agents.""" - -from __future__ import annotations - -import os -from dataclasses import dataclass, field, fields - - -@dataclass(kw_only=True) -class BaseAgentContext: - """Base context schema with common configuration for all agents.""" - - model: str = field( - default="openai:openai/gpt-4o", - metadata={ - "description": "The name of the language model to use for the agent's main interactions. " # noqa: E501 - "Should be in the form: provider:model-name.", - }, - ) - - user_id: str | None = field( - default=None, - metadata={ - "description": "User ID for personalization and logging.", - }, - ) - - def __post_init__(self) -> None: - """Load configuration from environment variables if not explicitly set.""" - for f in fields(self): - if not f.init: - continue - - current_value = getattr(self, f.name) - default_value = f.default - env_var_name = f.name.upper() - env_value = os.environ.get(env_var_name) - - # Only override with environment variable if current value equals default - # This preserves explicit configuration from LangGraph configurable - # Skip empty environment variables - if current_value == default_value and env_value is not None and env_value.strip(): - if isinstance(default_value, bool): - # Handle boolean environment variables - env_bool_value = env_value.lower() in ("true", "1", "yes", "on") - setattr(self, f.name, env_bool_value) - elif isinstance(default_value, int): - # Handle integer environment variables - try: - setattr(self, f.name, int(env_value)) - except ValueError: - pass # Keep default if conversion fails - else: - setattr(self, f.name, env_value) - - -@dataclass(kw_only=True) -class SearchContext(BaseAgentContext): - """Context mixin for agents with search capabilities.""" - - max_search_results: int = field( - default=5, - metadata={ - "description": "The maximum number of search results to return for each search query.", # noqa: E501 - }, - ) - - enable_deepwiki: bool = field( - default=False, - metadata={ - "description": "Whether to enable the DeepWiki MCP tool for accessing open source project documentation.", # noqa: E501 - }, - ) - - include_raw_content: str = field( - default="markdown", - metadata={ - "description": "Format for raw content inclusion in search results. 
Options: 'none', 'text', 'markdown'.", # noqa: E501 - }, - ) - - -@dataclass(kw_only=True) -class DataContext(BaseAgentContext): - """Context mixin for agents with data analysis capabilities.""" - - max_data_rows: int = field( - default=1000, - metadata={ - "description": "Maximum number of data rows to process at once.", - }, - ) - - enable_data_viz: bool = field( - default=True, - metadata={ - "description": "Whether to enable data visualization capabilities.", - }, - ) - - -@dataclass(kw_only=True) -class DataAnalystContext(SearchContext, DataContext): - """Specialized context for data analyst agents.""" - - system_prompt: str = field( - default="You are a data analyst assistant specializing in data analysis, visualization, and insights. " # noqa: E501 - "You have access to tools for analyzing data, creating visualizations, and searching for relevant information.", # noqa: E501 - metadata={ - "description": "The system prompt to use for the data analyst agent.", - }, - ) - - # Override default for data analyst - max_search_results: int = field( - default=8, # Data analysts might need more search results - metadata={ - "description": "The maximum number of search results to return for each search query.", # noqa: E501 - }, - ) - - -@dataclass(kw_only=True) -class ResearchContext(SearchContext): - """Specialized context for research assistant agents.""" - - system_prompt: str = field( - default="You are a research assistant specializing in finding, analyzing, and synthesizing information from various sources. " # noqa: E501 - "You have access to web search and documentation tools to help with research tasks.", # noqa: E501 - metadata={ - "description": "The system prompt to use for the research assistant agent.", - }, - ) - - # Override defaults for research assistant - enable_deepwiki: bool = field( - default=True, # Research assistants typically need documentation access - metadata={ - "description": "Whether to enable the DeepWiki MCP tool for accessing open source project documentation.", # noqa: E501 - }, - ) - - max_search_results: int = field( - default=10, # Research assistants need more comprehensive results - metadata={ - "description": "The maximum number of search results to return for each search query.", # noqa: E501 - }, - ) diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/__init__.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/__init__.py index f7c4b91..9881b28 100644 --- a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/__init__.py +++ b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/__init__.py @@ -1,11 +1,11 @@ """Middleware components for LangGraph agents.""" +from .base import BaseMiddleware from .filesystem_mask import FileSystemMaskMiddleware from .model_provider import ModelProviderMiddleware -from .summarization import SummarizationMiddleware __all__ = [ + "BaseMiddleware", "FileSystemMaskMiddleware", "ModelProviderMiddleware", - "SummarizationMiddleware", ] diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/base.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/base.py new file mode 100644 index 0000000..99234ac --- /dev/null +++ b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/base.py @@ -0,0 +1,90 @@ +"""Base middleware with structured logging support.""" + +from __future__ import annotations + +from typing import Any, Generic, TypeVar + +import structlog +from langchain.agents.middleware import AgentMiddleware, AgentState +from 
langgraph.runtime import Runtime + +StateT = TypeVar("StateT", bound=AgentState[Any]) +ContextT = TypeVar("ContextT") + + +class BaseMiddleware(AgentMiddleware[StateT, ContextT], Generic[StateT, ContextT]): # type: ignore[type-var] + """Base middleware class with debug logging capabilities. + + This base class provides: + - Structured logging using structlog + - Debug mode toggle from runtime context or constructor + - Private _log method for consistent logging across middleware + + The debug flag can be set in two ways (in order of precedence): + 1. Via runtime.context.debug (dynamic, per-request) + 2. Via constructor debug parameter (static, per-middleware instance) + + Example: + ```python + class MyMiddleware(BaseMiddleware[MyState, MyContext]): + def __init__(self, debug: bool = False) -> None: + super().__init__(debug=debug) + + async def abefore_model(self, state: MyState, runtime: Runtime[MyContext]) -> dict[str, Any] | None: + self._log("Processing before model", runtime, state_keys=list(state.keys())) + # ... middleware logic ... + return None + ``` + """ + + def __init__(self, debug: bool = False) -> None: + """Initialize the base middleware. + + Args: + debug: Default debug logging flag. Can be overridden by runtime.context.debug. + """ + super().__init__() + self.debug = debug + self._logger = structlog.get_logger(self.__class__.__name__) + + def _should_log(self, runtime: Runtime[ContextT] | None = None) -> bool: # type: ignore[type-var] + """Determine if logging should be enabled. + + Args: + runtime: Optional runtime context to check for debug flag. + + Returns: + True if logging should be enabled, False otherwise. + """ + # Priority 1: Check runtime context for debug flag + if runtime and hasattr(runtime, "context") and runtime.context: + context_debug = getattr(runtime.context, "debug", None) + if context_debug is not None: + return bool(context_debug) + + # Priority 2: Fall back to instance debug flag + return self.debug + + def _log(self, message: str, runtime: Runtime[ContextT] | None = None, **kwargs: Any) -> None: # type: ignore[type-var] + """Log debug messages if debug mode is enabled. + + This method uses structlog for structured logging with context. + Checks runtime.context.debug first, then falls back to self.debug. + + Args: + message: The log message to output. + runtime: Optional runtime context to check for debug flag. + **kwargs: Additional structured context to include in the log entry. + These will be output as key-value pairs. + + Example: + ```python + self._log("Processing state", runtime, state_size=len(state), step="validation") + # Output: Processing state state_size=5 step=validation + ``` + """ + if self._should_log(runtime): + self._logger.debug(message, **kwargs) + + +__all__ = ["BaseMiddleware"] diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/filesystem_mask.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/filesystem_mask.py index 7a5f05e..2f00ee6 100644 --- a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/filesystem_mask.py +++ b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/filesystem_mask.py @@ -2,11 +2,12 @@ from typing import Any -from langchain.agents.middleware import AgentMiddleware from langgraph.runtime import Runtime +from .base import BaseMiddleware -class FileSystemMaskMiddleware(AgentMiddleware[Any]): + +class FileSystemMaskMiddleware(BaseMiddleware[Any, Any]): """Middleware that shadows the 'files' field before model calls and restores it after. 
This middleware removes the 'files' field from state before passing to the model @@ -43,9 +44,13 @@ class FileSystemMaskMiddleware(AgentMiddleware[Any]): _NO_FILES_SENTINEL = object() # Sentinel to distinguish None from no files - def __init__(self) -> None: - """Initialize the FileSystemMask middleware.""" - super().__init__() + def __init__(self, debug: bool = False) -> None: + """Initialize the FileSystemMask middleware. + + Args: + debug: Enable debug logging for file masking operations. + """ + super().__init__(debug=debug) self._shadowed_files: Any = self._NO_FILES_SENTINEL def before_model(self, state: Any, runtime: Runtime[None]) -> dict[str, Any] | None: @@ -53,7 +58,7 @@ def before_model(self, state: Any, runtime: Runtime[None]) -> dict[str, Any] | N Args: state: The current agent state, expected to have a "files" field. - runtime: The runtime context (unused but required by interface). + runtime: The runtime context. Returns: State update with "files" field removed, or None if no files to shadow. @@ -62,19 +67,34 @@ def before_model(self, state: Any, runtime: Runtime[None]) -> dict[str, Any] | N if isinstance(state, dict) and "files" in state: # Store the files for later restoration (even if None) self._shadowed_files = state["files"] + self._log("Shadowing files field", runtime, files_count=len(state["files"]) if state["files"] else 0) # Return state update without files new_state = {k: v for k, v in state.items() if k != "files"} return new_state + self._log("No files field to shadow", runtime) return None + async def abefore_model(self, state: Any, runtime: Runtime[None]) -> dict[str, Any] | None: + """Shadow the 'files' field before model execution (async version). + + Args: + state: The current agent state, expected to have a "files" field. + runtime: The runtime context. + + Returns: + State update with "files" field removed, or None if no files to shadow. + """ + # Async version delegates to sync implementation + return self.before_model(state, runtime) + def after_model(self, state: Any, runtime: Runtime[None]) -> dict[str, Any] | None: """Restore the 'files' field after model execution. Args: state: The current agent state after model execution. - runtime: The runtime context (unused but required by interface). + runtime: The runtime context. Returns: State update with "files" field restored, or None if no files to restore. @@ -84,6 +104,21 @@ def after_model(self, state: Any, runtime: Runtime[None]) -> dict[str, Any] | No # Restore the files (even if None) files = self._shadowed_files self._shadowed_files = self._NO_FILES_SENTINEL # Clean up + self._log("Restoring files field", runtime, files_count=len(files) if files else 0) return {"files": files} + self._log("No files to restore", runtime) return None + + async def aafter_model(self, state: Any, runtime: Runtime[None]) -> dict[str, Any] | None: + """Restore the 'files' field after model execution (async version). + + Args: + state: The current agent state after model execution. + runtime: The runtime context. + + Returns: + State update with "files" field restored, or None if no files to restore. 
+ """ + # Async version delegates to sync implementation + return self.after_model(state, runtime) diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/model_provider.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/model_provider.py index 5cd654a..7edc864 100644 --- a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/model_provider.py +++ b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/model_provider.py @@ -3,12 +3,13 @@ from collections.abc import Awaitable, Callable from typing import Any -from langchain.agents.middleware import AgentMiddleware, ModelRequest, ModelResponse +from langchain.agents.middleware import ModelRequest, ModelResponse from ..utils.providers import load_chat_model +from .base import BaseMiddleware -class ModelProviderMiddleware(AgentMiddleware[Any]): +class ModelProviderMiddleware(BaseMiddleware[Any, Any]): """Middleware that automatically loads models with provider registration. This middleware handles model loading and provider switching based on the @@ -29,58 +30,82 @@ def __init__(self, debug: bool = False) -> None: Args: debug: Enable debug logging for model loading operations. """ - super().__init__() - self.debug = debug - - def _log(self, message: str) -> None: - """Log debug messages if debug mode is enabled.""" - if self.debug: - print(f"ModelProviderMiddleware: {message}") + super().__init__(debug=debug) def _process_model_request(self, request: ModelRequest) -> None: """Process and modify the model request (shared logic for sync and async).""" - self._log(f"Processing model request: {request.model} (type: {type(request.model)})") + self._log( + "Processing model request", + request.runtime, + model=str(request.model), + model_type=type(request.model).__name__, + ) try: # Check if runtime context has a model specification model_spec = getattr(request.runtime.context, "model", None) - self._log(f"Context model spec: {model_spec}") + self._log("Context model spec", request.runtime, model_spec=model_spec) if model_spec: # Context specifies a model - load it with automatic provider registration try: new_model = load_chat_model(model_spec) request.model = new_model - self._log(f"Switched to context model: {model_spec} -> {type(new_model)}") + self._log( + "Switched to context model", + request.runtime, + model_spec=model_spec, + new_model_type=type(new_model).__name__, + ) except Exception as model_err: - self._log(f"Failed to load context model {model_spec}: {model_err}") + self._log( + "Failed to load context model", + request.runtime, + model_spec=model_spec, + error=str(model_err), + ) # Fall back to original model else: # No model in context - check if original model needs provider resolution original_model_str = str(request.model) - self._log(f"No context model, checking original: {original_model_str}") + self._log("No context model, checking original", request.runtime, original_model=original_model_str) # If original model has provider:model format, resolve it if ":" in original_model_str and not hasattr(request.model, "invoke"): try: resolved_model = load_chat_model(original_model_str) request.model = resolved_model - self._log(f"Resolved provider model: {original_model_str} -> {type(resolved_model)}") + self._log( + "Resolved provider model", + request.runtime, + original_model=original_model_str, + resolved_type=type(resolved_model).__name__, + ) except Exception as resolve_err: - self._log(f"Failed to resolve provider model {original_model_str}: {resolve_err}") + self._log( + 
"Failed to resolve provider model", + request.runtime, + original_model=original_model_str, + error=str(resolve_err), + ) raise ValueError(f"Cannot resolve model provider for: {original_model_str}") from resolve_err except RuntimeError as runtime_err: - self._log(f"Runtime error (no context available): {runtime_err}") + self._log("Runtime error (no context available)", error=str(runtime_err)) # When no runtime context, fallback to original model except Exception as e: - self._log(f"Unexpected error: {e}") + self._log("Unexpected error", error=str(e)) # Re-raise unexpected errors raise - self._log(f"Final model: {request.model} (type: {type(request.model)})") + self._log( + "Final model", + request.runtime, + model=str(request.model), + model_type=type(request.model).__name__, + ) def wrap_model_call( self, diff --git a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/summarization.py b/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/summarization.py deleted file mode 100644 index f9a586b..0000000 --- a/libs/langgraph-up-devkits/src/langgraph_up_devkits/middleware/summarization.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Summarization middleware for token management in long conversations.""" - -from typing import Any - -from langchain.agents.middleware import SummarizationMiddleware as LCSummarizationMiddleware -from langchain.chat_models import BaseChatModel - -from ..utils.providers import load_chat_model - - -class SummarizationMiddleware(LCSummarizationMiddleware): - """Middleware that summarizes conversation history when token limits are approached. - - This middleware monitors message token counts and automatically summarizes older - messages when a threshold is reached, preserving recent messages and maintaining - context continuity by ensuring AI/Tool message pairs remain together. - - This is a shallow wrapper around LangChain's SummarizationMiddleware that uses - our custom load_chat_model utility for automatic provider registration. - """ - - def __init__( - self, - model: str | BaseChatModel, - **kwargs: Any, - ) -> None: - """Initialize the summarization middleware. - - Args: - model: The language model to use for generating summaries. Can be a string - (e.g., "qwen:qwen-flash", "siliconflow:qwen-plus") or an already - initialized BaseChatModel. - **kwargs: Additional arguments passed to LangChain's SummarizationMiddleware. 
- Common options include: - - max_tokens_before_summary: Token threshold to trigger summarization - - messages_to_keep: Number of recent messages to preserve - - token_counter: Function to count tokens in messages - - summary_prompt: Prompt template for generating summaries - - summary_prefix: Prefix added to system message when including summary - """ - # Initialize the model if it's a string - if isinstance(model, str): - model = load_chat_model(model) - - # Call parent constructor with initialized model and all other kwargs - super().__init__(model=model, **kwargs) diff --git a/libs/langgraph-up-devkits/tests/__init__.py b/libs/langgraph-up-devkits/tests/__init__.py index 5348213..d3db5a3 100644 --- a/libs/langgraph-up-devkits/tests/__init__.py +++ b/libs/langgraph-up-devkits/tests/__init__.py @@ -7,11 +7,9 @@ MockRuntime, MockTool, create_test_messages, - data_analyst_context, mock_chat_model, mock_runtime_context, mock_web_search_tool, - research_context, run_agent_test, sample_conversation, ) @@ -28,9 +26,7 @@ "mock_runtime_context", "run_agent_test", # Fixtures - "data_analyst_context", "mock_chat_model", "mock_web_search_tool", - "research_context", "sample_conversation", ] diff --git a/libs/langgraph-up-devkits/tests/conftest.py b/libs/langgraph-up-devkits/tests/conftest.py index 47480bf..1f0cdf6 100644 --- a/libs/langgraph-up-devkits/tests/conftest.py +++ b/libs/langgraph-up-devkits/tests/conftest.py @@ -4,7 +4,7 @@ import pytest -from langgraph_up_devkits.context.schemas import BaseAgentContext, SearchContext +from langgraph_up_devkits.context import BaseAgentContext, SearchContext from tests.testing import MockRuntime # Note: Environment variables are now loaded via pytest-dotenv plugin diff --git a/libs/langgraph-up-devkits/tests/testing.py b/libs/langgraph-up-devkits/tests/testing.py index e4783ce..b5d0842 100644 --- a/libs/langgraph-up-devkits/tests/testing.py +++ b/libs/langgraph-up-devkits/tests/testing.py @@ -8,10 +8,8 @@ from langchain.tools import BaseTool from langchain_core.messages import BaseMessage -from langgraph_up_devkits.context.schemas import ( +from langgraph_up_devkits.context import ( BaseAgentContext, - DataAnalystContext, - ResearchContext, ) # ===== MOCK OBJECTS ===== @@ -258,27 +256,6 @@ async def run_test(self, agent) -> dict[str, Any]: # ===== PYTEST FIXTURES ===== -@pytest.fixture -def data_analyst_context(): - """Create data analyst context for testing.""" - return DataAnalystContext( - model="openai:openai/gpt-4o", - max_search_results=5, - max_data_rows=1000, - enable_data_viz=True, - user_id="test_analyst", - ) - - -@pytest.fixture -def research_context(): - """Create research context for testing.""" - return ResearchContext( - model="openai:anthropic/claude-sonnet-4", - enable_deepwiki=True, - max_search_results=10, - user_id="test_researcher", - ) @pytest.fixture @@ -326,8 +303,6 @@ def sample_conversation(): "mock_runtime_context", "AgentTestBuilder", # Fixtures (automatically available when imported in conftest.py) - "data_analyst_context", - "research_context", "mock_web_search_tool", "mock_chat_model", "sample_conversation", diff --git a/libs/langgraph-up-devkits/tests/unit/test_context.py b/libs/langgraph-up-devkits/tests/unit/test_context.py index eecb1da..d547eb4 100644 --- a/libs/langgraph-up-devkits/tests/unit/test_context.py +++ b/libs/langgraph-up-devkits/tests/unit/test_context.py @@ -3,11 +3,9 @@ import os from unittest.mock import patch -from langgraph_up_devkits.context.schemas import ( +from langgraph_up_devkits.context import ( 
BaseAgentContext, - DataAnalystContext, DataContext, - ResearchContext, SearchContext, ) @@ -18,7 +16,7 @@ class TestBaseAgentContext: def test_default_values(self): """Test default context values.""" context = BaseAgentContext() - assert context.model == "openai:openai/gpt-4o" + assert context.model == "siliconflow:zai-org/GLM-4.5" assert context.user_id is None @patch.dict(os.environ, {"MODEL": "qwen:qwen-flash", "USER_ID": "test_user"}, clear=False) @@ -59,48 +57,6 @@ def test_search_env_loading(self): class TestComposedContexts: """Test composed context schemas.""" - def test_data_analyst_context_composition(self): - """Test that DataAnalystContext inherits from both SearchContext and DataContext.""" # noqa: E501 - context = DataAnalystContext() - - # Should have BaseAgentContext fields - assert hasattr(context, "model") - assert hasattr(context, "user_id") - - # Should have SearchContext fields - assert hasattr(context, "max_search_results") - assert hasattr(context, "enable_deepwiki") - - # Should have DataContext fields - assert hasattr(context, "max_data_rows") - - # Should have its own fields - assert "data analyst assistant" in context.system_prompt.lower() - - # Test default values for data analyst - assert context.max_search_results == 8 # Data analyst override - assert context.enable_data_viz is True - assert context.max_data_rows == 1000 - - def test_research_context_composition(self): - """Test that ResearchContext has proper inheritance.""" - context = ResearchContext() - - # Should have BaseAgentContext fields - assert hasattr(context, "model") - assert hasattr(context, "user_id") - - # Should have SearchContext fields - assert hasattr(context, "max_search_results") - assert hasattr(context, "enable_deepwiki") - - # Should have its own fields - assert "research assistant" in context.system_prompt.lower() - - # Test default values for research assistant - assert context.max_search_results == 10 # Research override - assert context.enable_deepwiki is True # Research default - def test_data_context_composition(self): """Test DataContext has correct fields and defaults.""" context = DataContext() @@ -125,7 +81,7 @@ def test_context_field_validation(self): def test_context_serialization(self): """Test context can be serialized to dict.""" - context = DataAnalystContext(model="test-model", user_id="test-user", max_search_results=15) + context = SearchContext(model="test-model", user_id="test-user", max_search_results=15) # Should be serializable from dataclasses import asdict @@ -138,20 +94,17 @@ def test_context_serialization(self): def test_context_inheritance_chain(self): """Test the inheritance chain works correctly.""" - # DataAnalystContext should inherit from all parent classes - da_context = DataAnalystContext() + # SearchContext should inherit from BaseAgentContext + search_context = SearchContext() - assert isinstance(da_context, BaseAgentContext) - assert isinstance(da_context, SearchContext) - assert isinstance(da_context, DataContext) - assert isinstance(da_context, DataAnalystContext) + assert isinstance(search_context, BaseAgentContext) + assert isinstance(search_context, SearchContext) - # ResearchContext should inherit correctly - r_context = ResearchContext() + # DataContext should inherit from BaseAgentContext + data_context = DataContext() - assert isinstance(r_context, BaseAgentContext) - assert isinstance(r_context, SearchContext) - assert isinstance(r_context, ResearchContext) + assert isinstance(data_context, BaseAgentContext) + assert 
isinstance(data_context, DataContext) def test_context_field_override_precedence(self): """Test that explicit values override environment and defaults.""" @@ -186,7 +139,7 @@ def test_context_default_values_comprehensive(self): """Test all default values are set correctly.""" # BaseAgentContext defaults base_context = BaseAgentContext() - assert base_context.model == "openai:openai/gpt-4o" + assert base_context.model == "siliconflow:zai-org/GLM-4.5" assert base_context.user_id is None # SearchContext defaults @@ -199,34 +152,6 @@ def test_context_default_values_comprehensive(self): assert data_context.max_data_rows == 1000 assert data_context.enable_data_viz is True - @patch.dict( - os.environ, - { - "MODEL": "qwen:qwen-flash", - "MAX_SEARCH_RESULTS": "20", - "ENABLE_DATA_VIZ": "false", - "MAX_DATA_ROWS": "5000", - }, - clear=False, - ) - def test_data_analyst_env_loading(self): - """Test DataAnalystContext environment variable loading.""" - context = DataAnalystContext() - assert context.model == "qwen:qwen-flash" - assert context.max_search_results == 20 - assert context.enable_data_viz is False - assert context.max_data_rows == 5000 - - def test_explicit_override_precedence(self): - """Test that explicit parameters override environment variables.""" - with patch.dict( - os.environ, - {"MAX_SEARCH_RESULTS": "100", "ENABLE_DEEPWIKI": "false"}, - clear=False, - ): - context = DataAnalystContext(max_search_results=5, enable_deepwiki=True) - assert context.max_search_results == 5 # Explicit override - assert context.enable_deepwiki is True # Explicit override class TestDataContext: @@ -297,7 +222,7 @@ def test_empty_env_vars(self): with patch.dict(os.environ, {"MODEL": "", "USER_ID": ""}, clear=False): context = BaseAgentContext() # Empty strings should be treated as None/default - assert context.model == "openai:openai/gpt-4o" # Default + assert context.model == "siliconflow:zai-org/GLM-4.5" # Default assert context.user_id is None diff --git a/uv.lock b/uv.lock index eff18ae..134ad26 100644 --- a/uv.lock +++ b/uv.lock @@ -451,16 +451,16 @@ wheels = [ [[package]] name = "deepagents" -version = "0.1.1" +version = "0.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain" }, { name = "langchain-anthropic" }, { name = "langchain-core" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a7/73/25dc8666a0724babb9b86fd129ab8346d402e197aabe7ba01f2100c75101/deepagents-0.1.1.tar.gz", hash = "sha256:607094d2508743be8bde7ec9f977b151a577c1261c24d23723ec99df64f369ff", size = 33818, upload-time = "2025-10-18T15:44:50.569Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/4a/bf797db02a520f7a1eac5e62a1e3ca493d86fa51b5c3bd3d9e3c576b2cbb/deepagents-0.1.3.tar.gz", hash = "sha256:80d16fe6e5930abd9f2b6ad984f4fb6fca55dc9f1f12f9d6c48ae0e683bcb15c", size = 33859, upload-time = "2025-10-21T14:29:34.138Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/90/e2/e450feb3ea5729352dcf725dcf3ada1f33f26d37dc6af4e61d6d9b9a2d45/deepagents-0.1.1-py3-none-any.whl", hash = "sha256:a20e53291e91484570a93bf8a98e5979cc7644c73ce37481a845365cc2d32084", size = 27956, upload-time = "2025-10-18T15:44:49.361Z" }, + { url = "https://files.pythonhosted.org/packages/69/93/02636452141a02a226b90cb4c263ae4695ae9e3a01bda26703e02893aa86/deepagents-0.1.3-py3-none-any.whl", hash = "sha256:ad3454872d01baf135d259085e2e074e30d00ef0dad166d411b031426655e907", size = 27956, upload-time = "2025-10-21T14:29:32.545Z" }, ] [[package]] @@ -525,118 +525,118 @@ sdist = { url = 
"https://files.pythonhosted.org/packages/e6/79/d4f20e91327c98096 [[package]] name = "googleapis-common-protos" -version = "1.70.0" +version = "1.71.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/43/b25abe02db2911397819003029bef768f68a974f2ece483e6084d1a5f754/googleapis_common_protos-1.71.0.tar.gz", hash = "sha256:1aec01e574e29da63c80ba9f7bbf1ccfaacf1da877f23609fe236ca7c72a2e2e", size = 146454, upload-time = "2025-10-20T14:58:08.732Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" }, + { url = "https://files.pythonhosted.org/packages/25/e8/eba9fece11d57a71e3e22ea672742c8f3cf23b35730c9e96db768b295216/googleapis_common_protos-1.71.0-py3-none-any.whl", hash = "sha256:59034a1d849dc4d18971997a72ac56246570afdd17f9369a0ff68218d50ab78c", size = 294576, upload-time = "2025-10-20T14:56:21.295Z" }, ] [[package]] name = "grpcio" -version = "1.75.1" +version = "1.76.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = "sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/3c/35ca9747473a306bfad0cee04504953f7098527cd112a4ab55c55af9e7bd/grpcio-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:573855ca2e58e35032aff30bfbd1ee103fbcf4472e4b28d4010757700918e326", size = 5709761, upload-time = "2025-09-26T09:01:28.528Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2c/ecbcb4241e4edbe85ac2663f885726fea0e947767401288b50d8fdcb9200/grpcio-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:6a4996a2c8accc37976dc142d5991adf60733e223e5c9a2219e157dc6a8fd3a2", size = 11496691, upload-time = "2025-09-26T09:01:31.214Z" }, - { url = "https://files.pythonhosted.org/packages/81/40/bc07aee2911f0d426fa53fe636216100c31a8ea65a400894f280274cb023/grpcio-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b1ea1bbe77ecbc1be00af2769f4ae4a88ce93be57a4f3eebd91087898ed749f9", size = 6296084, upload-time = "2025-09-26T09:01:34.596Z" }, - { url = "https://files.pythonhosted.org/packages/b8/d1/10c067f6c67396cbf46448b80f27583b5e8c4b46cdfbe18a2a02c2c2f290/grpcio-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e5b425aee54cc5e3e3c58f00731e8a33f5567965d478d516d35ef99fd648ab68", size = 6950403, upload-time = "2025-09-26T09:01:36.736Z" }, - { url = "https://files.pythonhosted.org/packages/3f/42/5f628abe360b84dfe8dd8f32be6b0606dc31dc04d3358eef27db791ea4d5/grpcio-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0049a7bf547dafaeeb1db17079ce79596c298bfe308fc084d023c8907a845b9a", size = 6470166, upload-time = 
"2025-09-26T09:01:39.474Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/a24035080251324019882ee2265cfde642d6476c0cf8eb207fc693fcebdc/grpcio-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b8ea230c7f77c0a1a3208a04a1eda164633fb0767b4cefd65a01079b65e5b1f", size = 7107828, upload-time = "2025-09-26T09:01:41.782Z" }, - { url = "https://files.pythonhosted.org/packages/e4/f8/d18b984c1c9ba0318e3628dbbeb6af77a5007f02abc378c845070f2d3edd/grpcio-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:36990d629c3c9fb41e546414e5af52d0a7af37ce7113d9682c46d7e2919e4cca", size = 8045421, upload-time = "2025-09-26T09:01:45.835Z" }, - { url = "https://files.pythonhosted.org/packages/7e/b6/4bf9aacff45deca5eac5562547ed212556b831064da77971a4e632917da3/grpcio-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b10ad908118d38c2453ade7ff790e5bce36580c3742919007a2a78e3a1e521ca", size = 7503290, upload-time = "2025-09-26T09:01:49.28Z" }, - { url = "https://files.pythonhosted.org/packages/3b/15/d8d69d10223cb54c887a2180bd29fe5fa2aec1d4995c8821f7aa6eaf72e4/grpcio-1.75.1-cp311-cp311-win32.whl", hash = "sha256:d6be2b5ee7bea656c954dcf6aa8093c6f0e6a3ef9945c99d99fcbfc88c5c0bfe", size = 3950631, upload-time = "2025-09-26T09:01:51.23Z" }, - { url = "https://files.pythonhosted.org/packages/8a/40/7b8642d45fff6f83300c24eaac0380a840e5e7fe0e8d80afd31b99d7134e/grpcio-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:61c692fb05956b17dd6d1ab480f7f10ad0536dba3bc8fd4e3c7263dc244ed772", size = 4646131, upload-time = "2025-09-26T09:01:53.266Z" }, - { url = "https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, - { url = "https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, - { url = "https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, - { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, - { url = "https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, - { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, - { url = 
"https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, - { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, - { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, - { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, - { url = "https://files.pythonhosted.org/packages/46/74/bac4ab9f7722164afdf263ae31ba97b8174c667153510322a5eba4194c32/grpcio-1.75.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:3bed22e750d91d53d9e31e0af35a7b0b51367e974e14a4ff229db5b207647884", size = 5672779, upload-time = "2025-09-26T09:02:19.11Z" }, - { url = "https://files.pythonhosted.org/packages/a6/52/d0483cfa667cddaa294e3ab88fd2c2a6e9dc1a1928c0e5911e2e54bd5b50/grpcio-1.75.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5b8f381eadcd6ecaa143a21e9e80a26424c76a0a9b3d546febe6648f3a36a5ac", size = 11470623, upload-time = "2025-09-26T09:02:22.117Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e4/d1954dce2972e32384db6a30273275e8c8ea5a44b80347f9055589333b3f/grpcio-1.75.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5bf4001d3293e3414d0cf99ff9b1139106e57c3a66dfff0c5f60b2a6286ec133", size = 6248838, upload-time = "2025-09-26T09:02:26.426Z" }, - { url = "https://files.pythonhosted.org/packages/06/43/073363bf63826ba8077c335d797a8d026f129dc0912b69c42feaf8f0cd26/grpcio-1.75.1-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f82ff474103e26351dacfe8d50214e7c9322960d8d07ba7fa1d05ff981c8b2d", size = 6922663, upload-time = "2025-09-26T09:02:28.724Z" }, - { url = "https://files.pythonhosted.org/packages/c2/6f/076ac0df6c359117676cacfa8a377e2abcecec6a6599a15a672d331f6680/grpcio-1.75.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ee119f4f88d9f75414217823d21d75bfe0e6ed40135b0cbbfc6376bc9f7757d", size = 6436149, upload-time = "2025-09-26T09:02:30.971Z" }, - { url = "https://files.pythonhosted.org/packages/6b/27/1d08824f1d573fcb1fa35ede40d6020e68a04391709939e1c6f4193b445f/grpcio-1.75.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:664eecc3abe6d916fa6cf8dd6b778e62fb264a70f3430a3180995bf2da935446", size = 7067989, upload-time = "2025-09-26T09:02:33.233Z" }, - { url = "https://files.pythonhosted.org/packages/c6/98/98594cf97b8713feb06a8cb04eeef60b4757e3e2fb91aa0d9161da769843/grpcio-1.75.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c32193fa08b2fbebf08fe08e84f8a0aad32d87c3ad42999c65e9449871b1c66e", size = 8010717, upload-time = "2025-09-26T09:02:36.011Z" }, - { url = 
"https://files.pythonhosted.org/packages/8c/7e/bb80b1bba03c12158f9254762cdf5cced4a9bc2e8ed51ed335915a5a06ef/grpcio-1.75.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5cebe13088b9254f6e615bcf1da9131d46cfa4e88039454aca9cb65f639bd3bc", size = 7463822, upload-time = "2025-09-26T09:02:38.26Z" }, - { url = "https://files.pythonhosted.org/packages/23/1c/1ea57fdc06927eb5640f6750c697f596f26183573069189eeaf6ef86ba2d/grpcio-1.75.1-cp313-cp313-win32.whl", hash = "sha256:4b4c678e7ed50f8ae8b8dbad15a865ee73ce12668b6aaf411bf3258b5bc3f970", size = 3938490, upload-time = "2025-09-26T09:02:40.268Z" }, - { url = "https://files.pythonhosted.org/packages/4b/24/fbb8ff1ccadfbf78ad2401c41aceaf02b0d782c084530d8871ddd69a2d49/grpcio-1.75.1-cp313-cp313-win_amd64.whl", hash = "sha256:5573f51e3f296a1bcf71e7a690c092845fb223072120f4bdb7a5b48e111def66", size = 4642538, upload-time = "2025-09-26T09:02:42.519Z" }, - { url = "https://files.pythonhosted.org/packages/f2/1b/9a0a5cecd24302b9fdbcd55d15ed6267e5f3d5b898ff9ac8cbe17ee76129/grpcio-1.75.1-cp314-cp314-linux_armv7l.whl", hash = "sha256:c05da79068dd96723793bffc8d0e64c45f316248417515f28d22204d9dae51c7", size = 5673319, upload-time = "2025-09-26T09:02:44.742Z" }, - { url = "https://files.pythonhosted.org/packages/c6/ec/9d6959429a83fbf5df8549c591a8a52bb313976f6646b79852c4884e3225/grpcio-1.75.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06373a94fd16ec287116a825161dca179a0402d0c60674ceeec8c9fba344fe66", size = 11480347, upload-time = "2025-09-26T09:02:47.539Z" }, - { url = "https://files.pythonhosted.org/packages/09/7a/26da709e42c4565c3d7bf999a9569da96243ce34a8271a968dee810a7cf1/grpcio-1.75.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4484f4b7287bdaa7a5b3980f3c7224c3c622669405d20f69549f5fb956ad0421", size = 6254706, upload-time = "2025-09-26T09:02:50.4Z" }, - { url = "https://files.pythonhosted.org/packages/f1/08/dcb26a319d3725f199c97e671d904d84ee5680de57d74c566a991cfab632/grpcio-1.75.1-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:2720c239c1180eee69f7883c1d4c83fc1a495a2535b5fa322887c70bf02b16e8", size = 6922501, upload-time = "2025-09-26T09:02:52.711Z" }, - { url = "https://files.pythonhosted.org/packages/78/66/044d412c98408a5e23cb348845979a2d17a2e2b6c3c34c1ec91b920f49d0/grpcio-1.75.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:07a554fa31c668cf0e7a188678ceeca3cb8fead29bbe455352e712ec33ca701c", size = 6437492, upload-time = "2025-09-26T09:02:55.542Z" }, - { url = "https://files.pythonhosted.org/packages/4e/9d/5e3e362815152aa1afd8b26ea613effa005962f9da0eec6e0e4527e7a7d1/grpcio-1.75.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3e71a2105210366bfc398eef7f57a664df99194f3520edb88b9c3a7e46ee0d64", size = 7081061, upload-time = "2025-09-26T09:02:58.261Z" }, - { url = "https://files.pythonhosted.org/packages/1e/1a/46615682a19e100f46e31ddba9ebc297c5a5ab9ddb47b35443ffadb8776c/grpcio-1.75.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8679aa8a5b67976776d3c6b0521e99d1c34db8a312a12bcfd78a7085cb9b604e", size = 8010849, upload-time = "2025-09-26T09:03:00.548Z" }, - { url = "https://files.pythonhosted.org/packages/67/8e/3204b94ac30b0f675ab1c06540ab5578660dc8b690db71854d3116f20d00/grpcio-1.75.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:aad1c774f4ebf0696a7f148a56d39a3432550612597331792528895258966dc0", size = 7464478, upload-time = "2025-09-26T09:03:03.096Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/97/2d90652b213863b2cf466d9c1260ca7e7b67a16780431b3eb1d0420e3d5b/grpcio-1.75.1-cp314-cp314-win32.whl", hash = "sha256:62ce42d9994446b307649cb2a23335fa8e927f7ab2cbf5fcb844d6acb4d85f9c", size = 4012672, upload-time = "2025-09-26T09:03:05.477Z" }, - { url = "https://files.pythonhosted.org/packages/f9/df/e2e6e9fc1c985cd1a59e6996a05647c720fe8a03b92f5ec2d60d366c531e/grpcio-1.75.1-cp314-cp314-win_amd64.whl", hash = "sha256:f86e92275710bea3000cb79feca1762dc0ad3b27830dd1a74e82ab321d4ee464", size = 4772475, upload-time = "2025-09-26T09:03:07.661Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" }, + { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = "2025-10-21T16:20:59.3Z" }, + { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" }, + { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" }, + { url = "https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" }, + { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" }, + { url = "https://files.pythonhosted.org/packages/bf/05/8e29121994b8d959ffa0afd28996d452f291b48cfc0875619de0bde2c50c/grpcio-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:81fd9652b37b36f16138611c7e884eb82e0cec137c40d3ef7c3f9b3ed00f6ed8", size = 5799718, upload-time = "2025-10-21T16:21:17.939Z" }, + { url = "https://files.pythonhosted.org/packages/d9/75/11d0e66b3cdf998c996489581bdad8900db79ebd83513e45c19548f1cba4/grpcio-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:04bbe1bfe3a68bbfd4e52402ab7d4eb59d72d02647ae2042204326cf4bbad280", size = 11825627, upload-time = "2025-10-21T16:21:20.466Z" }, + { url = "https://files.pythonhosted.org/packages/28/50/2f0aa0498bc188048f5d9504dcc5c2c24f2eb1a9337cd0fa09a61a2e75f0/grpcio-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d388087771c837cdb6515539f43b9d4bf0b0f23593a24054ac16f7a960be16f4", size = 6359167, upload-time = "2025-10-21T16:21:23.122Z" }, + { url = "https://files.pythonhosted.org/packages/66/e5/bbf0bb97d29ede1d59d6588af40018cfc345b17ce979b7b45424628dc8bb/grpcio-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:9f8f757bebaaea112c00dba718fc0d3260052ce714e25804a03f93f5d1c6cc11", size = 7044267, upload-time = "2025-10-21T16:21:25.995Z" }, + { url = "https://files.pythonhosted.org/packages/f5/86/f6ec2164f743d9609691115ae8ece098c76b894ebe4f7c94a655c6b03e98/grpcio-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:980a846182ce88c4f2f7e2c22c56aefd515daeb36149d1c897f83cf57999e0b6", size = 6573963, upload-time = "2025-10-21T16:21:28.631Z" }, + { url = "https://files.pythonhosted.org/packages/60/bc/8d9d0d8505feccfdf38a766d262c71e73639c165b311c9457208b56d92ae/grpcio-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f92f88e6c033db65a5ae3d97905c8fea9c725b63e28d5a75cb73b49bda5024d8", size = 7164484, upload-time = "2025-10-21T16:21:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/67/e6/5d6c2fc10b95edf6df9b8f19cf10a34263b7fd48493936fffd5085521292/grpcio-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4baf3cbe2f0be3289eb68ac8ae771156971848bb8aaff60bad42005539431980", size = 8127777, upload-time = "2025-10-21T16:21:33.577Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c8/dce8ff21c86abe025efe304d9e31fdb0deaaa3b502b6a78141080f206da0/grpcio-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:615ba64c208aaceb5ec83bfdce7728b80bfeb8be97562944836a7a0a9647d882", size = 7594014, upload-time = "2025-10-21T16:21:41.882Z" }, + { url = "https://files.pythonhosted.org/packages/e0/42/ad28191ebf983a5d0ecef90bab66baa5a6b18f2bfdef9d0a63b1973d9f75/grpcio-1.76.0-cp312-cp312-win32.whl", hash = "sha256:45d59a649a82df5718fd9527ce775fd66d1af35e6d31abdcdc906a49c6822958", size = 3984750, upload-time = "2025-10-21T16:21:44.006Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/00/7bd478cbb851c04a48baccaa49b75abaa8e4122f7d86da797500cccdd771/grpcio-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:c088e7a90b6017307f423efbb9d1ba97a22aa2170876223f9709e9d1de0b5347", size = 4704003, upload-time = "2025-10-21T16:21:46.244Z" }, + { url = "https://files.pythonhosted.org/packages/fc/ed/71467ab770effc9e8cef5f2e7388beb2be26ed642d567697bb103a790c72/grpcio-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:26ef06c73eb53267c2b319f43e6634c7556ea37672029241a056629af27c10e2", size = 5807716, upload-time = "2025-10-21T16:21:48.475Z" }, + { url = "https://files.pythonhosted.org/packages/2c/85/c6ed56f9817fab03fa8a111ca91469941fb514e3e3ce6d793cb8f1e1347b/grpcio-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:45e0111e73f43f735d70786557dc38141185072d7ff8dc1829d6a77ac1471468", size = 11821522, upload-time = "2025-10-21T16:21:51.142Z" }, + { url = "https://files.pythonhosted.org/packages/ac/31/2b8a235ab40c39cbc141ef647f8a6eb7b0028f023015a4842933bc0d6831/grpcio-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:83d57312a58dcfe2a3a0f9d1389b299438909a02db60e2f2ea2ae2d8034909d3", size = 6362558, upload-time = "2025-10-21T16:21:54.213Z" }, + { url = "https://files.pythonhosted.org/packages/bd/64/9784eab483358e08847498ee56faf8ff6ea8e0a4592568d9f68edc97e9e9/grpcio-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3e2a27c89eb9ac3d81ec8835e12414d73536c6e620355d65102503064a4ed6eb", size = 7049990, upload-time = "2025-10-21T16:21:56.476Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/8c12319a6369434e7a184b987e8e9f3b49a114c489b8315f029e24de4837/grpcio-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61f69297cba3950a524f61c7c8ee12e55c486cb5f7db47ff9dcee33da6f0d3ae", size = 6575387, upload-time = "2025-10-21T16:21:59.051Z" }, + { url = "https://files.pythonhosted.org/packages/15/0f/f12c32b03f731f4a6242f771f63039df182c8b8e2cf8075b245b409259d4/grpcio-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6a15c17af8839b6801d554263c546c69c4d7718ad4321e3166175b37eaacca77", size = 7166668, upload-time = "2025-10-21T16:22:02.049Z" }, + { url = "https://files.pythonhosted.org/packages/ff/2d/3ec9ce0c2b1d92dd59d1c3264aaec9f0f7c817d6e8ac683b97198a36ed5a/grpcio-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:25a18e9810fbc7e7f03ec2516addc116a957f8cbb8cbc95ccc80faa072743d03", size = 8124928, upload-time = "2025-10-21T16:22:04.984Z" }, + { url = "https://files.pythonhosted.org/packages/1a/74/fd3317be5672f4856bcdd1a9e7b5e17554692d3db9a3b273879dc02d657d/grpcio-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:931091142fd8cc14edccc0845a79248bc155425eee9a98b2db2ea4f00a235a42", size = 7589983, upload-time = "2025-10-21T16:22:07.881Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/ca038cf420f405971f19821c8c15bcbc875505f6ffadafe9ffd77871dc4c/grpcio-1.76.0-cp313-cp313-win32.whl", hash = "sha256:5e8571632780e08526f118f74170ad8d50fb0a48c23a746bef2a6ebade3abd6f", size = 3984727, upload-time = "2025-10-21T16:22:10.032Z" }, + { url = "https://files.pythonhosted.org/packages/41/80/84087dc56437ced7cdd4b13d7875e7439a52a261e3ab4e06488ba6173b0a/grpcio-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:f9f7bd5faab55f47231ad8dba7787866b69f5e93bc306e3915606779bbfb4ba8", size = 4702799, upload-time = "2025-10-21T16:22:12.709Z" }, + { url = 
"https://files.pythonhosted.org/packages/b4/46/39adac80de49d678e6e073b70204091e76631e03e94928b9ea4ecf0f6e0e/grpcio-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:ff8a59ea85a1f2191a0ffcc61298c571bc566332f82e5f5be1b83c9d8e668a62", size = 5808417, upload-time = "2025-10-21T16:22:15.02Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/a4531f7fb8b4e2a60b94e39d5d924469b7a6988176b3422487be61fe2998/grpcio-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:06c3d6b076e7b593905d04fdba6a0525711b3466f43b3400266f04ff735de0cd", size = 11828219, upload-time = "2025-10-21T16:22:17.954Z" }, + { url = "https://files.pythonhosted.org/packages/4b/1c/de55d868ed7a8bd6acc6b1d6ddc4aa36d07a9f31d33c912c804adb1b971b/grpcio-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fd5ef5932f6475c436c4a55e4336ebbe47bd3272be04964a03d316bbf4afbcbc", size = 6367826, upload-time = "2025-10-21T16:22:20.721Z" }, + { url = "https://files.pythonhosted.org/packages/59/64/99e44c02b5adb0ad13ab3adc89cb33cb54bfa90c74770f2607eea629b86f/grpcio-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b331680e46239e090f5b3cead313cc772f6caa7d0fc8de349337563125361a4a", size = 7049550, upload-time = "2025-10-21T16:22:23.637Z" }, + { url = "https://files.pythonhosted.org/packages/43/28/40a5be3f9a86949b83e7d6a2ad6011d993cbe9b6bd27bea881f61c7788b6/grpcio-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2229ae655ec4e8999599469559e97630185fdd53ae1e8997d147b7c9b2b72cba", size = 6575564, upload-time = "2025-10-21T16:22:26.016Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a9/1be18e6055b64467440208a8559afac243c66a8b904213af6f392dc2212f/grpcio-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:490fa6d203992c47c7b9e4a9d39003a0c2bcc1c9aa3c058730884bbbb0ee9f09", size = 7176236, upload-time = "2025-10-21T16:22:28.362Z" }, + { url = "https://files.pythonhosted.org/packages/0f/55/dba05d3fcc151ce6e81327541d2cc8394f442f6b350fead67401661bf041/grpcio-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:479496325ce554792dba6548fae3df31a72cef7bad71ca2e12b0e58f9b336bfc", size = 8125795, upload-time = "2025-10-21T16:22:31.075Z" }, + { url = "https://files.pythonhosted.org/packages/4a/45/122df922d05655f63930cf42c9e3f72ba20aadb26c100ee105cad4ce4257/grpcio-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1c9b93f79f48b03ada57ea24725d83a30284a012ec27eab2cf7e50a550cbbbcc", size = 7592214, upload-time = "2025-10-21T16:22:33.831Z" }, + { url = "https://files.pythonhosted.org/packages/4a/6e/0b899b7f6b66e5af39e377055fb4a6675c9ee28431df5708139df2e93233/grpcio-1.76.0-cp314-cp314-win32.whl", hash = "sha256:747fa73efa9b8b1488a95d0ba1039c8e2dca0f741612d80415b1e1c560febf4e", size = 4062961, upload-time = "2025-10-21T16:22:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/19/41/0b430b01a2eb38ee887f88c1f07644a1df8e289353b78e82b37ef988fb64/grpcio-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:922fa70ba549fce362d2e2871ab542082d66e2aaf0c19480ea453905b01f384e", size = 4834462, upload-time = "2025-10-21T16:22:39.772Z" }, ] [[package]] name = "grpcio-tools" -version = "1.75.1" +version = "1.76.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, { name = "protobuf" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/76/0cd2a2bb379275c319544a3ab613dc3cea7a167503908c1b4de55f82bd9e/grpcio_tools-1.75.1.tar.gz", hash = 
"sha256:bb78960cf3d58941e1fec70cbdaccf255918beed13c34112a6915a6d8facebd1", size = 5390470, upload-time = "2025-09-26T09:10:11.948Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/45/28/71ab934662d41ded4e451d9af0ec6f9aade3525e470fdfd10bd20e588e44/grpcio_tools-1.75.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:f0635231feb70a9d551452829943a1a5fa651283e7a300aadc22df5ea5da696f", size = 2545461, upload-time = "2025-09-26T09:08:08.514Z" }, - { url = "https://files.pythonhosted.org/packages/69/40/d90f6fdb51f51b2a518401207b3920fcfdfa996ed7bca844096f111ed839/grpcio_tools-1.75.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:626293296ef7e2d87ab1a80b81a55eef91883c65b59a97576099a28b9535100b", size = 5842958, upload-time = "2025-09-26T09:08:11.468Z" }, - { url = "https://files.pythonhosted.org/packages/b4/b7/52e6f32fd0101e3ac9c654a6441b254ba5874f146b543b20afbcb8246947/grpcio_tools-1.75.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:071339d90f1faab332ce4919c815a10b9c3ed2c09473f550f686bf9cc148579f", size = 2591669, upload-time = "2025-09-26T09:08:13.481Z" }, - { url = "https://files.pythonhosted.org/packages/0a/3c/115c59a5c0c8e9d7d99a40bac8d5e91c05b6735b3bb185265d40e9fc4346/grpcio_tools-1.75.1-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:44195f58c052fa935b78c7438c85cbcd4b273dd685028e4f6d4d7b30d47daad1", size = 2904952, upload-time = "2025-09-26T09:08:15.299Z" }, - { url = "https://files.pythonhosted.org/packages/a9/cd/d2a3583a5b1d71da88f7998f20fb5a0b6fe5bb96bb916a610c29269063b6/grpcio_tools-1.75.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:860fafdb85726029d646c99859ff7bdca5aae61b5ff038c3bd355fc1ec6b2764", size = 2656311, upload-time = "2025-09-26T09:08:17.094Z" }, - { url = "https://files.pythonhosted.org/packages/aa/09/67b9215d39add550e430c9677bd43c9a315da07ab62fa3a5f44f1cf5bb75/grpcio_tools-1.75.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4559547a0cb3d3db1b982eea87d4656036339b400f48127fef932210672fb59e", size = 3105583, upload-time = "2025-09-26T09:08:19.179Z" }, - { url = "https://files.pythonhosted.org/packages/98/d7/d400b90812470f3dc2466964e62fc03592de46b5c824c82ef5303be60167/grpcio_tools-1.75.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9af65a310807d7f36a8f7cddea142fe97d6dffba74444f38870272f2e5a3a06b", size = 3654677, upload-time = "2025-09-26T09:08:21.227Z" }, - { url = "https://files.pythonhosted.org/packages/9c/93/edf6de71b4f936b3f09461a3286db1f902c6366c5de06ef19a8c2523034a/grpcio_tools-1.75.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c1de31aefc0585d2f915a7cd0994d153547495b8d79c44c58048a3ede0b65be", size = 3322147, upload-time = "2025-09-26T09:08:23.08Z" }, - { url = "https://files.pythonhosted.org/packages/80/00/0f8c6204e34070e7d4f344b27e4b1b0320dfdd94574f79738a43504d182e/grpcio_tools-1.75.1-cp311-cp311-win32.whl", hash = "sha256:efaf95fcaa5d3ac1bcfe44ceed9e2512eb95b5c8c476569bdbbe2bee4b59c8a9", size = 993388, upload-time = "2025-09-26T09:08:24.708Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ae/6f738154980f606293988a64ef4bb0ea2bb12029a4529464aac56fe2ab99/grpcio_tools-1.75.1-cp311-cp311-win_amd64.whl", hash = "sha256:7cefe76fc35c825f0148d60d2294a527053d0f5dd6a60352419214a8c53223c9", size = 1157907, upload-time = "2025-09-26T09:08:26.537Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a7/581bb204d19a347303ed5e25b19f7d8c6365a28c242fca013d1d6d78ad7e/grpcio_tools-1.75.1-cp312-cp312-linux_armv7l.whl", hash = 
"sha256:49b68936cf212052eeafa50b824e17731b78d15016b235d36e0d32199000b14c", size = 2546099, upload-time = "2025-09-26T09:08:28.794Z" }, - { url = "https://files.pythonhosted.org/packages/9f/59/ab65998eba14ff9d292c880f6a276fe7d0571bba3bb4ddf66aca1f8438b5/grpcio_tools-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:08cb6e568e58b76a2178ad3b453845ff057131fff00f634d7e15dcd015cd455b", size = 5839838, upload-time = "2025-09-26T09:08:31.038Z" }, - { url = "https://files.pythonhosted.org/packages/7e/65/7027f71069b4c1e8c7b46de8c46c297c9d28ef6ed4ea0161e8c82c75d1d0/grpcio_tools-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:168402ad29a249092673079cf46266936ec2fb18d4f854d96e9c5fa5708efa39", size = 2592916, upload-time = "2025-09-26T09:08:33.216Z" }, - { url = "https://files.pythonhosted.org/packages/0f/84/1abfb3c679b78c7fca7524031cf9de4c4c509c441b48fd26291ac16dd1af/grpcio_tools-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:bbae11c29fcf450730f021bfc14b12279f2f985e2e493ccc2f133108728261db", size = 2905276, upload-time = "2025-09-26T09:08:35.691Z" }, - { url = "https://files.pythonhosted.org/packages/99/cd/7f9e05f1eddccb61bc0ead1e49eb2222441957b02ed11acfcd2f795b03a8/grpcio_tools-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:38c6c7d5d4800f636ee691cd073db1606d1a6a76424ca75c9b709436c9c20439", size = 2656424, upload-time = "2025-09-26T09:08:38.255Z" }, - { url = "https://files.pythonhosted.org/packages/29/1d/8b7852771c2467728341f7b9c3ca4ebc76e4e23485c6a3e6d97a8323ad2a/grpcio_tools-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:626f6a61a8f141dde9a657775854d1c0d99509f9a2762b82aa401a635f6ec73d", size = 3108985, upload-time = "2025-09-26T09:08:40.291Z" }, - { url = "https://files.pythonhosted.org/packages/c2/6a/069da89cdf2e97e4558bfceef5b60bf0ef200c443b465e7691869006dd32/grpcio_tools-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f61a8334ae38d4f98c744a732b89527e5af339d17180e25fff0676060f8709b7", size = 3657940, upload-time = "2025-09-26T09:08:42.437Z" }, - { url = "https://files.pythonhosted.org/packages/c3/e4/ca8dae800c084beb89e2720346f70012d36dfb9df02d8eacd518c06cf4a0/grpcio_tools-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd0c3fb40d89a1e24a41974e77c7331e80396ab7cde39bc396a13d6b5e2a750b", size = 3324878, upload-time = "2025-09-26T09:08:45.083Z" }, - { url = "https://files.pythonhosted.org/packages/58/06/cbe923679309bf970923f4a11351ea9e485291b504d7243130fdcfdcb03f/grpcio_tools-1.75.1-cp312-cp312-win32.whl", hash = "sha256:004bc5327593eea48abd03be3188e757c3ca0039079587a6aac24275127cac20", size = 993071, upload-time = "2025-09-26T09:08:46.785Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0c/84d6be007262c5d88a590082f3a1fe62d4b0eeefa10c6cdb3548f3663e80/grpcio_tools-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:23952692160b5fe7900653dfdc9858dc78c2c42e15c27e19ee780c8917ba6028", size = 1157506, upload-time = "2025-09-26T09:08:48.844Z" }, - { url = "https://files.pythonhosted.org/packages/47/fa/624bbe1b2ccf4f6044bf3cd314fe2c35f78f702fcc2191dc65519baddca4/grpcio_tools-1.75.1-cp313-cp313-linux_armv7l.whl", hash = "sha256:ca9e116aab0ecf4365fc2980f2e8ae1b22273c3847328b9a8e05cbd14345b397", size = 2545752, upload-time = "2025-09-26T09:08:51.433Z" }, - { url = "https://files.pythonhosted.org/packages/b9/4c/6d884e2337feff0a656e395338019adecc3aa1daeae9d7e8eb54340d4207/grpcio_tools-1.75.1-cp313-cp313-macosx_11_0_universal2.whl", hash = 
"sha256:9fe87a926b65eb7f41f8738b6d03677cc43185ff77a9d9b201bdb2f673f3fa1e", size = 5838163, upload-time = "2025-09-26T09:08:53.858Z" }, - { url = "https://files.pythonhosted.org/packages/d1/2a/2ba7b6911a754719643ed92ae816a7f989af2be2882b9a9e1f90f4b0e882/grpcio_tools-1.75.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:45503a6094f91b3fd31c3d9adef26ac514f102086e2a37de797e220a6791ee87", size = 2592148, upload-time = "2025-09-26T09:08:55.86Z" }, - { url = "https://files.pythonhosted.org/packages/88/db/fa613a45c3c7b00f905bd5ad3a93c73194724d0a2dd72adae3be32983343/grpcio_tools-1.75.1-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b01b60b3de67be531a39fd869d7613fa8f178aff38c05e4d8bc2fc530fa58cb5", size = 2905215, upload-time = "2025-09-26T09:08:58.27Z" }, - { url = "https://files.pythonhosted.org/packages/d7/0c/ee4786972bb82f60e4f313bb2227c79c2cd20eb13c94c0263067923cfd12/grpcio_tools-1.75.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:09e2b9b9488735514777d44c1e4eda813122d2c87aad219f98d5d49b359a8eab", size = 2656251, upload-time = "2025-09-26T09:09:00.249Z" }, - { url = "https://files.pythonhosted.org/packages/77/f1/cc5a50658d705d0b71ff8a4fbbfcc6279d3c95731a2ef7285e13dc40e2fe/grpcio_tools-1.75.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:55e60300e62b220fabe6f062fe69f143abaeff3335f79b22b56d86254f3c3c80", size = 3108911, upload-time = "2025-09-26T09:09:02.515Z" }, - { url = "https://files.pythonhosted.org/packages/09/d8/43545f77c4918e778e90bc2c02b3462ac71cee14f29d85cdb69b089538eb/grpcio_tools-1.75.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:49ce00fcc6facbbf52bf376e55b8e08810cecd03dab0b3a2986d73117c6f6ee4", size = 3657021, upload-time = "2025-09-26T09:09:05.331Z" }, - { url = "https://files.pythonhosted.org/packages/fc/0b/2ae5925374b66bc8df5b828eff1a5f9459349c83dae1773f0aa9858707e6/grpcio_tools-1.75.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:71e95479aea868f8c8014d9dc4267f26ee75388a0d8a552e1648cfa0b53d24b4", size = 3324450, upload-time = "2025-09-26T09:09:07.867Z" }, - { url = "https://files.pythonhosted.org/packages/6e/53/9f887bacbecf892ac5b0b282477ca8cfa5b73911b04259f0d88b52e9a055/grpcio_tools-1.75.1-cp313-cp313-win32.whl", hash = "sha256:fff9d2297416eae8861e53154ccf70a19994e5935e6c8f58ebf431f81cbd8d12", size = 992434, upload-time = "2025-09-26T09:09:09.966Z" }, - { url = "https://files.pythonhosted.org/packages/a5/f0/9979d97002edffdc2a88e5f2e0dccea396dd4a6eab34fa2f705fe43eae2f/grpcio_tools-1.75.1-cp313-cp313-win_amd64.whl", hash = "sha256:1849ddd508143eb48791e81d42ddc924c554d1b4900e06775a927573a8d4267f", size = 1157069, upload-time = "2025-09-26T09:09:12.287Z" }, - { url = "https://files.pythonhosted.org/packages/a6/0b/4ff4ead293f2b016668628a240937828444094778c8037d2bbef700e9097/grpcio_tools-1.75.1-cp314-cp314-linux_armv7l.whl", hash = "sha256:f281b594489184b1f9a337cdfed1fc1ddb8428f41c4b4023de81527e90b38e1e", size = 2545868, upload-time = "2025-09-26T09:09:14.716Z" }, - { url = "https://files.pythonhosted.org/packages/0e/78/aa6bf73a18de5357c01ef87eea92150931586b25196fa4df197a37bae11d/grpcio_tools-1.75.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:becf8332f391abc62bf4eea488b63be063d76a7cf2ef00b2e36c617d9ee9216b", size = 5838010, upload-time = "2025-09-26T09:09:20.415Z" }, - { url = "https://files.pythonhosted.org/packages/99/65/7eaad673bc971af45e079d3b13c20d9ba9842b8788d31953e3234c2e2cee/grpcio_tools-1.75.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", 
hash = "sha256:a08330f24e5cd7b39541882a95a8ba04ffb4df79e2984aa0cd01ed26dcdccf49", size = 2593170, upload-time = "2025-09-26T09:09:22.889Z" }, - { url = "https://files.pythonhosted.org/packages/e4/db/57e1e29e9186c7ed223ce8a9b609d3f861c4db015efb643dfe60b403c137/grpcio_tools-1.75.1-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:6bf3742bd8f102630072ed317d1496f31c454cd85ad19d37a68bd85bf9d5f8b9", size = 2905167, upload-time = "2025-09-26T09:09:25.96Z" }, - { url = "https://files.pythonhosted.org/packages/cd/7b/894f891f3cf19812192f8bbf1e0e1c958055676ecf0a5466a350730a006d/grpcio_tools-1.75.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f26028949474feb380460ce52d9d090d00023940c65236294a66c42ac5850e8b", size = 2656210, upload-time = "2025-09-26T09:09:28.786Z" }, - { url = "https://files.pythonhosted.org/packages/99/76/8e48427da93ef243c09629969c7b5a2c59dceb674b6b623c1f5fbaa5c8c5/grpcio_tools-1.75.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:1bd68fb98bf08f11b6c3210834a14eefe585bad959bdba38e78b4ae3b04ba5bd", size = 3109226, upload-time = "2025-09-26T09:09:31.307Z" }, - { url = "https://files.pythonhosted.org/packages/b3/7e/ecf71c316c2a88c2478b7c6372d0f82d05f07edbf0f31b6da613df99ec7c/grpcio_tools-1.75.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f1496e21586193da62c3a73cd16f9c63c5b3efd68ff06dab96dbdfefa90d40bf", size = 3657139, upload-time = "2025-09-26T09:09:35.043Z" }, - { url = "https://files.pythonhosted.org/packages/6f/f3/b2613e81da2085f40a989c0601ec9efc11e8b32fcb71b1234b64a18af830/grpcio_tools-1.75.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:14a78b1e36310cdb3516cdf9ee2726107875e0b247e2439d62fc8dc38cf793c1", size = 3324513, upload-time = "2025-09-26T09:09:37.44Z" }, - { url = "https://files.pythonhosted.org/packages/9a/1f/2df4fa8634542524bc22442ffe045d41905dae62cc5dd14408b80c5ac1b8/grpcio_tools-1.75.1-cp314-cp314-win32.whl", hash = "sha256:0e6f916daf222002fb98f9a6f22de0751959e7e76a24941985cc8e43cea77b50", size = 1015283, upload-time = "2025-09-26T09:09:39.461Z" }, - { url = "https://files.pythonhosted.org/packages/23/4f/f27c973ff50486a70be53a3978b6b0244398ca170a4e19d91988b5295d92/grpcio_tools-1.75.1-cp314-cp314-win_amd64.whl", hash = "sha256:878c3b362264588c45eba57ce088755f8b2b54893d41cc4a68cdeea62996da5c", size = 1189364, upload-time = "2025-09-26T09:09:42.036Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/a0/77/17d60d636ccd86a0db0eccc24d02967bbc3eea86b9db7324b04507ebaa40/grpcio_tools-1.76.0.tar.gz", hash = "sha256:ce80169b5e6adf3e8302f3ebb6cb0c3a9f08089133abca4b76ad67f751f5ad88", size = 5390807, upload-time = "2025-10-21T16:26:55.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/d1/efbeed1a864c846228c0a3b322e7a2d6545f025e35246aebf96496a36004/grpcio_tools-1.76.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c6480f6af6833850a85cca1c6b435ef4ffd2ac8e88ef683b4065233827950243", size = 2545931, upload-time = "2025-10-21T16:24:50.201Z" }, + { url = "https://files.pythonhosted.org/packages/af/8e/f257c0f565d9d44658301238b01a9353bc6f3b272bb4191faacae042579d/grpcio_tools-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c7c23fe1dc09818e16a48853477806ad77dd628b33996f78c05a293065f8210c", size = 5844794, upload-time = "2025-10-21T16:24:53.312Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c0/6c1e89c67356cb20e19ed670c5099b13e40fd678cac584c778f931666a86/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:fcdce7f7770ff052cd4e60161764b0b3498c909bde69138f8bd2e7b24a3ecd8f", size = 2591772, upload-time = "2025-10-21T16:24:55.729Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/5f33aa7bc3ddaad0cfd2f4e950ac4f1a310e8d0c7b1358622a581e8b7a2f/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b598fdcebffa931c7da5c9e90b5805fff7e9bc6cf238319358a1b85704c57d33", size = 2905140, upload-time = "2025-10-21T16:24:57.952Z" }, + { url = "https://files.pythonhosted.org/packages/f4/3e/23e3a52a77368f47188ed83c34eb53866d3ce0f73835b2f6764844ae89eb/grpcio_tools-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6a9818ff884796b12dcf8db32126e40ec1098cacf5697f27af9cfccfca1c1fae", size = 2656475, upload-time = "2025-10-21T16:25:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/51/85/a74ae87ec7dbd3d2243881f5c548215aed1148660df7945be3a125ba9a21/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:105e53435b2eed3961da543db44a2a34479d98d18ea248219856f30a0ca4646b", size = 3106158, upload-time = "2025-10-21T16:25:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/54/d5/a6ed1e5823bc5d55a1eb93e0c14ccee0b75951f914832ab51fb64d522a0f/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:454a1232c7f99410d92fa9923c7851fd4cdaf657ee194eac73ea1fe21b406d6e", size = 3654980, upload-time = "2025-10-21T16:25:05.717Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/c05d5501ba156a242079ef71d073116d2509c195b5e5e74c545f0a3a3a69/grpcio_tools-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ca9ccf667afc0268d45ab202af4556c72e57ea36ebddc93535e1a25cbd4f8aba", size = 3322658, upload-time = "2025-10-21T16:25:07.885Z" }, + { url = "https://files.pythonhosted.org/packages/02/b6/ee0317b91da19a7537d93c4161cbc2a45a165c8893209b0bbd470d830ffa/grpcio_tools-1.76.0-cp311-cp311-win32.whl", hash = "sha256:a83c87513b708228b4cad7619311daba65b40937745103cadca3db94a6472d9c", size = 993837, upload-time = "2025-10-21T16:25:10.133Z" }, + { url = "https://files.pythonhosted.org/packages/81/63/9623cadf0406b264737f16d4ed273bb2d65001d87fbd803b565c45d665d1/grpcio_tools-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:2ce5e87ec71f2e4041dce4351f2a8e3b713e3bca6b54c69c3fbc6c7ad1f4c386", size = 1158634, upload-time = "2025-10-21T16:25:12.705Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ca/a931c1439cabfe305c9afd07e233150cd0565aa062c20d1ee412ed188852/grpcio_tools-1.76.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:4ad555b8647de1ebaffb25170249f89057721ffb74f7da96834a07b4855bb46a", size = 2546852, upload-time = "2025-10-21T16:25:15.024Z" }, + { url = "https://files.pythonhosted.org/packages/4c/07/935cfbb7dccd602723482a86d43fbd992f91e9867bca0056a1e9f348473e/grpcio_tools-1.76.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:243af7c8fc7ff22a40a42eb8e0f6f66963c1920b75aae2a2ec503a9c3c8b31c1", size = 5841777, upload-time = "2025-10-21T16:25:17.425Z" }, + { url = "https://files.pythonhosted.org/packages/e4/92/8fcb5acebdccb647e0fa3f002576480459f6cf81e79692d7b3c4d6e29605/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8207b890f423142cc0025d041fb058f7286318df6a049565c27869d73534228b", size = 2594004, upload-time = "2025-10-21T16:25:19.809Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ea/64838e8113b7bfd4842b15c815a7354cb63242fdce9d6648d894b5d50897/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash 
= "sha256:3dafa34c2626a6691d103877e8a145f54c34cf6530975f695b396ed2fc5c98f8", size = 2905563, upload-time = "2025-10-21T16:25:21.889Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d6/53798827d821098219e58518b6db52161ce4985620850aa74ce3795da8a7/grpcio_tools-1.76.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:30f1d2dda6ece285b3d9084e94f66fa721ebdba14ae76b2bc4c581c8a166535c", size = 2656936, upload-time = "2025-10-21T16:25:24.369Z" }, + { url = "https://files.pythonhosted.org/packages/89/a3/d9c1cefc46a790eec520fe4e70e87279abb01a58b1a3b74cf93f62b824a2/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a889af059dc6dbb82d7b417aa581601316e364fe12eb54c1b8d95311ea50916d", size = 3109811, upload-time = "2025-10-21T16:25:26.711Z" }, + { url = "https://files.pythonhosted.org/packages/50/75/5997752644b73b5d59377d333a51c8a916606df077f5a487853e37dca289/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c3f2c3c44c56eb5d479ab178f0174595d0a974c37dade442f05bb73dfec02f31", size = 3658786, upload-time = "2025-10-21T16:25:28.819Z" }, + { url = "https://files.pythonhosted.org/packages/84/47/dcf8380df4bd7931ffba32fc6adc2de635b6569ca27fdec7121733797062/grpcio_tools-1.76.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:479ce02dff684046f909a487d452a83a96b4231f7c70a3b218a075d54e951f56", size = 3325144, upload-time = "2025-10-21T16:25:30.863Z" }, + { url = "https://files.pythonhosted.org/packages/04/88/ea3e5fdb874d8c2d04488e4b9d05056537fba70915593f0c283ac77df188/grpcio_tools-1.76.0-cp312-cp312-win32.whl", hash = "sha256:9ba4bb539936642a44418b38ee6c3e8823c037699e2cb282bd8a44d76a4be833", size = 993523, upload-time = "2025-10-21T16:25:32.594Z" }, + { url = "https://files.pythonhosted.org/packages/de/b1/ce7d59d147675ec191a55816be46bc47a343b5ff07279eef5817c09cc53e/grpcio_tools-1.76.0-cp312-cp312-win_amd64.whl", hash = "sha256:0cd489016766b05f9ed8a6b6596004b62c57d323f49593eac84add032a6d43f7", size = 1158493, upload-time = "2025-10-21T16:25:34.5Z" }, + { url = "https://files.pythonhosted.org/packages/13/01/b16fe73f129df49811d886dc99d3813a33cf4d1c6e101252b81c895e929f/grpcio_tools-1.76.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ff48969f81858397ef33a36b326f2dbe2053a48b254593785707845db73c8f44", size = 2546312, upload-time = "2025-10-21T16:25:37.138Z" }, + { url = "https://files.pythonhosted.org/packages/25/17/2594c5feb76bb0b25bfbf91ec1075b276e1b2325e4bc7ea649a7b5dbf353/grpcio_tools-1.76.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa2f030fd0ef17926026ee8e2b700e388d3439155d145c568fa6b32693277613", size = 5839627, upload-time = "2025-10-21T16:25:40.082Z" }, + { url = "https://files.pythonhosted.org/packages/c7/c6/097b1aa26fbf72fb3cdb30138a2788529e4f10d8759de730a83f5c06726e/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bacbf3c54f88c38de8e28f8d9b97c90b76b105fb9ddef05d2c50df01b32b92af", size = 2592817, upload-time = "2025-10-21T16:25:42.301Z" }, + { url = "https://files.pythonhosted.org/packages/03/78/d1d985b48592a674509a85438c1a3d4c36304ddfc99d1b05d27233b51062/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0d4e4afe9a0e3c24fad2f1af45f98cf8700b2bfc4d790795756ba035d2ea7bdc", size = 2905186, upload-time = "2025-10-21T16:25:44.395Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0e/770afbb47f0b5f594b93a7b46a95b892abda5eebe60efb511e96cee52170/grpcio_tools-1.76.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", 
hash = "sha256:fbbd4e1fc5af98001ceef5e780e8c10921d94941c3809238081e73818ef707f1", size = 2656188, upload-time = "2025-10-21T16:25:46.942Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2b/017c2fcf4c5d3cf00cf7d5ce21eb88521de0d89bdcf26538ad2862ec6d07/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b05efe5a59883ab8292d596657273a60e0c3e4f5a9723c32feb9fc3a06f2f3ef", size = 3109141, upload-time = "2025-10-21T16:25:49.137Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5f/2495f88e3d50c6f2c2da2752bad4fa3a30c52ece6c9d8b0c636cd8b1430b/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:be483b90e62b7892eb71fa1fc49750bee5b2ee35b5ec99dd2b32bed4bedb5d71", size = 3657892, upload-time = "2025-10-21T16:25:52.362Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1d/c4f39d31b19d9baf35d900bf3f969ce1c842f63a8560c8003ed2e5474760/grpcio_tools-1.76.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:630cd7fd3e8a63e20703a7ad816979073c2253e591b5422583c27cae2570de73", size = 3324778, upload-time = "2025-10-21T16:25:54.629Z" }, + { url = "https://files.pythonhosted.org/packages/b4/b6/35ee3a6e4af85a93da28428f81f4b29bcb36f6986b486ad71910fcc02e25/grpcio_tools-1.76.0-cp313-cp313-win32.whl", hash = "sha256:eb2567280f9f6da5444043f0e84d8408c7a10df9ba3201026b30e40ef3814736", size = 993084, upload-time = "2025-10-21T16:25:56.52Z" }, + { url = "https://files.pythonhosted.org/packages/f3/7a/5bd72344d86ee860e5920c9a7553cfe3bc7b1fce79f18c00ac2497f5799f/grpcio_tools-1.76.0-cp313-cp313-win_amd64.whl", hash = "sha256:0071b1c0bd0f5f9d292dca4efab32c92725d418e57f9c60acdc33c0172af8b53", size = 1158151, upload-time = "2025-10-21T16:25:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/f0/c0/aa20eebe8f3553b7851643e9c88d237c3a6ca30ade646897e25dbb27be99/grpcio_tools-1.76.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:c53c5719ef2a435997755abde3826ba4087174bd432aa721d8fac781fcea79e4", size = 2546297, upload-time = "2025-10-21T16:26:01.258Z" }, + { url = "https://files.pythonhosted.org/packages/d9/98/6af702804934443c1d0d4d27d21b990d92d22ddd1b6bec6b056558cbbffa/grpcio_tools-1.76.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:e3db1300d7282264639eeee7243f5de7e6a7c0283f8bf05d66c0315b7b0f0b36", size = 5839804, upload-time = "2025-10-21T16:26:05.495Z" }, + { url = "https://files.pythonhosted.org/packages/ea/8d/7725fa7b134ef8405ffe0a37c96eeb626e5af15d70e1bdac4f8f1abf842e/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b018a4b7455a7e8c16d0fdb3655a6ba6c9536da6de6c5d4f11b6bb73378165b", size = 2593922, upload-time = "2025-10-21T16:26:07.563Z" }, + { url = "https://files.pythonhosted.org/packages/de/ff/5b6b5012c79fa72f9107dc13f7226d9ce7e059ea639fd8c779e0dd284386/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:ec6e4de3866e47cfde56607b1fae83ecc5aa546e06dec53de11f88063f4b5275", size = 2905327, upload-time = "2025-10-21T16:26:09.668Z" }, + { url = "https://files.pythonhosted.org/packages/24/01/2691d369ea462cd6b6c92544122885ca01f7fa5ac75dee023e975e675858/grpcio_tools-1.76.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b8da4d828883913f1852bdd67383713ae5c11842f6c70f93f31893eab530aead", size = 2656214, upload-time = "2025-10-21T16:26:11.773Z" }, + { url = "https://files.pythonhosted.org/packages/6a/e7/3f8856e6ec3dd492336a91572993344966f237b0e3819fbe96437b19d313/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = 
"sha256:5c120c2cf4443121800e7f9bcfe2e94519fa25f3bb0b9882359dd3b252c78a7b", size = 3109889, upload-time = "2025-10-21T16:26:15.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ce5248072e47db276dc7e069e93978dcde490c959788ce7cce8081d0bfdc/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8b7df5591d699cd9076065f1f15049e9c3597e0771bea51c8c97790caf5e4197", size = 3657939, upload-time = "2025-10-21T16:26:17.34Z" }, + { url = "https://files.pythonhosted.org/packages/f6/df/81ff88af93c52135e425cd5ec9fe8b186169c7d5f9e0409bdf2bbedc3919/grpcio_tools-1.76.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a25048c5f984d33e3f5b6ad7618e98736542461213ade1bd6f2fcfe8ce804e3d", size = 3324752, upload-time = "2025-10-21T16:26:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/35/3d/f6b83044afbf6522254a3b509515a00fed16a819c87731a478dbdd1d35c1/grpcio_tools-1.76.0-cp314-cp314-win32.whl", hash = "sha256:4b77ce6b6c17869858cfe14681ad09ed3a8a80e960e96035de1fd87f78158740", size = 1015578, upload-time = "2025-10-21T16:26:22.517Z" }, + { url = "https://files.pythonhosted.org/packages/95/4d/31236cddb7ffb09ba4a49f4f56d2608fec3bbb21c7a0a975d93bca7cd22e/grpcio_tools-1.76.0-cp314-cp314-win_amd64.whl", hash = "sha256:2ccd2c8d041351cc29d0fc4a84529b11ee35494a700b535c1f820b642f2a72fc", size = 1190242, upload-time = "2025-10-21T16:26:25.296Z" }, ] [[package]] @@ -963,16 +963,16 @@ wheels = [ [[package]] name = "langchain" -version = "1.0.0" +version = "1.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "langgraph" }, { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/b8/36078257ba52351608129ee983079a4d77ee69eb1470ee248cd8f5728a31/langchain-1.0.0.tar.gz", hash = "sha256:56bf90d935ac1dda864519372d195ca58757b755dd4c44b87840b67d069085b7", size = 466932, upload-time = "2025-10-17T20:53:20.319Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/d6/bdf9ea27a92ed4685497c2659b5c7f703ba63bac4bd92351ca09bab3b924/langchain-1.0.2.tar.gz", hash = "sha256:22f814c7b4f5f76e945c35924ff288f6dfbe33747db2a029162ef1d4f8566493", size = 473869, upload-time = "2025-10-21T21:08:26.44Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/4d/2758a16ad01716c0fb3fe9ec205fd530eae4528b35a27ff44837c399e032/langchain-1.0.0-py3-none-any.whl", hash = "sha256:8c95e41250fc86d09a978fbdf999f86c18d50a28a2addc5da88546af00a1ad15", size = 106202, upload-time = "2025-10-17T20:53:18.685Z" }, + { url = "https://files.pythonhosted.org/packages/8c/06/0e03587da37173c29a58bf17312793c2453df9ca2912e9adfe869c120437/langchain-1.0.2-py3-none-any.whl", hash = "sha256:e0c5647ea47cde7feb9534f56f4496c7f86a45084ad9bd152e7b19739f210ead", size = 107831, upload-time = "2025-10-21T21:08:25.009Z" }, ] [[package]] @@ -1009,15 +1009,15 @@ wheels = [ [[package]] name = "langchain-dev-utils" -version = "1.0.0" +version = "1.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain" }, { name = "langgraph" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/90/5c1fcbf20054bf171800fd5858846c5b1317920f5c4b0c3bc42a4ea0e060/langchain_dev_utils-1.0.0.tar.gz", hash = "sha256:9c6d7dcc479b393734d2b69aca26af19956e0355d5eed3c535d7989ff9803069", size = 160913, upload-time = "2025-10-19T09:36:06.473Z" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/ee/f8f6138ccdca74899c90a89b8541558b790374ded4ef4bc2ca5a56d390cd/langchain_dev_utils-1.0.3.tar.gz", hash = 
"sha256:3d351ca8424eb48f841762c38a2918af50821dfba0610ec0776cebad4da26ea3", size = 170683, upload-time = "2025-10-21T02:33:55.188Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/11/cb/79c0698011981b8e2065d40ffb3e02ba0f0d88b57b3e523a4903b38b7eeb/langchain_dev_utils-1.0.0-py3-none-any.whl", hash = "sha256:3c690da88a5c6578eb17ca14a397cd7fb65c887fdba93127a531150061dea9dd", size = 31113, upload-time = "2025-10-19T09:35:51.899Z" }, + { url = "https://files.pythonhosted.org/packages/3d/80/32603dabf324e5814248c56b486f3cb0f0dc617a4fc3983780d0a8a98eb8/langchain_dev_utils-1.0.3-py3-none-any.whl", hash = "sha256:bd985b7279d27c2535e9c55235e6bd93848c7fbbb7f598b6aa2abbb07ff7cd63", size = 38226, upload-time = "2025-10-21T02:33:53.332Z" }, ] [[package]] @@ -1036,21 +1036,21 @@ wheels = [ [[package]] name = "langchain-openai" -version = "1.0.0" +version = "1.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "openai" }, { name = "tiktoken" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bc/16/e7f69c584432b33cec31d9e09ffc53b612cc971ba81739d1ff62bb84d5d8/langchain_openai-1.0.0.tar.gz", hash = "sha256:14b4f8e77e2ace62e390fe4824f08718b227ed86b476983ce4f0063fa258e966", size = 1023557, upload-time = "2025-10-17T15:43:22.308Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/50/eaa53ac18f63b3e92e6c3a30269714cff477af5a568f486254779a9973f1/langchain_openai-1.0.1.tar.gz", hash = "sha256:78aff09a631fccca08a64f5fc669b325d0f5821490acce024e5da4cf0a08e0d0", size = 1025305, upload-time = "2025-10-21T15:45:06.095Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/1f/0440b1bacc3dbfb8e34d9b478e3f3d333384f32ba6b753907b5ff9dc8c17/langchain_openai-1.0.0-py3-none-any.whl", hash = "sha256:e42b856ae53d2f6f3afd2ecb875056025d8c0b8f313a89fc66eb682c6b958111", size = 80465, upload-time = "2025-10-17T15:43:20.771Z" }, + { url = "https://files.pythonhosted.org/packages/62/c0/06d74093e3e798eb464ef76f53d031235b87feccdadbbf6f7b8409043e4d/langchain_openai-1.0.1-py3-none-any.whl", hash = "sha256:9b61309a7268e7c1c614c554cfd66401519e7434aaefc52de7e251887aceb5f7", size = 81898, upload-time = "2025-10-21T15:45:04.957Z" }, ] [[package]] name = "langgraph" -version = "1.0.0" +version = "1.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, @@ -1060,14 +1060,14 @@ dependencies = [ { name = "pydantic" }, { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/57/f7/7ae10f1832ab1a6a402f451e54d6dab277e28e7d4e4204e070c7897ca71c/langgraph-1.0.0.tar.gz", hash = "sha256:5f83ed0e9bbcc37635bc49cbc9b3d9306605fa07504f955b7a871ed715f9964c", size = 472835, upload-time = "2025-10-17T20:23:38.263Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/7c/a0f4211f751b8b37aae2d88c6243ceb14027ca9ebf00ac8f3b210657af6a/langgraph-1.0.1.tar.gz", hash = "sha256:4985b32ceabb046a802621660836355dfcf2402c5876675dc353db684aa8f563", size = 480245, upload-time = "2025-10-20T18:51:59.839Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/07/42/6f6d0fe4eb661b06da8e6c59e58044e9e4221fdbffdcacae864557de961e/langgraph-1.0.0-py3-none-any.whl", hash = "sha256:4d478781832a1bc67e06c3eb571412ec47d7c57a5467d1f3775adf0e9dd4042c", size = 155416, upload-time = "2025-10-17T20:23:36.978Z" }, + { url = "https://files.pythonhosted.org/packages/b1/3c/acc0956a0da96b25a2c5c1a85168eacf1253639a04ed391d7a7bcaae5d6c/langgraph-1.0.1-py3-none-any.whl", hash = 
"sha256:892f04f64f4889abc80140265cc6bd57823dd8e327a5eef4968875f2cd9013bd", size = 155415, upload-time = "2025-10-20T18:51:58.321Z" }, ] [[package]] name = "langgraph-api" -version = "0.4.43" +version = "0.4.46" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cloudpickle" }, @@ -1096,22 +1096,22 @@ dependencies = [ { name = "uvicorn" }, { name = "watchfiles" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6e/7a/0120874cb8f39c3cd40cc3f2e9c2289cc181be74c897185cf642592bf945/langgraph_api-0.4.43.tar.gz", hash = "sha256:cdd94f941b96c192167d3da0984bac0212d5f43e83cae5635b4e598e1efaff3e", size = 339216, upload-time = "2025-10-16T23:55:08.973Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/47/0918d3e46d67baff427d14e3d24e87772046ba568021a2d21a2d3396bd36/langgraph_api-0.4.46.tar.gz", hash = "sha256:b1c544216cb512c42422a03dd0c95407ea44e37454959e3ee273425f1bb7329b", size = 342137, upload-time = "2025-10-21T21:40:19.043Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/91/8a/81723a26e0b4a865cc67f94f35ae4edd1d84c88e26eaa2622892d1c618fc/langgraph_api-0.4.43-py3-none-any.whl", hash = "sha256:d7369496d761e68ba87982f7e3383cc2398852ed3ed5db2e9aba6e7fe743d8be", size = 256581, upload-time = "2025-10-16T23:55:07.16Z" }, + { url = "https://files.pythonhosted.org/packages/e6/a6/0e7f6a2326ef5d0505372cff67619868d86eb7033f8fcb18bc0226e1a84f/langgraph_api-0.4.46-py3-none-any.whl", hash = "sha256:e61aa4eff6d22e6fd75d56687944d83e35ed6fa8c01ba2e2c9a53ba5d4d083a8", size = 256638, upload-time = "2025-10-21T21:40:17.215Z" }, ] [[package]] name = "langgraph-checkpoint" -version = "2.1.2" +version = "3.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "ormsgpack" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/29/83/6404f6ed23a91d7bc63d7df902d144548434237d017820ceaa8d014035f2/langgraph_checkpoint-2.1.2.tar.gz", hash = "sha256:112e9d067a6eff8937caf198421b1ffba8d9207193f14ac6f89930c1260c06f9", size = 142420, upload-time = "2025-10-07T17:45:17.129Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/cb/2a6dad2f0a14317580cc122e2a60e7f0ecabb50aaa6dc5b7a6a2c94cead7/langgraph_checkpoint-3.0.0.tar.gz", hash = "sha256:f738695ad938878d8f4775d907d9629e9fcd345b1950196effb08f088c52369e", size = 132132, upload-time = "2025-10-20T18:35:49.132Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c4/f2/06bf5addf8ee664291e1b9ffa1f28fc9d97e59806dc7de5aea9844cbf335/langgraph_checkpoint-2.1.2-py3-none-any.whl", hash = "sha256:911ebffb069fd01775d4b5184c04aaafc2962fcdf50cf49d524cd4367c4d0c60", size = 45763, upload-time = "2025-10-07T17:45:16.19Z" }, + { url = "https://files.pythonhosted.org/packages/85/2a/2efe0b5a72c41e3a936c81c5f5d8693987a1b260287ff1bbebaae1b7b888/langgraph_checkpoint-3.0.0-py3-none-any.whl", hash = "sha256:560beb83e629784ab689212a3d60834fb3196b4bbe1d6ac18e5cad5d85d46010", size = 46060, upload-time = "2025-10-20T18:35:48.255Z" }, ] [[package]] @@ -1136,15 +1136,15 @@ inmem = [ [[package]] name = "langgraph-prebuilt" -version = "1.0.0" +version = "1.0.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "langgraph-checkpoint" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/2d/934b1129e217216a0dfaf0f7df0a10cedf2dfafe6cc8e1ee238cafaaa4a7/langgraph_prebuilt-1.0.0.tar.gz", hash = "sha256:eb75dad9aca0137451ca0395aa8541a665b3f60979480b0431d626fd195dcda2", size = 119927, upload-time = 
"2025-10-17T20:15:21.429Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/b6/2bcb992acf67713a3557e51c1955854672ec6c1abe6ba51173a87eb8d825/langgraph_prebuilt-1.0.1.tar.gz", hash = "sha256:ecbfb9024d9d7ed9652dde24eef894650aaab96bf79228e862c503e2a060b469", size = 119918, upload-time = "2025-10-20T18:49:55.991Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/33/2e/ffa698eedc4c355168a9207ee598b2cc74ede92ce2b55c3469ea06978b6e/langgraph_prebuilt-1.0.0-py3-none-any.whl", hash = "sha256:ceaae4c5cee8c1f9b6468f76c114cafebb748aed0c93483b7c450e5a89de9c61", size = 28455, upload-time = "2025-10-17T20:15:20.043Z" }, + { url = "https://files.pythonhosted.org/packages/68/47/9ffd10882403020ea866e381de7f8e504a78f606a914af7f8244456c7783/langgraph_prebuilt-1.0.1-py3-none-any.whl", hash = "sha256:8c02e023538f7ef6ad5ed76219ba1ab4f6de0e31b749e4d278f57a8a95eec9f7", size = 28458, upload-time = "2025-10-20T18:49:54.723Z" }, ] [[package]] @@ -1192,7 +1192,7 @@ wheels = [ [[package]] name = "langgraph-up-devkits" -version = "0.3.0" +version = "0.4.0" source = { editable = "libs/langgraph-up-devkits" } dependencies = [ { name = "langchain" }, @@ -1200,6 +1200,7 @@ dependencies = [ { name = "langchain-dev-utils" }, { name = "langchain-mcp-adapters" }, { name = "langchain-openai" }, + { name = "structlog" }, { name = "tavily-python" }, ] @@ -1230,14 +1231,15 @@ test = [ [package.metadata] requires-dist = [ { name = "black", marker = "extra == 'dev'", specifier = ">=23.0.0" }, - { name = "langchain", specifier = ">=1.0.0" }, + { name = "langchain", specifier = ">=1.0.2" }, { name = "langchain-anthropic", specifier = ">=1.0.0" }, - { name = "langchain-dev-utils", specifier = ">=1.0.0" }, + { name = "langchain-dev-utils", specifier = ">=1.0.2" }, { name = "langchain-mcp-adapters", specifier = ">=0.1.11" }, - { name = "langchain-openai", specifier = ">=1.0.0" }, + { name = "langchain-openai", specifier = ">=1.0.1" }, { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.11.1" }, { name = "pre-commit", marker = "extra == 'dev'", specifier = ">=3.0.0" }, { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1.0" }, + { name = "structlog", specifier = ">=25.4.0" }, { name = "tavily-python", specifier = ">=0.7.12" }, ] provides-extras = ["dev"] @@ -1463,7 +1465,7 @@ wheels = [ [[package]] name = "openai" -version = "2.5.0" +version = "2.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -1475,9 +1477,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/39/aa3767c920c217ef56f27e89cbe3aaa43dd6eea3269c95f045c5761b9df1/openai-2.5.0.tar.gz", hash = "sha256:f8fa7611f96886a0f31ac6b97e58bc0ada494b255ee2cfd51c8eb502cfcb4814", size = 590333, upload-time = "2025-10-17T18:14:47.669Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/c7/e42bcd89dfd47fec8a30b9e20f93e512efdbfbb3391b05bbb79a2fb295fa/openai-2.6.0.tar.gz", hash = "sha256:f119faf7fc07d7e558c1e7c32c873e241439b01bd7480418234291ee8c8f4b9d", size = 592904, upload-time = "2025-10-20T17:17:24.588Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/f3/ebbd700d8dc1e6380a7a382969d96bc0cbea8717b52fb38ff0ca2a7653e8/openai-2.5.0-py3-none-any.whl", hash = "sha256:21380e5f52a71666dbadbf322dd518bdf2b9d11ed0bb3f96bea17310302d6280", size = 999851, upload-time = "2025-10-17T18:14:45.528Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/0a/58e9dcd34abe273eaeac3807a8483073767b5609d01bb78ea2f048e515a0/openai-2.6.0-py3-none-any.whl", hash = "sha256:f33fa12070fe347b5787a7861c8dd397786a4a17e1c3186e239338dac7e2e743", size = 1005403, upload-time = "2025-10-20T17:17:22.091Z" }, ] [[package]] @@ -2119,94 +2121,94 @@ wheels = [ [[package]] name = "regex" -version = "2025.9.18" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/49/d3/eaa0d28aba6ad1827ad1e716d9a93e1ba963ada61887498297d3da715133/regex-2025.9.18.tar.gz", hash = "sha256:c5ba23274c61c6fef447ba6a39333297d0c247f53059dba0bca415cac511edc4", size = 400917, upload-time = "2025-09-19T00:38:35.79Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/58/61/80eda662fc4eb32bfedc331f42390974c9e89c7eac1b79cd9eea4d7c458c/regex-2025.9.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:51076980cd08cd13c88eb7365427ae27f0d94e7cebe9ceb2bb9ffdae8fc4d82a", size = 484832, upload-time = "2025-09-19T00:35:30.011Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d9/33833d9abddf3f07ad48504ddb53fe3b22f353214bbb878a72eee1e3ddbf/regex-2025.9.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:828446870bd7dee4e0cbeed767f07961aa07f0ea3129f38b3ccecebc9742e0b8", size = 288994, upload-time = "2025-09-19T00:35:31.733Z" }, - { url = "https://files.pythonhosted.org/packages/2a/b3/526ee96b0d70ea81980cbc20c3496fa582f775a52e001e2743cc33b2fa75/regex-2025.9.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c28821d5637866479ec4cc23b8c990f5bc6dd24e5e4384ba4a11d38a526e1414", size = 286619, upload-time = "2025-09-19T00:35:33.221Z" }, - { url = "https://files.pythonhosted.org/packages/65/4f/c2c096b02a351b33442aed5895cdd8bf87d372498d2100927c5a053d7ba3/regex-2025.9.18-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:726177ade8e481db669e76bf99de0b278783be8acd11cef71165327abd1f170a", size = 792454, upload-time = "2025-09-19T00:35:35.361Z" }, - { url = "https://files.pythonhosted.org/packages/24/15/b562c9d6e47c403c4b5deb744f8b4bf6e40684cf866c7b077960a925bdff/regex-2025.9.18-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f5cca697da89b9f8ea44115ce3130f6c54c22f541943ac8e9900461edc2b8bd4", size = 858723, upload-time = "2025-09-19T00:35:36.949Z" }, - { url = "https://files.pythonhosted.org/packages/f2/01/dba305409849e85b8a1a681eac4c03ed327d8de37895ddf9dc137f59c140/regex-2025.9.18-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dfbde38f38004703c35666a1e1c088b778e35d55348da2b7b278914491698d6a", size = 905899, upload-time = "2025-09-19T00:35:38.723Z" }, - { url = "https://files.pythonhosted.org/packages/fe/d0/c51d1e6a80eab11ef96a4cbad17fc0310cf68994fb01a7283276b7e5bbd6/regex-2025.9.18-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f2f422214a03fab16bfa495cfec72bee4aaa5731843b771860a471282f1bf74f", size = 798981, upload-time = "2025-09-19T00:35:40.416Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5e/72db90970887bbe02296612bd61b0fa31e6d88aa24f6a4853db3e96c575e/regex-2025.9.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a295916890f4df0902e4286bc7223ee7f9e925daa6dcdec4192364255b70561a", size = 781900, upload-time = "2025-09-19T00:35:42.077Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/ff/596be45eea8e9bc31677fde243fa2904d00aad1b32c31bce26c3dbba0b9e/regex-2025.9.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:5db95ff632dbabc8c38c4e82bf545ab78d902e81160e6e455598014f0abe66b9", size = 852952, upload-time = "2025-09-19T00:35:43.751Z" }, - { url = "https://files.pythonhosted.org/packages/e5/1b/2dfa348fa551e900ed3f5f63f74185b6a08e8a76bc62bc9c106f4f92668b/regex-2025.9.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb967eb441b0f15ae610b7069bdb760b929f267efbf522e814bbbfffdf125ce2", size = 844355, upload-time = "2025-09-19T00:35:45.309Z" }, - { url = "https://files.pythonhosted.org/packages/f4/bf/aefb1def27fe33b8cbbb19c75c13aefccfbef1c6686f8e7f7095705969c7/regex-2025.9.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f04d2f20da4053d96c08f7fde6e1419b7ec9dbcee89c96e3d731fca77f411b95", size = 787254, upload-time = "2025-09-19T00:35:46.904Z" }, - { url = "https://files.pythonhosted.org/packages/e3/4e/8ef042e7cf0dbbb401e784e896acfc1b367b95dfbfc9ada94c2ed55a081f/regex-2025.9.18-cp311-cp311-win32.whl", hash = "sha256:895197241fccf18c0cea7550c80e75f185b8bd55b6924fcae269a1a92c614a07", size = 264129, upload-time = "2025-09-19T00:35:48.597Z" }, - { url = "https://files.pythonhosted.org/packages/b4/7d/c4fcabf80dcdd6821c0578ad9b451f8640b9110fb3dcb74793dd077069ff/regex-2025.9.18-cp311-cp311-win_amd64.whl", hash = "sha256:7e2b414deae99166e22c005e154a5513ac31493db178d8aec92b3269c9cce8c9", size = 276160, upload-time = "2025-09-19T00:36:00.45Z" }, - { url = "https://files.pythonhosted.org/packages/64/f8/0e13c8ae4d6df9d128afaba138342d532283d53a4c1e7a8c93d6756c8f4a/regex-2025.9.18-cp311-cp311-win_arm64.whl", hash = "sha256:fb137ec7c5c54f34a25ff9b31f6b7b0c2757be80176435bf367111e3f71d72df", size = 268471, upload-time = "2025-09-19T00:36:02.149Z" }, - { url = "https://files.pythonhosted.org/packages/b0/99/05859d87a66ae7098222d65748f11ef7f2dff51bfd7482a4e2256c90d72b/regex-2025.9.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:436e1b31d7efd4dcd52091d076482031c611dde58bf9c46ca6d0a26e33053a7e", size = 486335, upload-time = "2025-09-19T00:36:03.661Z" }, - { url = "https://files.pythonhosted.org/packages/97/7e/d43d4e8b978890932cf7b0957fce58c5b08c66f32698f695b0c2c24a48bf/regex-2025.9.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c190af81e5576b9c5fdc708f781a52ff20f8b96386c6e2e0557a78402b029f4a", size = 289720, upload-time = "2025-09-19T00:36:05.471Z" }, - { url = "https://files.pythonhosted.org/packages/bb/3b/ff80886089eb5dcf7e0d2040d9aaed539e25a94300403814bb24cc775058/regex-2025.9.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e4121f1ce2b2b5eec4b397cc1b277686e577e658d8f5870b7eb2d726bd2300ab", size = 287257, upload-time = "2025-09-19T00:36:07.072Z" }, - { url = "https://files.pythonhosted.org/packages/ee/66/243edf49dd8720cba8d5245dd4d6adcb03a1defab7238598c0c97cf549b8/regex-2025.9.18-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:300e25dbbf8299d87205e821a201057f2ef9aa3deb29caa01cd2cac669e508d5", size = 797463, upload-time = "2025-09-19T00:36:08.399Z" }, - { url = "https://files.pythonhosted.org/packages/df/71/c9d25a1142c70432e68bb03211d4a82299cd1c1fbc41db9409a394374ef5/regex-2025.9.18-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7b47fcf9f5316c0bdaf449e879407e1b9937a23c3b369135ca94ebc8d74b1742", size = 862670, upload-time = "2025-09-19T00:36:10.101Z" }, - { url = 
"https://files.pythonhosted.org/packages/f8/8f/329b1efc3a64375a294e3a92d43372bf1a351aa418e83c21f2f01cf6ec41/regex-2025.9.18-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:57a161bd3acaa4b513220b49949b07e252165e6b6dc910ee7617a37ff4f5b425", size = 910881, upload-time = "2025-09-19T00:36:12.223Z" }, - { url = "https://files.pythonhosted.org/packages/35/9e/a91b50332a9750519320ed30ec378b74c996f6befe282cfa6bb6cea7e9fd/regex-2025.9.18-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f130c3a7845ba42de42f380fff3c8aebe89a810747d91bcf56d40a069f15352", size = 802011, upload-time = "2025-09-19T00:36:13.901Z" }, - { url = "https://files.pythonhosted.org/packages/a4/1d/6be3b8d7856b6e0d7ee7f942f437d0a76e0d5622983abbb6d21e21ab9a17/regex-2025.9.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5f96fa342b6f54dcba928dd452e8d8cb9f0d63e711d1721cd765bb9f73bb048d", size = 786668, upload-time = "2025-09-19T00:36:15.391Z" }, - { url = "https://files.pythonhosted.org/packages/cb/ce/4a60e53df58bd157c5156a1736d3636f9910bdcc271d067b32b7fcd0c3a8/regex-2025.9.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f0d676522d68c207828dcd01fb6f214f63f238c283d9f01d85fc664c7c85b56", size = 856578, upload-time = "2025-09-19T00:36:16.845Z" }, - { url = "https://files.pythonhosted.org/packages/86/e8/162c91bfe7217253afccde112868afb239f94703de6580fb235058d506a6/regex-2025.9.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:40532bff8a1a0621e7903ae57fce88feb2e8a9a9116d341701302c9302aef06e", size = 849017, upload-time = "2025-09-19T00:36:18.597Z" }, - { url = "https://files.pythonhosted.org/packages/35/34/42b165bc45289646ea0959a1bc7531733e90b47c56a72067adfe6b3251f6/regex-2025.9.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:039f11b618ce8d71a1c364fdee37da1012f5a3e79b1b2819a9f389cd82fd6282", size = 788150, upload-time = "2025-09-19T00:36:20.464Z" }, - { url = "https://files.pythonhosted.org/packages/79/5d/cdd13b1f3c53afa7191593a7ad2ee24092a5a46417725ffff7f64be8342d/regex-2025.9.18-cp312-cp312-win32.whl", hash = "sha256:e1dd06f981eb226edf87c55d523131ade7285137fbde837c34dc9d1bf309f459", size = 264536, upload-time = "2025-09-19T00:36:21.922Z" }, - { url = "https://files.pythonhosted.org/packages/e0/f5/4a7770c9a522e7d2dc1fa3ffc83ab2ab33b0b22b447e62cffef186805302/regex-2025.9.18-cp312-cp312-win_amd64.whl", hash = "sha256:3d86b5247bf25fa3715e385aa9ff272c307e0636ce0c9595f64568b41f0a9c77", size = 275501, upload-time = "2025-09-19T00:36:23.4Z" }, - { url = "https://files.pythonhosted.org/packages/df/05/9ce3e110e70d225ecbed455b966003a3afda5e58e8aec2964042363a18f4/regex-2025.9.18-cp312-cp312-win_arm64.whl", hash = "sha256:032720248cbeeae6444c269b78cb15664458b7bb9ed02401d3da59fe4d68c3a5", size = 268601, upload-time = "2025-09-19T00:36:25.092Z" }, - { url = "https://files.pythonhosted.org/packages/d2/c7/5c48206a60ce33711cf7dcaeaed10dd737733a3569dc7e1dce324dd48f30/regex-2025.9.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2a40f929cd907c7e8ac7566ac76225a77701a6221bca937bdb70d56cb61f57b2", size = 485955, upload-time = "2025-09-19T00:36:26.822Z" }, - { url = "https://files.pythonhosted.org/packages/e9/be/74fc6bb19a3c491ec1ace943e622b5a8539068771e8705e469b2da2306a7/regex-2025.9.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c90471671c2cdf914e58b6af62420ea9ecd06d1554d7474d50133ff26ae88feb", size = 289583, upload-time = "2025-09-19T00:36:28.577Z" }, - { url = 
"https://files.pythonhosted.org/packages/25/c4/9ceaa433cb5dc515765560f22a19578b95b92ff12526e5a259321c4fc1a0/regex-2025.9.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1a351aff9e07a2dabb5022ead6380cff17a4f10e4feb15f9100ee56c4d6d06af", size = 287000, upload-time = "2025-09-19T00:36:30.161Z" }, - { url = "https://files.pythonhosted.org/packages/7d/e6/68bc9393cb4dc68018456568c048ac035854b042bc7c33cb9b99b0680afa/regex-2025.9.18-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bc4b8e9d16e20ddfe16430c23468a8707ccad3365b06d4536142e71823f3ca29", size = 797535, upload-time = "2025-09-19T00:36:31.876Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/ebae9032d34b78ecfe9bd4b5e6575b55351dc8513485bb92326613732b8c/regex-2025.9.18-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b8cdbddf2db1c5e80338ba2daa3cfa3dec73a46fff2a7dda087c8efbf12d62f", size = 862603, upload-time = "2025-09-19T00:36:33.344Z" }, - { url = "https://files.pythonhosted.org/packages/3b/74/12332c54b3882557a4bcd2b99f8be581f5c6a43cf1660a85b460dd8ff468/regex-2025.9.18-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a276937d9d75085b2c91fb48244349c6954f05ee97bba0963ce24a9d915b8b68", size = 910829, upload-time = "2025-09-19T00:36:34.826Z" }, - { url = "https://files.pythonhosted.org/packages/86/70/ba42d5ed606ee275f2465bfc0e2208755b06cdabd0f4c7c4b614d51b57ab/regex-2025.9.18-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92a8e375ccdc1256401c90e9dc02b8642894443d549ff5e25e36d7cf8a80c783", size = 802059, upload-time = "2025-09-19T00:36:36.664Z" }, - { url = "https://files.pythonhosted.org/packages/da/c5/fcb017e56396a7f2f8357412638d7e2963440b131a3ca549be25774b3641/regex-2025.9.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0dc6893b1f502d73037cf807a321cdc9be29ef3d6219f7970f842475873712ac", size = 786781, upload-time = "2025-09-19T00:36:38.168Z" }, - { url = "https://files.pythonhosted.org/packages/c6/ee/21c4278b973f630adfb3bcb23d09d83625f3ab1ca6e40ebdffe69901c7a1/regex-2025.9.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a61e85bfc63d232ac14b015af1261f826260c8deb19401c0597dbb87a864361e", size = 856578, upload-time = "2025-09-19T00:36:40.129Z" }, - { url = "https://files.pythonhosted.org/packages/87/0b/de51550dc7274324435c8f1539373ac63019b0525ad720132866fff4a16a/regex-2025.9.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1ef86a9ebc53f379d921fb9a7e42b92059ad3ee800fcd9e0fe6181090e9f6c23", size = 849119, upload-time = "2025-09-19T00:36:41.651Z" }, - { url = "https://files.pythonhosted.org/packages/60/52/383d3044fc5154d9ffe4321696ee5b2ee4833a28c29b137c22c33f41885b/regex-2025.9.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d3bc882119764ba3a119fbf2bd4f1b47bc56c1da5d42df4ed54ae1e8e66fdf8f", size = 788219, upload-time = "2025-09-19T00:36:43.575Z" }, - { url = "https://files.pythonhosted.org/packages/20/bd/2614fc302671b7359972ea212f0e3a92df4414aaeacab054a8ce80a86073/regex-2025.9.18-cp313-cp313-win32.whl", hash = "sha256:3810a65675845c3bdfa58c3c7d88624356dd6ee2fc186628295e0969005f928d", size = 264517, upload-time = "2025-09-19T00:36:45.503Z" }, - { url = "https://files.pythonhosted.org/packages/07/0f/ab5c1581e6563a7bffdc1974fb2d25f05689b88e2d416525271f232b1946/regex-2025.9.18-cp313-cp313-win_amd64.whl", hash = "sha256:16eaf74b3c4180ede88f620f299e474913ab6924d5c4b89b3833bc2345d83b3d", size = 275481, 
upload-time = "2025-09-19T00:36:46.965Z" }, - { url = "https://files.pythonhosted.org/packages/49/22/ee47672bc7958f8c5667a587c2600a4fba8b6bab6e86bd6d3e2b5f7cac42/regex-2025.9.18-cp313-cp313-win_arm64.whl", hash = "sha256:4dc98ba7dd66bd1261927a9f49bd5ee2bcb3660f7962f1ec02617280fc00f5eb", size = 268598, upload-time = "2025-09-19T00:36:48.314Z" }, - { url = "https://files.pythonhosted.org/packages/e8/83/6887e16a187c6226cb85d8301e47d3b73ecc4505a3a13d8da2096b44fd76/regex-2025.9.18-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:fe5d50572bc885a0a799410a717c42b1a6b50e2f45872e2b40f4f288f9bce8a2", size = 489765, upload-time = "2025-09-19T00:36:49.996Z" }, - { url = "https://files.pythonhosted.org/packages/51/c5/e2f7325301ea2916ff301c8d963ba66b1b2c1b06694191df80a9c4fea5d0/regex-2025.9.18-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:1b9d9a2d6cda6621551ca8cf7a06f103adf72831153f3c0d982386110870c4d3", size = 291228, upload-time = "2025-09-19T00:36:51.654Z" }, - { url = "https://files.pythonhosted.org/packages/91/60/7d229d2bc6961289e864a3a3cfebf7d0d250e2e65323a8952cbb7e22d824/regex-2025.9.18-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:13202e4c4ac0ef9a317fff817674b293c8f7e8c68d3190377d8d8b749f566e12", size = 289270, upload-time = "2025-09-19T00:36:53.118Z" }, - { url = "https://files.pythonhosted.org/packages/3c/d7/b4f06868ee2958ff6430df89857fbf3d43014bbf35538b6ec96c2704e15d/regex-2025.9.18-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:874ff523b0fecffb090f80ae53dc93538f8db954c8bb5505f05b7787ab3402a0", size = 806326, upload-time = "2025-09-19T00:36:54.631Z" }, - { url = "https://files.pythonhosted.org/packages/d6/e4/bca99034a8f1b9b62ccf337402a8e5b959dd5ba0e5e5b2ead70273df3277/regex-2025.9.18-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d13ab0490128f2bb45d596f754148cd750411afc97e813e4b3a61cf278a23bb6", size = 871556, upload-time = "2025-09-19T00:36:56.208Z" }, - { url = "https://files.pythonhosted.org/packages/6d/df/e06ffaf078a162f6dd6b101a5ea9b44696dca860a48136b3ae4a9caf25e2/regex-2025.9.18-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:05440bc172bc4b4b37fb9667e796597419404dbba62e171e1f826d7d2a9ebcef", size = 913817, upload-time = "2025-09-19T00:36:57.807Z" }, - { url = "https://files.pythonhosted.org/packages/9e/05/25b05480b63292fd8e84800b1648e160ca778127b8d2367a0a258fa2e225/regex-2025.9.18-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5514b8e4031fdfaa3d27e92c75719cbe7f379e28cacd939807289bce76d0e35a", size = 811055, upload-time = "2025-09-19T00:36:59.762Z" }, - { url = "https://files.pythonhosted.org/packages/70/97/7bc7574655eb651ba3a916ed4b1be6798ae97af30104f655d8efd0cab24b/regex-2025.9.18-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:65d3c38c39efce73e0d9dc019697b39903ba25b1ad45ebbd730d2cf32741f40d", size = 794534, upload-time = "2025-09-19T00:37:01.405Z" }, - { url = "https://files.pythonhosted.org/packages/b4/c2/d5da49166a52dda879855ecdba0117f073583db2b39bb47ce9a3378a8e9e/regex-2025.9.18-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ae77e447ebc144d5a26d50055c6ddba1d6ad4a865a560ec7200b8b06bc529368", size = 866684, upload-time = "2025-09-19T00:37:03.441Z" }, - { url = "https://files.pythonhosted.org/packages/bd/2d/0a5c4e6ec417de56b89ff4418ecc72f7e3feca806824c75ad0bbdae0516b/regex-2025.9.18-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:e3ef8cf53dc8df49d7e28a356cf824e3623764e9833348b655cfed4524ab8a90", size = 853282, upload-time = "2025-09-19T00:37:04.985Z" }, - { url = "https://files.pythonhosted.org/packages/f4/8e/d656af63e31a86572ec829665d6fa06eae7e144771e0330650a8bb865635/regex-2025.9.18-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9feb29817df349c976da9a0debf775c5c33fc1c8ad7b9f025825da99374770b7", size = 797830, upload-time = "2025-09-19T00:37:06.697Z" }, - { url = "https://files.pythonhosted.org/packages/db/ce/06edc89df8f7b83ffd321b6071be4c54dc7332c0f77860edc40ce57d757b/regex-2025.9.18-cp313-cp313t-win32.whl", hash = "sha256:168be0d2f9b9d13076940b1ed774f98595b4e3c7fc54584bba81b3cc4181742e", size = 267281, upload-time = "2025-09-19T00:37:08.568Z" }, - { url = "https://files.pythonhosted.org/packages/83/9a/2b5d9c8b307a451fd17068719d971d3634ca29864b89ed5c18e499446d4a/regex-2025.9.18-cp313-cp313t-win_amd64.whl", hash = "sha256:d59ecf3bb549e491c8104fea7313f3563c7b048e01287db0a90485734a70a730", size = 278724, upload-time = "2025-09-19T00:37:10.023Z" }, - { url = "https://files.pythonhosted.org/packages/3d/70/177d31e8089a278a764f8ec9a3faac8d14a312d622a47385d4b43905806f/regex-2025.9.18-cp313-cp313t-win_arm64.whl", hash = "sha256:dbef80defe9fb21310948a2595420b36c6d641d9bea4c991175829b2cc4bc06a", size = 269771, upload-time = "2025-09-19T00:37:13.041Z" }, - { url = "https://files.pythonhosted.org/packages/44/b7/3b4663aa3b4af16819f2ab6a78c4111c7e9b066725d8107753c2257448a5/regex-2025.9.18-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:c6db75b51acf277997f3adcd0ad89045d856190d13359f15ab5dda21581d9129", size = 486130, upload-time = "2025-09-19T00:37:14.527Z" }, - { url = "https://files.pythonhosted.org/packages/80/5b/4533f5d7ac9c6a02a4725fe8883de2aebc713e67e842c04cf02626afb747/regex-2025.9.18-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8f9698b6f6895d6db810e0bda5364f9ceb9e5b11328700a90cae573574f61eea", size = 289539, upload-time = "2025-09-19T00:37:16.356Z" }, - { url = "https://files.pythonhosted.org/packages/b8/8d/5ab6797c2750985f79e9995fad3254caa4520846580f266ae3b56d1cae58/regex-2025.9.18-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:29cd86aa7cb13a37d0f0d7c21d8d949fe402ffa0ea697e635afedd97ab4b69f1", size = 287233, upload-time = "2025-09-19T00:37:18.025Z" }, - { url = "https://files.pythonhosted.org/packages/cb/1e/95afcb02ba8d3a64e6ffeb801718ce73471ad6440c55d993f65a4a5e7a92/regex-2025.9.18-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c9f285a071ee55cd9583ba24dde006e53e17780bb309baa8e4289cd472bcc47", size = 797876, upload-time = "2025-09-19T00:37:19.609Z" }, - { url = "https://files.pythonhosted.org/packages/c8/fb/720b1f49cec1f3b5a9fea5b34cd22b88b5ebccc8c1b5de9cc6f65eed165a/regex-2025.9.18-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5adf266f730431e3be9021d3e5b8d5ee65e563fec2883ea8093944d21863b379", size = 863385, upload-time = "2025-09-19T00:37:21.65Z" }, - { url = "https://files.pythonhosted.org/packages/a9/ca/e0d07ecf701e1616f015a720dc13b84c582024cbfbb3fc5394ae204adbd7/regex-2025.9.18-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1137cabc0f38807de79e28d3f6e3e3f2cc8cfb26bead754d02e6d1de5f679203", size = 910220, upload-time = "2025-09-19T00:37:23.723Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/45/bba86413b910b708eca705a5af62163d5d396d5f647ed9485580c7025209/regex-2025.9.18-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7cc9e5525cada99699ca9223cce2d52e88c52a3d2a0e842bd53de5497c604164", size = 801827, upload-time = "2025-09-19T00:37:25.684Z" }, - { url = "https://files.pythonhosted.org/packages/b8/a6/740fbd9fcac31a1305a8eed30b44bf0f7f1e042342be0a4722c0365ecfca/regex-2025.9.18-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:bbb9246568f72dce29bcd433517c2be22c7791784b223a810225af3b50d1aafb", size = 786843, upload-time = "2025-09-19T00:37:27.62Z" }, - { url = "https://files.pythonhosted.org/packages/80/a7/0579e8560682645906da640c9055506465d809cb0f5415d9976f417209a6/regex-2025.9.18-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:6a52219a93dd3d92c675383efff6ae18c982e2d7651c792b1e6d121055808743", size = 857430, upload-time = "2025-09-19T00:37:29.362Z" }, - { url = "https://files.pythonhosted.org/packages/8d/9b/4dc96b6c17b38900cc9fee254fc9271d0dde044e82c78c0811b58754fde5/regex-2025.9.18-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:ae9b3840c5bd456780e3ddf2f737ab55a79b790f6409182012718a35c6d43282", size = 848612, upload-time = "2025-09-19T00:37:31.42Z" }, - { url = "https://files.pythonhosted.org/packages/b3/6a/6f659f99bebb1775e5ac81a3fb837b85897c1a4ef5acffd0ff8ffe7e67fb/regex-2025.9.18-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d488c236ac497c46a5ac2005a952c1a0e22a07be9f10c3e735bc7d1209a34773", size = 787967, upload-time = "2025-09-19T00:37:34.019Z" }, - { url = "https://files.pythonhosted.org/packages/61/35/9e35665f097c07cf384a6b90a1ac11b0b1693084a0b7a675b06f760496c6/regex-2025.9.18-cp314-cp314-win32.whl", hash = "sha256:0c3506682ea19beefe627a38872d8da65cc01ffa25ed3f2e422dffa1474f0788", size = 269847, upload-time = "2025-09-19T00:37:35.759Z" }, - { url = "https://files.pythonhosted.org/packages/af/64/27594dbe0f1590b82de2821ebfe9a359b44dcb9b65524876cd12fabc447b/regex-2025.9.18-cp314-cp314-win_amd64.whl", hash = "sha256:57929d0f92bebb2d1a83af372cd0ffba2263f13f376e19b1e4fa32aec4efddc3", size = 278755, upload-time = "2025-09-19T00:37:37.367Z" }, - { url = "https://files.pythonhosted.org/packages/30/a3/0cd8d0d342886bd7d7f252d701b20ae1a3c72dc7f34ef4b2d17790280a09/regex-2025.9.18-cp314-cp314-win_arm64.whl", hash = "sha256:6a4b44df31d34fa51aa5c995d3aa3c999cec4d69b9bd414a8be51984d859f06d", size = 271873, upload-time = "2025-09-19T00:37:39.125Z" }, - { url = "https://files.pythonhosted.org/packages/99/cb/8a1ab05ecf404e18b54348e293d9b7a60ec2bd7aa59e637020c5eea852e8/regex-2025.9.18-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:b176326bcd544b5e9b17d6943f807697c0cb7351f6cfb45bf5637c95ff7e6306", size = 489773, upload-time = "2025-09-19T00:37:40.968Z" }, - { url = "https://files.pythonhosted.org/packages/93/3b/6543c9b7f7e734d2404fa2863d0d710c907bef99d4598760ed4563d634c3/regex-2025.9.18-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0ffd9e230b826b15b369391bec167baed57c7ce39efc35835448618860995946", size = 291221, upload-time = "2025-09-19T00:37:42.901Z" }, - { url = "https://files.pythonhosted.org/packages/cd/91/e9fdee6ad6bf708d98c5d17fded423dcb0661795a49cba1b4ffb8358377a/regex-2025.9.18-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ec46332c41add73f2b57e2f5b642f991f6b15e50e9f86285e08ffe3a512ac39f", size = 289268, upload-time = "2025-09-19T00:37:44.823Z" }, - { url = 
"https://files.pythonhosted.org/packages/94/a6/bc3e8a918abe4741dadeaeb6c508e3a4ea847ff36030d820d89858f96a6c/regex-2025.9.18-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b80fa342ed1ea095168a3f116637bd1030d39c9ff38dc04e54ef7c521e01fc95", size = 806659, upload-time = "2025-09-19T00:37:46.684Z" }, - { url = "https://files.pythonhosted.org/packages/2b/71/ea62dbeb55d9e6905c7b5a49f75615ea1373afcad95830047e4e310db979/regex-2025.9.18-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f4d97071c0ba40f0cf2a93ed76e660654c399a0a04ab7d85472239460f3da84b", size = 871701, upload-time = "2025-09-19T00:37:48.882Z" }, - { url = "https://files.pythonhosted.org/packages/6a/90/fbe9dedb7dad24a3a4399c0bae64bfa932ec8922a0a9acf7bc88db30b161/regex-2025.9.18-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0ac936537ad87cef9e0e66c5144484206c1354224ee811ab1519a32373e411f3", size = 913742, upload-time = "2025-09-19T00:37:51.015Z" }, - { url = "https://files.pythonhosted.org/packages/f0/1c/47e4a8c0e73d41eb9eb9fdeba3b1b810110a5139a2526e82fd29c2d9f867/regex-2025.9.18-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dec57f96d4def58c422d212d414efe28218d58537b5445cf0c33afb1b4768571", size = 811117, upload-time = "2025-09-19T00:37:52.686Z" }, - { url = "https://files.pythonhosted.org/packages/2a/da/435f29fddfd015111523671e36d30af3342e8136a889159b05c1d9110480/regex-2025.9.18-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:48317233294648bf7cd068857f248e3a57222259a5304d32c7552e2284a1b2ad", size = 794647, upload-time = "2025-09-19T00:37:54.626Z" }, - { url = "https://files.pythonhosted.org/packages/23/66/df5e6dcca25c8bc57ce404eebc7342310a0d218db739d7882c9a2b5974a3/regex-2025.9.18-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:274687e62ea3cf54846a9b25fc48a04459de50af30a7bd0b61a9e38015983494", size = 866747, upload-time = "2025-09-19T00:37:56.367Z" }, - { url = "https://files.pythonhosted.org/packages/82/42/94392b39b531f2e469b2daa40acf454863733b674481fda17462a5ffadac/regex-2025.9.18-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a78722c86a3e7e6aadf9579e3b0ad78d955f2d1f1a8ca4f67d7ca258e8719d4b", size = 853434, upload-time = "2025-09-19T00:37:58.39Z" }, - { url = "https://files.pythonhosted.org/packages/a8/f8/dcc64c7f7bbe58842a8f89622b50c58c3598fbbf4aad0a488d6df2c699f1/regex-2025.9.18-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:06104cd203cdef3ade989a1c45b6215bf42f8b9dd705ecc220c173233f7cba41", size = 798024, upload-time = "2025-09-19T00:38:00.397Z" }, - { url = "https://files.pythonhosted.org/packages/20/8d/edf1c5d5aa98f99a692313db813ec487732946784f8f93145e0153d910e5/regex-2025.9.18-cp314-cp314t-win32.whl", hash = "sha256:2e1eddc06eeaffd249c0adb6fafc19e2118e6308c60df9db27919e96b5656096", size = 273029, upload-time = "2025-09-19T00:38:02.383Z" }, - { url = "https://files.pythonhosted.org/packages/a7/24/02d4e4f88466f17b145f7ea2b2c11af3a942db6222429c2c146accf16054/regex-2025.9.18-cp314-cp314t-win_amd64.whl", hash = "sha256:8620d247fb8c0683ade51217b459cb4a1081c0405a3072235ba43a40d355c09a", size = 282680, upload-time = "2025-09-19T00:38:04.102Z" }, - { url = "https://files.pythonhosted.org/packages/1f/a3/c64894858aaaa454caa7cc47e2f225b04d3ed08ad649eacf58d45817fad2/regex-2025.9.18-cp314-cp314t-win_arm64.whl", hash = "sha256:b7531a8ef61de2c647cdf68b3229b071e46ec326b3138b2180acb4275f470b01", size = 273034, 
upload-time = "2025-09-19T00:38:05.807Z" }, +version = "2025.10.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/c8/1d2160d36b11fbe0a61acb7c3c81ab032d9ec8ad888ac9e0a61b85ab99dd/regex-2025.10.23.tar.gz", hash = "sha256:8cbaf8ceb88f96ae2356d01b9adf5e6306fa42fa6f7eab6b97794e37c959ac26", size = 401266, upload-time = "2025-10-21T15:58:20.23Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/e5/74b7cd5cd76b4171f9793042045bb1726f7856dd56e582fc3e058a7a8a5e/regex-2025.10.23-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6c531155bf9179345e85032052a1e5fe1a696a6abf9cea54b97e8baefff970fd", size = 487960, upload-time = "2025-10-21T15:54:53.253Z" }, + { url = "https://files.pythonhosted.org/packages/b9/08/854fa4b3b20471d1df1c71e831b6a1aa480281e37791e52a2df9641ec5c6/regex-2025.10.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:912e9df4e89d383681268d38ad8f5780d7cccd94ba0e9aa09ca7ab7ab4f8e7eb", size = 290425, upload-time = "2025-10-21T15:54:55.21Z" }, + { url = "https://files.pythonhosted.org/packages/ab/d3/6272b1dd3ca1271661e168762b234ad3e00dbdf4ef0c7b9b72d2d159efa7/regex-2025.10.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f375c61bfc3138b13e762fe0ae76e3bdca92497816936534a0177201666f44f", size = 288278, upload-time = "2025-10-21T15:54:56.862Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/c7b365dd9d9bc0a36e018cb96f2ffb60d2ba8deb589a712b437f67de2920/regex-2025.10.23-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e248cc9446081119128ed002a3801f8031e0c219b5d3c64d3cc627da29ac0a33", size = 793289, upload-time = "2025-10-21T15:54:58.352Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fb/b8fbe9aa16cf0c21f45ec5a6c74b4cecbf1a1c0deb7089d4a6f83a9c1caa/regex-2025.10.23-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b52bf9282fdf401e4f4e721f0f61fc4b159b1307244517789702407dd74e38ca", size = 860321, upload-time = "2025-10-21T15:54:59.813Z" }, + { url = "https://files.pythonhosted.org/packages/b0/81/bf41405c772324926a9bd8a640dedaa42da0e929241834dfce0733070437/regex-2025.10.23-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c084889ab2c59765a0d5ac602fd1c3c244f9b3fcc9a65fdc7ba6b74c5287490", size = 907011, upload-time = "2025-10-21T15:55:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fb/5ad6a8b92d3f88f3797b51bb4ef47499acc2d0b53d2fbe4487a892f37a73/regex-2025.10.23-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d80e8eb79009bdb0936658c44ca06e2fbbca67792013e3818eea3f5f228971c2", size = 800312, upload-time = "2025-10-21T15:55:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/42/48/b4efba0168a2b57f944205d823f8e8a3a1ae6211a34508f014ec2c712f4f/regex-2025.10.23-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6f259118ba87b814a8ec475380aee5f5ae97a75852a3507cf31d055b01b5b40", size = 782839, upload-time = "2025-10-21T15:55:05.641Z" }, + { url = "https://files.pythonhosted.org/packages/13/2a/c9efb4c6c535b0559c1fa8e431e0574d229707c9ca718600366fcfef6801/regex-2025.10.23-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9b8c72a242683dcc72d37595c4f1278dfd7642b769e46700a8df11eab19dfd82", size = 854270, upload-time = "2025-10-21T15:55:07.27Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/2d/68eecc1bdaee020e8ba549502291c9450d90d8590d0552247c9b543ebf7b/regex-2025.10.23-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d7b7a0a3df9952f9965342159e0c1f05384c0f056a47ce8b61034f8cecbe83", size = 845771, upload-time = "2025-10-21T15:55:09.477Z" }, + { url = "https://files.pythonhosted.org/packages/a5/cd/a1ae499cf9b87afb47a67316bbf1037a7c681ffe447c510ed98c0aa2c01c/regex-2025.10.23-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:413bfea20a484c524858125e92b9ce6ffdd0a4b97d4ff96b5859aa119b0f1bdd", size = 788778, upload-time = "2025-10-21T15:55:11.396Z" }, + { url = "https://files.pythonhosted.org/packages/38/f9/70765e63f5ea7d43b2b6cd4ee9d3323f16267e530fb2a420d92d991cf0fc/regex-2025.10.23-cp311-cp311-win32.whl", hash = "sha256:f76deef1f1019a17dad98f408b8f7afc4bd007cbe835ae77b737e8c7f19ae575", size = 265666, upload-time = "2025-10-21T15:55:13.306Z" }, + { url = "https://files.pythonhosted.org/packages/9c/1a/18e9476ee1b63aaec3844d8e1cb21842dc19272c7e86d879bfc0dcc60db3/regex-2025.10.23-cp311-cp311-win_amd64.whl", hash = "sha256:59bba9f7125536f23fdab5deeea08da0c287a64c1d3acc1c7e99515809824de8", size = 277600, upload-time = "2025-10-21T15:55:15.087Z" }, + { url = "https://files.pythonhosted.org/packages/1d/1b/c019167b1f7a8ec77251457e3ff0339ed74ca8bce1ea13138dc98309c923/regex-2025.10.23-cp311-cp311-win_arm64.whl", hash = "sha256:b103a752b6f1632ca420225718d6ed83f6a6ced3016dd0a4ab9a6825312de566", size = 269974, upload-time = "2025-10-21T15:55:16.841Z" }, + { url = "https://files.pythonhosted.org/packages/f6/57/eeb274d83ab189d02d778851b1ac478477522a92b52edfa6e2ae9ff84679/regex-2025.10.23-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:7a44d9c00f7a0a02d3b777429281376370f3d13d2c75ae74eb94e11ebcf4a7fc", size = 489187, upload-time = "2025-10-21T15:55:18.322Z" }, + { url = "https://files.pythonhosted.org/packages/55/5c/7dad43a9b6ea88bf77e0b8b7729a4c36978e1043165034212fd2702880c6/regex-2025.10.23-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b83601f84fde939ae3478bb32a3aef36f61b58c3208d825c7e8ce1a735f143f2", size = 291122, upload-time = "2025-10-21T15:55:20.2Z" }, + { url = "https://files.pythonhosted.org/packages/66/21/38b71e6f2818f0f4b281c8fba8d9d57cfca7b032a648fa59696e0a54376a/regex-2025.10.23-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec13647907bb9d15fd192bbfe89ff06612e098a5709e7d6ecabbdd8f7908fc45", size = 288797, upload-time = "2025-10-21T15:55:21.932Z" }, + { url = "https://files.pythonhosted.org/packages/be/95/888f069c89e7729732a6d7cca37f76b44bfb53a1e35dda8a2c7b65c1b992/regex-2025.10.23-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78d76dd2957d62501084e7012ddafc5fcd406dd982b7a9ca1ea76e8eaaf73e7e", size = 798442, upload-time = "2025-10-21T15:55:23.747Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/4f903c608faf786627a8ee17c06e0067b5acade473678b69c8094b248705/regex-2025.10.23-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8668e5f067e31a47699ebb354f43aeb9c0ef136f915bd864243098524482ac43", size = 864039, upload-time = "2025-10-21T15:55:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/62/19/2df67b526bf25756c7f447dde554fc10a220fd839cc642f50857d01e4a7b/regex-2025.10.23-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a32433fe3deb4b2d8eda88790d2808fed0dc097e84f5e683b4cd4f42edef6cca", size = 912057, upload-time = "2025-10-21T15:55:27.309Z" }, + { 
url = "https://files.pythonhosted.org/packages/99/14/9a39b7c9e007968411bc3c843cc14cf15437510c0a9991f080cab654fd16/regex-2025.10.23-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d97d73818c642c938db14c0668167f8d39520ca9d983604575ade3fda193afcc", size = 803374, upload-time = "2025-10-21T15:55:28.9Z" }, + { url = "https://files.pythonhosted.org/packages/d4/f7/3495151dd3ca79949599b6d069b72a61a2c5e24fc441dccc79dcaf708fe6/regex-2025.10.23-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bca7feecc72ee33579e9f6ddf8babbe473045717a0e7dbc347099530f96e8b9a", size = 787714, upload-time = "2025-10-21T15:55:30.628Z" }, + { url = "https://files.pythonhosted.org/packages/28/65/ee882455e051131869957ee8597faea45188c9a98c0dad724cfb302d4580/regex-2025.10.23-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7e24af51e907d7457cc4a72691ec458320b9ae67dc492f63209f01eecb09de32", size = 858392, upload-time = "2025-10-21T15:55:32.322Z" }, + { url = "https://files.pythonhosted.org/packages/53/25/9287fef5be97529ebd3ac79d256159cb709a07eb58d4be780d1ca3885da8/regex-2025.10.23-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d10bcde58bbdf18146f3a69ec46dd03233b94a4a5632af97aa5378da3a47d288", size = 850484, upload-time = "2025-10-21T15:55:34.037Z" }, + { url = "https://files.pythonhosted.org/packages/f3/b4/b49b88b4fea2f14dc73e5b5842755e782fc2e52f74423d6f4adc130d5880/regex-2025.10.23-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:44383bc0c933388516c2692c9a7503e1f4a67e982f20b9a29d2fb70c6494f147", size = 789634, upload-time = "2025-10-21T15:55:35.958Z" }, + { url = "https://files.pythonhosted.org/packages/b6/3c/2f8d199d0e84e78bcd6bdc2be9b62410624f6b796e2893d1837ae738b160/regex-2025.10.23-cp312-cp312-win32.whl", hash = "sha256:6040a86f95438a0114bba16e51dfe27f1bc004fd29fe725f54a586f6d522b079", size = 266060, upload-time = "2025-10-21T15:55:37.902Z" }, + { url = "https://files.pythonhosted.org/packages/d7/67/c35e80969f6ded306ad70b0698863310bdf36aca57ad792f45ddc0e2271f/regex-2025.10.23-cp312-cp312-win_amd64.whl", hash = "sha256:436b4c4352fe0762e3bfa34a5567079baa2ef22aa9c37cf4d128979ccfcad842", size = 276931, upload-time = "2025-10-21T15:55:39.502Z" }, + { url = "https://files.pythonhosted.org/packages/f5/a1/4ed147de7d2b60174f758412c87fa51ada15cd3296a0ff047f4280aaa7ca/regex-2025.10.23-cp312-cp312-win_arm64.whl", hash = "sha256:f4b1b1991617055b46aff6f6db24888c1f05f4db9801349d23f09ed0714a9335", size = 270103, upload-time = "2025-10-21T15:55:41.24Z" }, + { url = "https://files.pythonhosted.org/packages/28/c6/195a6217a43719d5a6a12cc192a22d12c40290cecfa577f00f4fb822f07d/regex-2025.10.23-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:b7690f95404a1293923a296981fd943cca12c31a41af9c21ba3edd06398fc193", size = 488956, upload-time = "2025-10-21T15:55:42.887Z" }, + { url = "https://files.pythonhosted.org/packages/4c/93/181070cd1aa2fa541ff2d3afcf763ceecd4937b34c615fa92765020a6c90/regex-2025.10.23-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1a32d77aeaea58a13230100dd8797ac1a84c457f3af2fdf0d81ea689d5a9105b", size = 290997, upload-time = "2025-10-21T15:55:44.53Z" }, + { url = "https://files.pythonhosted.org/packages/b6/c5/9d37fbe3a40ed8dda78c23e1263002497540c0d1522ed75482ef6c2000f0/regex-2025.10.23-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b24b29402f264f70a3c81f45974323b41764ff7159655360543b7cabb73e7d2f", size = 288686, upload-time = "2025-10-21T15:55:46.186Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/e7/db610ff9f10c2921f9b6ac0c8d8be4681b28ddd40fc0549429366967e61f/regex-2025.10.23-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:563824a08c7c03d96856d84b46fdb3bbb7cfbdf79da7ef68725cda2ce169c72a", size = 798466, upload-time = "2025-10-21T15:55:48.24Z" }, + { url = "https://files.pythonhosted.org/packages/90/10/aab883e1fa7fe2feb15ac663026e70ca0ae1411efa0c7a4a0342d9545015/regex-2025.10.23-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0ec8bdd88d2e2659c3518087ee34b37e20bd169419ffead4240a7004e8ed03b", size = 863996, upload-time = "2025-10-21T15:55:50.478Z" }, + { url = "https://files.pythonhosted.org/packages/a2/b0/8f686dd97a51f3b37d0238cd00a6d0f9ccabe701f05b56de1918571d0d61/regex-2025.10.23-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b577601bfe1d33913fcd9276d7607bbac827c4798d9e14d04bf37d417a6c41cb", size = 912145, upload-time = "2025-10-21T15:55:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ca/639f8cd5b08797bca38fc5e7e07f76641a428cf8c7fca05894caf045aa32/regex-2025.10.23-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c9f2c68ac6cb3de94eea08a437a75eaa2bd33f9e97c84836ca0b610a5804368", size = 803370, upload-time = "2025-10-21T15:55:53.944Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1e/a40725bb76959eddf8abc42a967bed6f4851b39f5ac4f20e9794d7832aa5/regex-2025.10.23-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89f8b9ea3830c79468e26b0e21c3585f69f105157c2154a36f6b7839f8afb351", size = 787767, upload-time = "2025-10-21T15:55:56.004Z" }, + { url = "https://files.pythonhosted.org/packages/3d/d8/8ee9858062936b0f99656dce390aa667c6e7fb0c357b1b9bf76fb5e2e708/regex-2025.10.23-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:98fd84c4e4ea185b3bb5bf065261ab45867d8875032f358a435647285c722673", size = 858335, upload-time = "2025-10-21T15:55:58.185Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0a/ed5faaa63fa8e3064ab670e08061fbf09e3a10235b19630cf0cbb9e48c0a/regex-2025.10.23-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1e11d3e5887b8b096f96b4154dfb902f29c723a9556639586cd140e77e28b313", size = 850402, upload-time = "2025-10-21T15:56:00.023Z" }, + { url = "https://files.pythonhosted.org/packages/79/14/d05f617342f4b2b4a23561da500ca2beab062bfcc408d60680e77ecaf04d/regex-2025.10.23-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f13450328a6634348d47a88367e06b64c9d84980ef6a748f717b13f8ce64e87", size = 789739, upload-time = "2025-10-21T15:56:01.967Z" }, + { url = "https://files.pythonhosted.org/packages/f9/7b/e8ce8eef42a15f2c3461f8b3e6e924bbc86e9605cb534a393aadc8d3aff8/regex-2025.10.23-cp313-cp313-win32.whl", hash = "sha256:37be9296598a30c6a20236248cb8b2c07ffd54d095b75d3a2a2ee5babdc51df1", size = 266054, upload-time = "2025-10-21T15:56:05.291Z" }, + { url = "https://files.pythonhosted.org/packages/71/2d/55184ed6be6473187868d2f2e6a0708195fc58270e62a22cbf26028f2570/regex-2025.10.23-cp313-cp313-win_amd64.whl", hash = "sha256:ea7a3c283ce0f06fe789365841e9174ba05f8db16e2fd6ae00a02df9572c04c0", size = 276917, upload-time = "2025-10-21T15:56:07.303Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d4/927eced0e2bd45c45839e556f987f8c8f8683268dd3c00ad327deb3b0172/regex-2025.10.23-cp313-cp313-win_arm64.whl", hash = "sha256:d9a4953575f300a7bab71afa4cd4ac061c7697c89590a2902b536783eeb49a4f", size = 270105, 
upload-time = "2025-10-21T15:56:09.857Z" }, + { url = "https://files.pythonhosted.org/packages/3e/b3/95b310605285573341fc062d1d30b19a54f857530e86c805f942c4ff7941/regex-2025.10.23-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:7d6606524fa77b3912c9ef52a42ef63c6cfbfc1077e9dc6296cd5da0da286044", size = 491850, upload-time = "2025-10-21T15:56:11.685Z" }, + { url = "https://files.pythonhosted.org/packages/a4/8f/207c2cec01e34e56db1eff606eef46644a60cf1739ecd474627db90ad90b/regex-2025.10.23-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c037aadf4d64bdc38af7db3dbd34877a057ce6524eefcb2914d6d41c56f968cc", size = 292537, upload-time = "2025-10-21T15:56:13.963Z" }, + { url = "https://files.pythonhosted.org/packages/98/3b/025240af4ada1dc0b5f10d73f3e5122d04ce7f8908ab8881e5d82b9d61b6/regex-2025.10.23-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:99018c331fb2529084a0c9b4c713dfa49fafb47c7712422e49467c13a636c656", size = 290904, upload-time = "2025-10-21T15:56:16.016Z" }, + { url = "https://files.pythonhosted.org/packages/81/8e/104ac14e2d3450c43db18ec03e1b96b445a94ae510b60138f00ce2cb7ca1/regex-2025.10.23-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fd8aba965604d70306eb90a35528f776e59112a7114a5162824d43b76fa27f58", size = 807311, upload-time = "2025-10-21T15:56:17.818Z" }, + { url = "https://files.pythonhosted.org/packages/19/63/78aef90141b7ce0be8a18e1782f764f6997ad09de0e05251f0d2503a914a/regex-2025.10.23-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:238e67264b4013e74136c49f883734f68656adf8257bfa13b515626b31b20f8e", size = 873241, upload-time = "2025-10-21T15:56:19.941Z" }, + { url = "https://files.pythonhosted.org/packages/b3/a8/80eb1201bb49ae4dba68a1b284b4211ed9daa8e74dc600018a10a90399fb/regex-2025.10.23-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b2eb48bd9848d66fd04826382f5e8491ae633de3233a3d64d58ceb4ecfa2113a", size = 914794, upload-time = "2025-10-21T15:56:22.488Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d5/1984b6ee93281f360a119a5ca1af6a8ca7d8417861671388bf750becc29b/regex-2025.10.23-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d36591ce06d047d0c0fe2fc5f14bfbd5b4525d08a7b6a279379085e13f0e3d0e", size = 812581, upload-time = "2025-10-21T15:56:24.319Z" }, + { url = "https://files.pythonhosted.org/packages/c4/39/11ebdc6d9927172a64ae237d16763145db6bd45ebb4055c17b88edab72a7/regex-2025.10.23-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b5d4ece8628d6e364302006366cea3ee887db397faebacc5dacf8ef19e064cf8", size = 795346, upload-time = "2025-10-21T15:56:26.232Z" }, + { url = "https://files.pythonhosted.org/packages/3b/b4/89a591bcc08b5e436af43315284bd233ba77daf0cf20e098d7af12f006c1/regex-2025.10.23-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:39a7e8083959cb1c4ff74e483eecb5a65d3b3e1d821b256e54baf61782c906c6", size = 868214, upload-time = "2025-10-21T15:56:28.597Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ff/58ba98409c1dbc8316cdb20dafbc63ed267380a07780cafecaf5012dabc9/regex-2025.10.23-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:842d449a8fefe546f311656cf8c0d6729b08c09a185f1cad94c756210286d6a8", size = 854540, upload-time = "2025-10-21T15:56:30.875Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/f2/4a9e9338d67626e2071b643f828a482712ad15889d7268e11e9a63d6f7e9/regex-2025.10.23-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d614986dc68506be8f00474f4f6960e03e4ca9883f7df47744800e7d7c08a494", size = 799346, upload-time = "2025-10-21T15:56:32.725Z" }, + { url = "https://files.pythonhosted.org/packages/63/be/543d35c46bebf6f7bf2be538cca74d6585f25714700c36f37f01b92df551/regex-2025.10.23-cp313-cp313t-win32.whl", hash = "sha256:a5b7a26b51a9df473ec16a1934d117443a775ceb7b39b78670b2e21893c330c9", size = 268657, upload-time = "2025-10-21T15:56:34.577Z" }, + { url = "https://files.pythonhosted.org/packages/14/9f/4dd6b7b612037158bb2c9bcaa710e6fb3c40ad54af441b9c53b3a137a9f1/regex-2025.10.23-cp313-cp313t-win_amd64.whl", hash = "sha256:ce81c5544a5453f61cb6f548ed358cfb111e3b23f3cd42d250a4077a6be2a7b6", size = 280075, upload-time = "2025-10-21T15:56:36.767Z" }, + { url = "https://files.pythonhosted.org/packages/81/7a/5bd0672aa65d38c8da6747c17c8b441bdb53d816c569e3261013af8e83cf/regex-2025.10.23-cp313-cp313t-win_arm64.whl", hash = "sha256:e9bf7f6699f490e4e43c44757aa179dab24d1960999c84ab5c3d5377714ed473", size = 271219, upload-time = "2025-10-21T15:56:39.033Z" }, + { url = "https://files.pythonhosted.org/packages/73/f6/0caf29fec943f201fbc8822879c99d31e59c1d51a983d9843ee5cf398539/regex-2025.10.23-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:5b5cb5b6344c4c4c24b2dc87b0bfee78202b07ef7633385df70da7fcf6f7cec6", size = 488960, upload-time = "2025-10-21T15:56:40.849Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7d/ebb7085b8fa31c24ce0355107cea2b92229d9050552a01c5d291c42aecea/regex-2025.10.23-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a6ce7973384c37bdf0f371a843f95a6e6f4e1489e10e0cf57330198df72959c5", size = 290932, upload-time = "2025-10-21T15:56:42.875Z" }, + { url = "https://files.pythonhosted.org/packages/27/41/43906867287cbb5ca4cee671c3cc8081e15deef86a8189c3aad9ac9f6b4d/regex-2025.10.23-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:2ee3663f2c334959016b56e3bd0dd187cbc73f948e3a3af14c3caaa0c3035d10", size = 288766, upload-time = "2025-10-21T15:56:44.894Z" }, + { url = "https://files.pythonhosted.org/packages/ab/9e/ea66132776700fc77a39b1056e7a5f1308032fead94507e208dc6716b7cd/regex-2025.10.23-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2003cc82a579107e70d013482acce8ba773293f2db534fb532738395c557ff34", size = 798884, upload-time = "2025-10-21T15:56:47.178Z" }, + { url = "https://files.pythonhosted.org/packages/d5/99/aed1453687ab63819a443930770db972c5c8064421f0d9f5da9ad029f26b/regex-2025.10.23-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:182c452279365a93a9f45874f7f191ec1c51e1f1eb41bf2b16563f1a40c1da3a", size = 864768, upload-time = "2025-10-21T15:56:49.793Z" }, + { url = "https://files.pythonhosted.org/packages/99/5d/732fe747a1304805eb3853ce6337eea16b169f7105a0d0dd9c6a5ffa9948/regex-2025.10.23-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b1249e9ff581c5b658c8f0437f883b01f1edcf424a16388591e7c05e5e9e8b0c", size = 911394, upload-time = "2025-10-21T15:56:52.186Z" }, + { url = "https://files.pythonhosted.org/packages/5e/48/58a1f6623466522352a6efa153b9a3714fc559d9f930e9bc947b4a88a2c3/regex-2025.10.23-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b841698f93db3ccc36caa1900d2a3be281d9539b822dc012f08fc80b46a3224", size = 
803145, upload-time = "2025-10-21T15:56:55.142Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f6/7dea79be2681a5574ab3fc237aa53b2c1dfd6bd2b44d4640b6c76f33f4c1/regex-2025.10.23-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:956d89e0c92d471e8f7eee73f73fdff5ed345886378c45a43175a77538a1ffe4", size = 787831, upload-time = "2025-10-21T15:56:57.203Z" }, + { url = "https://files.pythonhosted.org/packages/3a/ad/07b76950fbbe65f88120ca2d8d845047c401450f607c99ed38862904671d/regex-2025.10.23-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:5c259cb363299a0d90d63b5c0d7568ee98419861618a95ee9d91a41cb9954462", size = 859162, upload-time = "2025-10-21T15:56:59.195Z" }, + { url = "https://files.pythonhosted.org/packages/41/87/374f3b2021b22aa6a4fc0b750d63f9721e53d1631a238f7a1c343c1cd288/regex-2025.10.23-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:185d2b18c062820b3a40d8fefa223a83f10b20a674bf6e8c4a432e8dfd844627", size = 849899, upload-time = "2025-10-21T15:57:01.747Z" }, + { url = "https://files.pythonhosted.org/packages/12/4a/7f7bb17c5a5a9747249807210e348450dab9212a46ae6d23ebce86ba6a2b/regex-2025.10.23-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:281d87fa790049c2b7c1b4253121edd80b392b19b5a3d28dc2a77579cb2a58ec", size = 789372, upload-time = "2025-10-21T15:57:04.018Z" }, + { url = "https://files.pythonhosted.org/packages/c9/dd/9c7728ff544fea09bbc8635e4c9e7c423b11c24f1a7a14e6ac4831466709/regex-2025.10.23-cp314-cp314-win32.whl", hash = "sha256:63b81eef3656072e4ca87c58084c7a9c2b81d41a300b157be635a8a675aacfb8", size = 271451, upload-time = "2025-10-21T15:57:06.266Z" }, + { url = "https://files.pythonhosted.org/packages/48/f8/ef7837ff858eb74079c4804c10b0403c0b740762e6eedba41062225f7117/regex-2025.10.23-cp314-cp314-win_amd64.whl", hash = "sha256:0967c5b86f274800a34a4ed862dfab56928144d03cb18821c5153f8777947796", size = 280173, upload-time = "2025-10-21T15:57:08.206Z" }, + { url = "https://files.pythonhosted.org/packages/8e/d0/d576e1dbd9885bfcd83d0e90762beea48d9373a6f7ed39170f44ed22e336/regex-2025.10.23-cp314-cp314-win_arm64.whl", hash = "sha256:c70dfe58b0a00b36aa04cdb0f798bf3e0adc31747641f69e191109fd8572c9a9", size = 273206, upload-time = "2025-10-21T15:57:10.367Z" }, + { url = "https://files.pythonhosted.org/packages/a6/d0/2025268315e8b2b7b660039824cb7765a41623e97d4cd421510925400487/regex-2025.10.23-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1f5799ea1787aa6de6c150377d11afad39a38afd033f0c5247aecb997978c422", size = 491854, upload-time = "2025-10-21T15:57:12.526Z" }, + { url = "https://files.pythonhosted.org/packages/44/35/5681c2fec5e8b33454390af209c4353dfc44606bf06d714b0b8bd0454ffe/regex-2025.10.23-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a9639ab7540cfea45ef57d16dcbea2e22de351998d614c3ad2f9778fa3bdd788", size = 292542, upload-time = "2025-10-21T15:57:15.158Z" }, + { url = "https://files.pythonhosted.org/packages/5d/17/184eed05543b724132e4a18149e900f5189001fcfe2d64edaae4fbaf36b4/regex-2025.10.23-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:08f52122c352eb44c3421dab78b9b73a8a77a282cc8314ae576fcaa92b780d10", size = 290903, upload-time = "2025-10-21T15:57:17.108Z" }, + { url = "https://files.pythonhosted.org/packages/25/d0/5e3347aa0db0de382dddfa133a7b0ae72f24b4344f3989398980b44a3924/regex-2025.10.23-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ebf1baebef1c4088ad5a5623decec6b52950f0e4d7a0ae4d48f0a99f8c9cb7d7", size = 807546, upload-time = "2025-10-21T15:57:19.179Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/bb/40c589bbdce1be0c55e9f8159789d58d47a22014f2f820cf2b517a5cd193/regex-2025.10.23-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:16b0f1c2e2d566c562d5c384c2b492646be0a19798532fdc1fdedacc66e3223f", size = 873322, upload-time = "2025-10-21T15:57:21.36Z" }, + { url = "https://files.pythonhosted.org/packages/fe/56/a7e40c01575ac93360e606278d359f91829781a9f7fb6e5aa435039edbda/regex-2025.10.23-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7ada5d9dceafaab92646aa00c10a9efd9b09942dd9b0d7c5a4b73db92cc7e61", size = 914855, upload-time = "2025-10-21T15:57:24.044Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4b/d55587b192763db3163c3f508b3b67b31bb6f5e7a0e08b83013d0a59500a/regex-2025.10.23-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3a36b4005770044bf08edecc798f0e41a75795b9e7c9c12fe29da8d792ef870c", size = 812724, upload-time = "2025-10-21T15:57:26.123Z" }, + { url = "https://files.pythonhosted.org/packages/33/20/18bac334955fbe99d17229f4f8e98d05e4a501ac03a442be8facbb37c304/regex-2025.10.23-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:af7b2661dcc032da1fae82069b5ebf2ac1dfcd5359ef8b35e1367bfc92181432", size = 795439, upload-time = "2025-10-21T15:57:28.497Z" }, + { url = "https://files.pythonhosted.org/packages/67/46/c57266be9df8549c7d85deb4cb82280cb0019e46fff677534c5fa1badfa4/regex-2025.10.23-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:1cb976810ac1416a67562c2e5ba0accf6f928932320fef302e08100ed681b38e", size = 868336, upload-time = "2025-10-21T15:57:30.867Z" }, + { url = "https://files.pythonhosted.org/packages/b8/f3/bd5879e41ef8187fec5e678e94b526a93f99e7bbe0437b0f2b47f9101694/regex-2025.10.23-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:1a56a54be3897d62f54290190fbcd754bff6932934529fbf5b29933da28fcd43", size = 854567, upload-time = "2025-10-21T15:57:33.062Z" }, + { url = "https://files.pythonhosted.org/packages/e6/57/2b6bbdbd2f24dfed5b028033aa17ad8f7d86bb28f1a892cac8b3bc89d059/regex-2025.10.23-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8f3e6d202fb52c2153f532043bbcf618fd177df47b0b306741eb9b60ba96edc3", size = 799565, upload-time = "2025-10-21T15:57:35.153Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ba/a6168f542ba73b151ed81237adf6b869c7b2f7f8d51618111296674e20ee/regex-2025.10.23-cp314-cp314t-win32.whl", hash = "sha256:1fa1186966b2621b1769fd467c7b22e317e6ba2d2cdcecc42ea3089ef04a8521", size = 274428, upload-time = "2025-10-21T15:57:37.996Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a0/c84475e14a2829e9b0864ebf77c3f7da909df9d8acfe2bb540ff0072047c/regex-2025.10.23-cp314-cp314t-win_amd64.whl", hash = "sha256:08a15d40ce28362eac3e78e83d75475147869c1ff86bc93285f43b4f4431a741", size = 284140, upload-time = "2025-10-21T15:57:40.027Z" }, + { url = "https://files.pythonhosted.org/packages/51/33/6a08ade0eee5b8ba79386869fa6f77afeb835b60510f3525db987e2fffc4/regex-2025.10.23-cp314-cp314t-win_arm64.whl", hash = "sha256:a93e97338e1c8ea2649e130dcfbe8cd69bba5e1e163834752ab64dcb4de6d5ed", size = 274497, upload-time = "2025-10-21T15:57:42.389Z" }, ] [[package]] @@ -2464,7 +2466,7 @@ dev = [ [package.metadata] requires-dist = [ - { name = "deepagents", specifier = ">=0.1.1" }, + { name = "deepagents", specifier = ">=0.1.3" }, { name = "langgraph-up-devkits", editable = "libs/langgraph-up-devkits" }, ] From f2891877007823fa481a1b493ae7412a9ee7832d Mon 
Sep 17 00:00:00 2001 From: Haili Zhang Date: Thu, 23 Oct 2025 23:29:26 +0800 Subject: [PATCH 2/2] =?UTF-8?q?=E2=9C=A8=20feat:=20enhance=20HITL=20functi?= =?UTF-8?q?onality=20in=20sample=20deep=20agent?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Update README to include detailed instructions for unit and integration testing, emphasizing the new human-in-the-loop (HITL) testing capabilities. - Implement comprehensive HITL integration tests to verify interrupt functionality with real LLM calls, ensuring robust decision-making processes. - Refactor `make_graph` to accept `interrupt_on` and `subagent_interrupts` configurations, allowing for more granular control over interrupt behavior. - Document in the context schema that HITL interrupt settings are passed directly to `make_graph()` rather than stored in the context. - Introduce a structured prompt for handling tool rejections, guiding the agent's response strategy after user feedback. This commit significantly enhances the sample deep agent's HITL capabilities, improving its responsiveness and adaptability to user interactions. --- apps/sample-deep-agent/README.md | 138 +++++++++++++- .../src/sample_deep_agent/context.py | 4 +- .../src/sample_deep_agent/graph.py | 41 +++- .../src/sample_deep_agent/prompts.py | 33 ++++ .../src/sample_deep_agent/tools/__init__.py | 4 +- .../tests/integration/test_hitl.py | 156 ++++++++++++++++ .../sample-deep-agent/tests/unit/test_hitl.py | 176 ++++++++++++++++++ 7 files changed, 543 insertions(+), 9 deletions(-) create mode 100644 apps/sample-deep-agent/tests/integration/test_hitl.py create mode 100644 apps/sample-deep-agent/tests/unit/test_hitl.py diff --git a/apps/sample-deep-agent/README.md b/apps/sample-deep-agent/README.md index 1f33b38..cf94122 100644 --- a/apps/sample-deep-agent/README.md +++ b/apps/sample-deep-agent/README.md @@ -174,16 +174,146 @@ Automatic storage of: ## Testing -Run the test suite: +### Unit Tests + +Run unit tests (no API keys required): ```bash -# Unit tests make unit sample-deep-agent +``` + +### Integration Tests + +Integration tests require API keys and make real API calls: + +```bash +# Set up environment variables first +export SILICONFLOW_API_KEY=your_key_here +export TAVILY_API_KEY=your_key_here -# Integration tests (requires API keys) +# Run all integration tests make integration sample-deep-agent -# All tests +# Run specific HITL integration test +cd apps/sample-deep-agent +uv run pytest tests/integration/test_hitl.py::TestHITLWorkflow::test_comprehensive_hitl_workflow -v -s +``` + +### Human-in-the-Loop (HITL) Testing + +The agent includes comprehensive HITL integration tests that verify interrupt functionality with real LLM calls. 
+ +#### HITL Configuration + +Configure interrupts by passing `interrupt_on` and `subagent_interrupts` to `make_graph()`: + +```python +from sample_deep_agent.graph import make_graph + +# Define interrupt configuration +interrupt_on = { + "task": {"allowed_decisions": ["approve", "reject"]}, # Only approve/reject + "write_todos": False, # Don't interrupt write_todos + "think_tool": False, # Don't interrupt think_tool + "deep_web_search": True, # Interrupt at top level +} + +subagent_interrupts = { + "research-agent": { + "deep_web_search": True, # Interrupt in subagent too + "think_tool": False, # Don't interrupt think_tool in subagent + } +} + +# Create agent with HITL configuration +agent = make_graph( + config={"configurable": {"max_todos": 1}}, + interrupt_on=interrupt_on, + subagent_interrupts=subagent_interrupts +) +``` + +#### Interrupt Decision Types + +Three types of decisions are supported: + +1. **Approve**: Execute tool with original arguments + ```python + {"type": "approve"} + ``` + +2. **Reject**: Skip tool execution (agent receives error message) + ```python + {"type": "reject"} + ``` + +3. **Edit**: Modify arguments before execution + ```python + { + "type": "edit", + "edited_action": { + "name": "tool_name", + "args": {"modified": "arguments"} + } + } + ``` + +#### HITL Workflow Example + +```python +import uuid +from langchain.messages import HumanMessage +from langgraph.types import Command + +# Use thread_id for state persistence (required for HITL) +thread_id = str(uuid.uuid4()) +thread_config = {"configurable": {"thread_id": thread_id}} + +# Initial invocation +result = await agent.ainvoke( + {"messages": [HumanMessage(content="What are the core features of LangChain v1?")]}, + config=thread_config +) + +# Handle interrupts +while result.get("__interrupt__"): + interrupts = result["__interrupt__"][0].value + action_requests = interrupts["action_requests"] + + # Make decisions for each action + decisions = [] + for action in action_requests: + if action["name"] == "task": + decisions.append({"type": "approve"}) + elif action["name"] == "deep_web_search": + decisions.append({"type": "reject"}) + else: + decisions.append({"type": "approve"}) + + # Resume with decisions (must use same thread_config) + result = await agent.ainvoke( + Command(resume={"decisions": decisions}), + config=thread_config + ) + +# Get final result +final_message = result["messages"][-1] +print(final_message.content) +``` + +#### Key Features Tested + +- ✅ Allowed decisions configuration (restrict to approve/reject only) +- ✅ Top-level tool approval/rejection +- ✅ Subagent-specific interrupt overrides +- ✅ Multiple concurrent tool interrupts +- ✅ Agent resilience when tools are rejected +- ✅ Verification that rejected tools don't execute + +### All Tests + +```bash +# Run all tests across the monorepo make test ``` diff --git a/apps/sample-deep-agent/src/sample_deep_agent/context.py b/apps/sample-deep-agent/src/sample_deep_agent/context.py index aa7cc59..89f6120 100644 --- a/apps/sample-deep-agent/src/sample_deep_agent/context.py +++ b/apps/sample-deep-agent/src/sample_deep_agent/context.py @@ -14,7 +14,7 @@ class DeepAgentContext(BaseAgentContext): """Context configuration for deep agent runtime settings. 
- Extends BaseAgentContext with deep agent specific configuration: + Deep agent specific configuration: - Uses DeepSeek-V3.2-Exp model for advanced reasoning - Higher recursion limit (1000) for complex research workflows - Configurable max_todos for research task management @@ -26,6 +26,8 @@ class DeepAgentContext(BaseAgentContext): - recursion_limit: LangGraph recursion depth (overridden to 1000) - debug: Enable debug logging - user_id: Optional user identifier + + For HITL interrupts, pass interrupt_on directly to make_graph() instead of using context. """ # Override model default for deep agent diff --git a/apps/sample-deep-agent/src/sample_deep_agent/graph.py b/apps/sample-deep-agent/src/sample_deep_agent/graph.py index 2ee4ba2..0463b8f 100644 --- a/apps/sample-deep-agent/src/sample_deep_agent/graph.py +++ b/apps/sample-deep-agent/src/sample_deep_agent/graph.py @@ -3,7 +3,9 @@ from typing import Any from deepagents import create_deep_agent # type: ignore[import-untyped] +from langchain.agents.middleware import InterruptOnConfig from langchain_core.runnables import RunnableConfig +from langgraph.checkpoint.memory import MemorySaver from langgraph_up_devkits import load_chat_model from langgraph_up_devkits.tools import deep_web_search, think_tool @@ -15,14 +17,27 @@ __all__ = ["make_graph", "app", "deep_web_search", "think_tool"] -def make_graph(config: RunnableConfig | None = None) -> Any: +def make_graph( + config: RunnableConfig | None = None, + interrupt_on: dict[str, bool | InterruptOnConfig] | None = None, + subagent_interrupts: dict[str, dict[str, bool | InterruptOnConfig]] | None = None, +) -> Any: """Make deep agent graph based on runtime configuration. Args: config: Optional runtime configuration containing model settings. + interrupt_on: Optional interrupt configuration mapping tool names to interrupt settings. + - True: Enable interrupts with default behavior (approve, edit, reject allowed) + - False: Disable interrupts for this tool + - InterruptOnConfig: Custom configuration with specific allowed decisions + subagent_interrupts: Optional subagent-specific interrupt overrides mapping subagent + names to their interrupt_on configurations. Returns: Compiled deep agent graph ready for deployment. + + Note: + When interrupt_on is provided, a MemorySaver checkpointer is automatically configured. 
""" if config is None: config = {} @@ -38,13 +53,33 @@ def make_graph(config: RunnableConfig | None = None) -> Any: # Load model based on context configuration model = load_chat_model(context.model) - # Create deep agent with research capabilities + # Automatically add checkpointer if interrupts are enabled + checkpointer = MemorySaver() if interrupt_on is not None else None + + # Prepare subagents with interrupt overrides + subagents = RESEARCH_AGENTS + if subagent_interrupts: + # Apply subagent-specific interrupt configurations + subagents = [] + for subagent in RESEARCH_AGENTS: + subagent_name = subagent["name"] + if subagent_name in subagent_interrupts: + # Create a copy with interrupt_on override + subagent_copy = subagent.copy() + subagent_copy["interrupt_on"] = subagent_interrupts[subagent_name] + subagents.append(subagent_copy) + else: + subagents.append(subagent) + + # Create deep agent with research capabilities and interrupt support agent = create_deep_agent( model=model, tools=[deep_web_search, think_tool], system_prompt=get_research_instructions(), - subagents=RESEARCH_AGENTS, + subagents=subagents, context_schema=DeepAgentContext, + interrupt_on=interrupt_on, + checkpointer=checkpointer, ).with_config({"recursion_limit": context.recursion_limit}) return agent diff --git a/apps/sample-deep-agent/src/sample_deep_agent/prompts.py b/apps/sample-deep-agent/src/sample_deep_agent/prompts.py index 9db9a94..c67f6f4 100644 --- a/apps/sample-deep-agent/src/sample_deep_agent/prompts.py +++ b/apps/sample-deep-agent/src/sample_deep_agent/prompts.py @@ -4,6 +4,26 @@ from sample_deep_agent.context import MAX_TODOS, DeepAgentContext +# Shared prompt templates +TOOL_REJECTION_HANDLING = """**HANDLING TOOL REJECTIONS:** +When a tool use request is REJECTED by the user: +1. **IMMEDIATELY use think_tool** to reflect on why the request was rejected +2. **ANALYZE the rejection** - what was the user's concern? What alternative approach should you take? +3. **REVISE YOUR STRATEGY** - create a new plan that addresses the user's concerns +4. **ADAPT YOUR APPROACH** - use different tools or methods that align with user preferences +5. **NEVER repeat the same rejected action** - learn from the feedback and adjust + +**Example think_tool reflection after rejection:** +"The user rejected my [tool_name] request. Let me think about why: +- Perhaps the question can be answered without this tool +- Maybe I should use a different approach or tool +- Or the user wants a different strategy +- Could there be resource or time constraints? +Let me revise my strategy to [alternative approach]..." + +**CRITICAL:** After any tool rejection, ALWAYS use think_tool to reflect and adapt before proceeding. +Do not repeat rejected actions - learn from feedback and adjust your approach.""" + # Sub-agent prompts SUB_RESEARCH_PROMPT = f"""You are a dedicated researcher. Your job is to conduct research based on the users questions. @@ -25,6 +45,8 @@ - TODO 2: Gather expert opinions and academic perspectives on [key aspect] - TODO 3: Synthesize findings into comprehensive analysis +{TOOL_REJECTION_HANDLING} + **CRITICAL:** Always begin with think_tool to plan your approach, then systematically execute your TODO list. Do not use tools randomly without a structured plan. @@ -233,6 +255,17 @@ def get_research_instructions() -> str: ### `think_tool` Use this for strategic planning, coordinating sub-agents, and synthesizing research results. 
+ +{TOOL_REJECTION_HANDLING} + +**Additional context-specific examples:** +- After task delegation rejection: Consider if the task is too complex, needs breaking down, + or should be handled directly +- After deep_web_search rejection: Consider using existing knowledge, delegating to + research-agent, or refining the query + +**REMEMBER:** After ANY rejection (including task delegation), use think_tool to reflect and adapt. +This demonstrates learning and responsiveness to user preferences. """ diff --git a/apps/sample-deep-agent/src/sample_deep_agent/tools/__init__.py b/apps/sample-deep-agent/src/sample_deep_agent/tools/__init__.py index 05688cc..f212d04 100644 --- a/apps/sample-deep-agent/src/sample_deep_agent/tools/__init__.py +++ b/apps/sample-deep-agent/src/sample_deep_agent/tools/__init__.py @@ -1 +1,3 @@ -"""Custom tools for sample deep agent (if any).""" +"""Custom tools for sample deep agent.""" + +__all__: list[str] = [] diff --git a/apps/sample-deep-agent/tests/integration/test_hitl.py b/apps/sample-deep-agent/tests/integration/test_hitl.py new file mode 100644 index 0000000..cf061bf --- /dev/null +++ b/apps/sample-deep-agent/tests/integration/test_hitl.py @@ -0,0 +1,156 @@ +"""Integration tests for human-in-the-loop (HITL) functionality.""" + +import os +import uuid + +import pytest +from langchain.messages import HumanMessage +from langgraph.types import Command + + +@pytest.mark.integration +@pytest.mark.slow +class TestHITLWorkflow: + """Test end-to-end HITL workflows with interrupts.""" + + async def test_comprehensive_hitl_workflow(self): + """Test comprehensive HITL workflow. + + - Top-level task: interrupt with approve/reject only, approve it + - think_tool calls: allow all (no interrupts) + - write_todos: allow but limit to max 1 todo + - All deep_web_search calls: reject all at all levels + - Verify no deep_web_search in final message list + """ + # Skip if no API credentials available + if not os.getenv("SILICONFLOW_API_KEY") or not os.getenv("TAVILY_API_KEY"): + pytest.skip("No API credentials available for integration test") + + from sample_deep_agent.context import DeepAgentContext + from sample_deep_agent.graph import make_graph + + # Create context with max_todos limit + context = DeepAgentContext( + max_todos=1, # Limit to 1 todo to prevent excessive planning + ) + + # Create graph with HITL configuration + from dataclasses import asdict + + config = {"configurable": asdict(context)} + + # Define interrupt configuration + interrupt_on = { + "task": {"allowed_decisions": ["approve", "reject"]}, # Only approve/reject + "write_todos": False, # Don't interrupt write_todos + "think_tool": False, # Don't interrupt think_tool + "deep_web_search": True, # Interrupt at top level + } + + subagent_interrupts = { + "research-agent": { + "deep_web_search": True, # Interrupt in subagent too + "think_tool": False, # Don't interrupt think_tool in subagent + } + } + + agent = make_graph(config, interrupt_on=interrupt_on, subagent_interrupts=subagent_interrupts) + + # Use thread_id for state persistence (required for HITL) + thread_id = str(uuid.uuid4()) + thread_config = {"configurable": {"thread_id": thread_id}} + + try: + # Invoke the agent with a research task that requires web search + result = await agent.ainvoke( + {"messages": [HumanMessage(content="What are the core features of LangChain v1?")]}, + config=thread_config, + ) + except Exception as e: + if "402" in str(e) or "credits" in str(e).lower(): + pytest.skip("Insufficient API credits for integration test") + raise 
+ + # Track statistics + task_approved = False + deep_web_search_rejected_count = 0 + + max_iterations = 20 # Prevent infinite loops + iteration = 0 + + while iteration < max_iterations: + if result.get("__interrupt__"): + interrupts = result["__interrupt__"][0].value + action_requests = interrupts["action_requests"] + + # Check what tools are being interrupted + tool_names = [action["name"] for action in action_requests] + print(f"Iteration {iteration}: Interrupted for tools: {tool_names}") + + # Process each action request + decisions = [] + for action in action_requests: + tool_name = action["name"] + + if tool_name == "task": + if not task_approved: + print("✅ Approving task (only approve/reject allowed)") + decisions.append({"type": "approve"}) + task_approved = True + else: + print("❌ Rejecting subsequent task call") + decisions.append({"type": "reject"}) + + elif tool_name == "deep_web_search": + print(f"❌ Rejecting deep_web_search call #{deep_web_search_rejected_count + 1}") + decisions.append({"type": "reject"}) + deep_web_search_rejected_count += 1 + + else: + # For other tools, approve + print(f"✅ Approving other tool: {tool_name}") + decisions.append({"type": "approve"}) + + # Resume execution with decisions + result = await agent.ainvoke(Command(resume={"decisions": decisions}), config=thread_config) + else: + # No more interrupts - workflow completed + print("Workflow completed without further interrupts") + break + + iteration += 1 + + # Verify we got a result + assert result is not None + assert "messages" in result + assert len(result["messages"]) > 1 + + # Verify task was approved + assert task_approved, "Task should have been approved" + + # Verify at least one deep_web_search was rejected + assert deep_web_search_rejected_count > 0, "Should have rejected at least one deep_web_search call" + + # Verify no deep_web_search was executed (check for ToolMessage responses) + # Note: AIMessages may contain rejected tool_calls, but we check for actual execution + tool_messages = [msg for msg in result["messages"] if msg.__class__.__name__ == "ToolMessage"] + for tool_msg in tool_messages: + # ToolMessage.name contains the tool that was executed + if hasattr(tool_msg, "name"): + assert tool_msg.name != "deep_web_search", ( + f"Found executed deep_web_search in ToolMessage - should have been rejected. 
" + f"Content: {tool_msg.content[:200]}" + ) + + # Report summary + print("\n📊 Summary:") + print(" - task: approved") + print(f" - deep_web_search calls rejected: {deep_web_search_rejected_count}") + print(" - max_todos limit: 1") + print(f" - Total messages in result: {len(result['messages'])}") + + # The agent should have responded + final_message = result["messages"][-1] + assert len(final_message.content) > 0 + + print("\n✅ Successfully completed comprehensive HITL workflow") diff --git a/apps/sample-deep-agent/tests/unit/test_hitl.py b/apps/sample-deep-agent/tests/unit/test_hitl.py new file mode 100644 index 0000000..eed2484 --- /dev/null +++ b/apps/sample-deep-agent/tests/unit/test_hitl.py @@ -0,0 +1,176 @@ +"""Unit tests for human-in-the-loop (HITL) functionality in deep agent.""" + +from unittest.mock import Mock, patch + +from sample_deep_agent.context import DeepAgentContext + + +class TestDeepAgentHITLConfiguration: + """Unit tests for deep agent HITL configuration.""" + + def test_default_context_no_interrupt_fields(self): + """Test that context no longer has interrupt fields (moved to make_graph).""" + context = DeepAgentContext() + + # Interrupt fields should not exist in context anymore + assert not hasattr(context, "enable_interrupts") + assert not hasattr(context, "interrupt_on") + assert not hasattr(context, "subagent_interrupts") + + @patch("sample_deep_agent.graph.create_deep_agent") + @patch("sample_deep_agent.graph.load_chat_model") + def test_checkpointer_added_when_interrupt_on_provided( + self, mock_load_model, mock_create_deep_agent + ): + """Test that checkpointer is automatically added when interrupt_on is provided.""" + from sample_deep_agent.graph import make_graph + + # Setup mocks + mock_model = Mock() + mock_load_model.return_value = mock_model + mock_agent = Mock() + mock_agent.with_config.return_value = mock_agent + mock_create_deep_agent.return_value = mock_agent + + # Create graph with interrupt_on + interrupt_on = {"deep_web_search": True} + make_graph(interrupt_on=interrupt_on) + + # Verify create_deep_agent was called with checkpointer + call_args = mock_create_deep_agent.call_args + assert call_args[1]["checkpointer"] is not None + + @patch("sample_deep_agent.graph.create_deep_agent") + @patch("sample_deep_agent.graph.load_chat_model") + def test_no_checkpointer_when_no_interrupt_on( + self, mock_load_model, mock_create_deep_agent + ): + """Test that checkpointer is not added when interrupt_on is None.""" + from sample_deep_agent.graph import make_graph + + # Setup mocks + mock_model = Mock() + mock_load_model.return_value = mock_model + mock_agent = Mock() + mock_agent.with_config.return_value = mock_agent + mock_create_deep_agent.return_value = mock_agent + + # Create graph without interrupt_on (default) + make_graph() + + # Verify create_deep_agent was called without checkpointer + call_args = mock_create_deep_agent.call_args + assert call_args[1]["checkpointer"] is None + + @patch("sample_deep_agent.graph.create_deep_agent") + @patch("sample_deep_agent.graph.load_chat_model") + def test_interrupt_on_passed_to_create_deep_agent( + self, mock_load_model, mock_create_deep_agent + ): + """Test that interrupt_on configuration is passed to create_deep_agent.""" + from sample_deep_agent.graph import make_graph + + # Setup mocks + mock_model = Mock() + mock_load_model.return_value = mock_model + mock_agent = Mock() + mock_agent.with_config.return_value = mock_agent + mock_create_deep_agent.return_value = mock_agent + + # Create graph with interrupt_on 
config + interrupt_config = {"deep_web_search": True} + make_graph(interrupt_on=interrupt_config) + + # Verify create_deep_agent was called with interrupt_on + call_args = mock_create_deep_agent.call_args + assert call_args[1]["interrupt_on"] == interrupt_config + + @patch("sample_deep_agent.graph.create_deep_agent") + @patch("sample_deep_agent.graph.load_chat_model") + def test_subagent_interrupts_applied_to_subagents( + self, mock_load_model, mock_create_deep_agent + ): + """Test that subagent interrupt overrides are applied correctly.""" + from sample_deep_agent.graph import make_graph + + # Setup mocks + mock_model = Mock() + mock_load_model.return_value = mock_model + mock_agent = Mock() + mock_agent.with_config.return_value = mock_agent + mock_create_deep_agent.return_value = mock_agent + + # Create graph with subagent interrupt overrides + subagent_config = { + "research-agent": { + "deep_web_search": True, + } + } + make_graph(subagent_interrupts=subagent_config) + + # Verify create_deep_agent was called with modified subagents + call_args = mock_create_deep_agent.call_args + subagents = call_args[1]["subagents"] + + # Find the research-agent subagent + research_agent = next( + (s for s in subagents if s["name"] == "research-agent"), + None, + ) + assert research_agent is not None + assert research_agent["interrupt_on"] == {"deep_web_search": True} + + @patch("sample_deep_agent.graph.create_deep_agent") + @patch("sample_deep_agent.graph.load_chat_model") + def test_interrupt_config_with_allowed_decisions( + self, mock_load_model, mock_create_deep_agent + ): + """Test interrupt configuration with custom allowed_decisions.""" + from sample_deep_agent.graph import make_graph + + # Setup mocks + mock_model = Mock() + mock_load_model.return_value = mock_model + mock_agent = Mock() + mock_agent.with_config.return_value = mock_agent + mock_create_deep_agent.return_value = mock_agent + + interrupt_config = { + "deep_web_search": { + "allowed_decisions": ["approve", "reject"] # No editing allowed + } + } + + make_graph(interrupt_on=interrupt_config) + + # Verify create_deep_agent was called with interrupt_on + call_args = mock_create_deep_agent.call_args + assert call_args[1]["interrupt_on"]["deep_web_search"]["allowed_decisions"] == [ + "approve", + "reject", + ] + + @patch("sample_deep_agent.graph.create_deep_agent") + @patch("sample_deep_agent.graph.load_chat_model") + def test_mixed_interrupt_configurations(self, mock_load_model, mock_create_deep_agent): + """Test mixed interrupt configurations (bool and dict).""" + from sample_deep_agent.graph import make_graph + + # Setup mocks + mock_model = Mock() + mock_load_model.return_value = mock_model + mock_agent = Mock() + mock_agent.with_config.return_value = mock_agent + mock_create_deep_agent.return_value = mock_agent + + interrupt_config = { + "deep_web_search": True, # Default behavior + "think_tool": {"allowed_decisions": ["approve"]}, # Must approve + } + + make_graph(interrupt_on=interrupt_config) + + # Verify create_deep_agent was called with interrupt_on + call_args = mock_create_deep_agent.call_args + assert call_args[1]["interrupt_on"]["deep_web_search"] is True + assert call_args[1]["interrupt_on"]["think_tool"]["allowed_decisions"] == ["approve"]