# STL
import logging
import asyncio
from typing import AsyncGenerator, List, Dict, Any, cast

# PDM
from fastapi import APIRouter, HTTPException
from fastapi.responses import JSONResponse, StreamingResponse
from langchain_core.messages import BaseMessage
from langchain_core.runnables import RunnableConfig
from langchain.callbacks.streaming_aiter import AsyncIteratorCallbackHandler
from langchain_core.exceptions import LangChainException

# LOCAL
from api.backend.models import AI as AIRequestModel
from api.backend.ai.clients import (
    llm_instance,
    provider_info,
    AI_PROVIDER_BACKEND,
    convert_to_langchain_messages,
)
from api.backend.ai.schemas import AI
from api.backend.routers.handle_exceptions import handle_exceptions

LOG = logging.getLogger("AI")

ai_router = APIRouter()


async def langchain_chat(messages: List[BaseMessage]) -> AsyncGenerator[str, None]:
    """Stream completion tokens for *messages* from the configured LLM.

    Runs ``llm_instance.astream`` in a background task whose chunks are
    surfaced through an ``AsyncIteratorCallbackHandler``; tokens are yielded
    to the caller as they arrive.

    Args:
        messages: Conversation history as LangChain message objects.

    Yields:
        Token strings, or a human-readable error message when no LLM is
        configured or streaming fails.
    """
    if not llm_instance:
        LOG.error("LLM instance not available")
        yield "An error occurred: LLM not configured."
        return

    callback_handler = AsyncIteratorCallbackHandler()
    run_config = RunnableConfig(callbacks=[callback_handler])

    async def stream_llm_task():
        """Drive the LLM stream; the callback handler captures the chunks."""
        try:
            async for _ in llm_instance.astream(messages, config=run_config):
                pass  # Callback handler processes the chunks
        except LangChainException as e:
            LOG.error(f"LangChain error during streaming: {e}")
            raise
        except Exception as e:
            LOG.error(f"Unexpected error during LLM streaming: {e}", exc_info=True)
            raise
        finally:
            # Guarantee the consumer loop below terminates even on failure.
            if not callback_handler.done.is_set():
                callback_handler.done.set()

    stream_task = asyncio.create_task(stream_llm_task())

    try:
        async for token in callback_handler.aiter():
            yield token
    except Exception as e:
        LOG.error(f"Error in streaming response: {e}", exc_info=True)
        yield f"Streaming error: {str(e)}"
    finally:
        if not stream_task.done():
            stream_task.cancel()
        # Always await the task so an exception raised inside
        # stream_llm_task is retrieved and logged; previously a task that
        # *finished* with an error was never awaited, leaving the failure
        # unreported ("exception was never retrieved").
        try:
            await stream_task
        except asyncio.CancelledError:
            LOG.debug("Stream task cancelled successfully")
        except Exception as e:
            LOG.error(f"Error during stream task cleanup: {e}")
# langchain_chat already degrades gracefully when llm_instance is None
# (it yields a readable error message), so bind it unconditionally instead
# of leaving a None callable for the /ai route to trip over.
chat_function = langchain_chat


@ai_router.get("/ai/check")
@handle_exceptions(logger=LOG)
async def check():
    """Report whether AI is usable and which backend provider is active.

    Returns:
        JSONResponse with:
            ai_system_enabled: True only when an LLM instance exists AND its
                provider reported itself as configured.
            configured_backend_provider: raw AI_PROVIDER_BACKEND env value.
            active_provider_details: the provider_info dict from clients.py.
    """
    return JSONResponse(
        content={
            "ai_system_enabled": bool(
                llm_instance and provider_info.get("configured", False)
            ),
            "configured_backend_provider": AI_PROVIDER_BACKEND,
            "active_provider_details": provider_info,
        }
    )
# STL
import os
import logging
from typing import Optional, Dict, Any, List

# PDM
from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage, AIMessage
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_openai import ChatOpenAI
from langchain_community.chat_models import ChatOllama

LOG = logging.getLogger(__name__)

# Provider selection and credentials, read once at import time.
AI_PROVIDER_BACKEND = os.getenv("AI_PROVIDER_BACKEND")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
OPENAI_MODEL_NAME = os.getenv("OPENAI_MODEL_NAME")
OLLAMA_BASE_URL = os.getenv("OLLAMA_BASE_URL")
OLLAMA_MODEL_NAME = os.getenv("OLLAMA_MODEL_NAME")
OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY")
OPENROUTER_MODEL_NAME = os.getenv("OPENROUTER_MODEL_NAME")

# Module-level state shared with the router; populated by
# initialize_ai_provider() at the bottom of this module.
llm_instance: Optional[BaseChatModel] = None
provider_info: Dict[str, Any] = {
    "name": "None",
    "model": None,
    "configured": False,
    "error": None,
}


class ChatOpenRouter(ChatOpenAI):
    """Custom OpenRouter client extending ChatOpenAI.

    OpenRouter speaks the OpenAI wire protocol, so only the base URL and the
    API key need to change.
    """

    def __init__(self, openai_api_key: Optional[str] = None, **kwargs):
        api_key = openai_api_key or os.environ.get("OPENROUTER_API_KEY")
        super().__init__(
            base_url="https://openrouter.ai/api/v1",
            openai_api_key=api_key,
            **kwargs,
        )


def _error_info(name: str, error_msg: str) -> Dict[str, Any]:
    """Build a provider-info dict for a failed initialization.

    Carries the same keys as the success dict so consumers of provider_info
    (e.g. the /ai/check endpoint) always see a stable schema.
    """
    return {"name": name, "model": None, "configured": False, "error": error_msg}


def create_openai_provider() -> tuple[Optional[BaseChatModel], Dict[str, Any]]:
    """Create an OpenAI chat model, or (None, error-info) when misconfigured."""
    if not OPENAI_API_KEY or not OPENAI_MODEL_NAME:
        return None, _error_info(
            "OpenAI", "OpenAI API key or model name not provided."
        )

    try:
        llm = ChatOpenAI(
            model=OPENAI_MODEL_NAME,
            api_key=OPENAI_API_KEY,
            streaming=True,
            temperature=0.7,
        )
        info = {
            "name": "OpenAI",
            "model": OPENAI_MODEL_NAME,
            "configured": True,
            "error": None,
        }
        LOG.info(f"Initialized OpenAI provider. Model: {OPENAI_MODEL_NAME}")
        return llm, info
    except Exception as e:
        error_msg = f"Failed to initialize OpenAI provider: {e}"
        LOG.error(error_msg)
        return None, _error_info("OpenAI", error_msg)


def create_ollama_provider() -> tuple[Optional[BaseChatModel], Dict[str, Any]]:
    """Create an Ollama chat model, or (None, error-info) when misconfigured."""
    if not OLLAMA_BASE_URL or not OLLAMA_MODEL_NAME:
        return None, _error_info(
            "Ollama", "Ollama base URL or model name not provided."
        )

    try:
        llm = ChatOllama(
            base_url=OLLAMA_BASE_URL,
            model=OLLAMA_MODEL_NAME,
            temperature=0.7,
        )
        info = {
            "name": "Ollama",
            "model": OLLAMA_MODEL_NAME,
            "configured": True,
            "error": None,
        }
        LOG.info(
            f"Initialized Ollama provider. Model: {OLLAMA_MODEL_NAME}, URL: {OLLAMA_BASE_URL}"
        )
        return llm, info
    except Exception as e:
        error_msg = f"Failed to initialize Ollama provider: {e}"
        LOG.error(error_msg)
        return None, _error_info("Ollama", error_msg)


def create_openrouter_provider() -> tuple[Optional[BaseChatModel], Dict[str, Any]]:
    """Create an OpenRouter chat model, or (None, error-info) when misconfigured."""
    if not OPENROUTER_API_KEY or not OPENROUTER_MODEL_NAME:
        return None, _error_info(
            "OpenRouter", "OpenRouter API key or model name not provided."
        )

    try:
        llm = ChatOpenRouter(
            model=OPENROUTER_MODEL_NAME,
            openai_api_key=OPENROUTER_API_KEY,
            streaming=True,
            temperature=0.7,
        )
        info = {
            "name": "OpenRouter",
            "model": OPENROUTER_MODEL_NAME,
            "configured": True,
            "error": None,
        }
        LOG.info(f"Initialized OpenRouter provider. Model: {OPENROUTER_MODEL_NAME}")
        return llm, info
    except Exception as e:
        error_msg = f"Failed to initialize OpenRouter provider: {e}"
        LOG.error(error_msg)
        return None, _error_info("OpenRouter", error_msg)


def initialize_ai_provider() -> None:
    """Select and build the LLM named by AI_PROVIDER_BACKEND.

    Side effects: rebinds the module globals ``llm_instance`` and
    ``provider_info``. Never raises — all failures are recorded in
    provider_info["error"] instead.
    """
    global llm_instance, provider_info

    if not AI_PROVIDER_BACKEND:
        provider_info.update({"configured": False, "error": "No AI provider specified"})
        return

    LOG.info(f"Initializing AI provider: {AI_PROVIDER_BACKEND}")

    provider_factories = {
        "openai": create_openai_provider,
        "ollama": create_ollama_provider,
        "openrouter": create_openrouter_provider,
    }

    factory = provider_factories.get(AI_PROVIDER_BACKEND)
    if not factory:
        error_msg = f"Unsupported AI provider: {AI_PROVIDER_BACKEND}"
        LOG.error(error_msg)
        provider_info.update({"configured": False, "error": error_msg})
        return

    try:
        llm_instance, provider_info = factory()
    except ImportError as e:
        # NOTE(review): provider classes are imported at module load, so this
        # branch is unlikely to fire here — kept as a defensive net.
        error_msg = f"Missing dependencies for {AI_PROVIDER_BACKEND}: {e}"
        LOG.error(error_msg)
        provider_info.update({"configured": False, "error": error_msg})
    except Exception as e:
        error_msg = f"Unexpected error initializing {AI_PROVIDER_BACKEND}: {e}"
        LOG.error(error_msg, exc_info=True)
        provider_info.update({"configured": False, "error": error_msg})


def convert_to_langchain_messages(messages: List[Dict[str, Any]]) -> List[BaseMessage]:
    """Convert role/content dicts to LangChain message objects.

    Unknown roles are logged and treated as user messages; missing keys
    default to role "user" and empty content.
    """
    lc_messages: List[BaseMessage] = []

    for msg_dict in messages:
        role = str(msg_dict.get("role", "user")).lower()
        content = str(msg_dict.get("content", ""))

        if role == "user":
            lc_messages.append(HumanMessage(content=content))
        elif role in ("assistant", "ai"):
            lc_messages.append(AIMessage(content=content))
        elif role == "system":
            lc_messages.append(SystemMessage(content=content))
        else:
            LOG.warning(f"Unknown message role '{role}', treating as user message")
            lc_messages.append(HumanMessage(content=content))

    return lc_messages


async def ask_llm(prompt: str) -> str:
    """Simple non-streaming LLM query (similar to your old ask_open_ai/ask_ollama).

    Raises:
        ValueError: if no LLM instance has been initialized.
    """
    if not llm_instance:
        raise ValueError("LLM client not initialized")

    try:
        messages = [HumanMessage(content=prompt)]
        response = await llm_instance.ainvoke(messages)

        content = response.content
        if not content:
            return ""

        if not isinstance(content, list):
            return str(content)

        # Some providers return content as a list of parts; join the text parts.
        text_parts: List[str] = []
        for item in content:
            if not isinstance(item, dict):
                text_parts.append(str(item))
            else:
                text_value = item.get("text")
                if text_value is not None:
                    text_parts.append(str(text_value))

        return " ".join(text_parts) if text_parts else ""

    except Exception as e:
        LOG.error(f"Error in LLM query: {e}")
        raise


initialize_ai_provider()
OPENAI_KEY=${OPENAI_KEY} + - AI_PROVIDER_BACKEND= # openai | ollama | openrouter | (leave empty to disable ai) + - OPENAI_API_KEY='' + - OPENAI_MODEL_NAME='gpt4o' + - OLLAMA_BASE_URL=http://localhost:11434 + - OLLAMA_MODEL_NAME=llama2 + - OPENROUTER_API_KEY='' + - OPENROUTER_MODEL_NAME='openai/gpt-4' container_name: scraperr_api ports: - 8000:8000 diff --git a/pdm.lock b/pdm.lock index e07823c..e4f6115 100644 --- a/pdm.lock +++ b/pdm.lock @@ -223,14 +223,17 @@ files = [ [[package]] name = "async-timeout" -version = "5.0.1" -requires_python = ">=3.8" +version = "4.0.3" +requires_python = ">=3.7" summary = "Timeout context manager for asyncio programs" groups = ["default"] marker = "python_version < \"3.11\"" +dependencies = [ + "typing-extensions>=3.6.5; python_version < \"3.8\"", +] files = [ - {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, - {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] [[package]] @@ -783,6 +786,21 @@ files = [ {file = "cython-3.1.0.tar.gz", hash = "sha256:1097dd60d43ad0fff614a57524bfd531b35c13a907d13bee2cc2ec152e6bf4a1"}, ] +[[package]] +name = "dataclasses-json" +version = "0.6.7" +requires_python = "<4.0,>=3.7" +summary = "Easily serialize dataclasses to and from JSON." 
+groups = ["default"] +dependencies = [ + "marshmallow<4.0.0,>=3.18.0", + "typing-inspect<1,>=0.4.0", +] +files = [ + {file = "dataclasses_json-0.6.7-py3-none-any.whl", hash = "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a"}, + {file = "dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0"}, +] + [[package]] name = "decorator" version = "5.2.1" @@ -1217,6 +1235,17 @@ files = [ {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] +[[package]] +name = "httpx-sse" +version = "0.4.0" +requires_python = ">=3.8" +summary = "Consume Server-Sent Event (SSE) messages with HTTPX." +groups = ["default"] +files = [ + {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, + {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, +] + [[package]] name = "hyperframe" version = "6.1.0" @@ -1372,6 +1401,31 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "jsonpatch" +version = "1.33" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +summary = "Apply JSON-Patches (RFC 6902) " +groups = ["default"] +dependencies = [ + "jsonpointer>=1.9", +] +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +requires_python = ">=3.7" +summary = "Identify specific nodes in a JSON document (RFC 6901) " +groups = ["default"] +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = 
"sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + [[package]] name = "jwcrypto" version = "1.5.6" @@ -1401,6 +1455,124 @@ files = [ {file = "kaitaistruct-0.10.tar.gz", hash = "sha256:a044dee29173d6afbacf27bcac39daf89b654dd418cfa009ab82d9178a9ae52a"}, ] +[[package]] +name = "langchain" +version = "0.3.25" +requires_python = ">=3.9" +summary = "Building applications with LLMs through composability" +groups = ["default"] +dependencies = [ + "PyYAML>=5.3", + "SQLAlchemy<3,>=1.4", + "async-timeout<5.0.0,>=4.0.0; python_version < \"3.11\"", + "langchain-core<1.0.0,>=0.3.58", + "langchain-text-splitters<1.0.0,>=0.3.8", + "langsmith<0.4,>=0.1.17", + "pydantic<3.0.0,>=2.7.4", + "requests<3,>=2", +] +files = [ + {file = "langchain-0.3.25-py3-none-any.whl", hash = "sha256:931f7d2d1eaf182f9f41c5e3272859cfe7f94fc1f7cef6b3e5a46024b4884c21"}, + {file = "langchain-0.3.25.tar.gz", hash = "sha256:a1d72aa39546a23db08492d7228464af35c9ee83379945535ceef877340d2a3a"}, +] + +[[package]] +name = "langchain-community" +version = "0.3.24" +requires_python = ">=3.9" +summary = "Community contributed LangChain integrations." 
+groups = ["default"] +dependencies = [ + "PyYAML>=5.3", + "SQLAlchemy<3,>=1.4", + "aiohttp<4.0.0,>=3.8.3", + "dataclasses-json<0.7,>=0.5.7", + "httpx-sse<1.0.0,>=0.4.0", + "langchain-core<1.0.0,>=0.3.59", + "langchain<1.0.0,>=0.3.25", + "langsmith<0.4,>=0.1.125", + "numpy>=1.26.2; python_version < \"3.13\"", + "numpy>=2.1.0; python_version >= \"3.13\"", + "pydantic-settings<3.0.0,>=2.4.0", + "requests<3,>=2", + "tenacity!=8.4.0,<10,>=8.1.0", +] +files = [ + {file = "langchain_community-0.3.24-py3-none-any.whl", hash = "sha256:b6cdb376bf1c2f4d2503aca20f8f35f2d5b3d879c52848277f20ce1950e7afaf"}, + {file = "langchain_community-0.3.24.tar.gz", hash = "sha256:62d9e8cf9aadf35182ec3925f9ec1c8e5e84fb4f199f67a01aee496d289dc264"}, +] + +[[package]] +name = "langchain-core" +version = "0.3.60" +requires_python = ">=3.9" +summary = "Building applications with LLMs through composability" +groups = ["default"] +dependencies = [ + "PyYAML>=5.3", + "jsonpatch<2.0,>=1.33", + "langsmith<0.4,>=0.1.126", + "packaging<25,>=23.2", + "pydantic>=2.7.4", + "tenacity!=8.4.0,<10.0.0,>=8.1.0", + "typing-extensions>=4.7", +] +files = [ + {file = "langchain_core-0.3.60-py3-none-any.whl", hash = "sha256:2ccdf06b12e699b1b0962bc02837056c075b4981c3d13f82a4d4c30bb22ea3dc"}, + {file = "langchain_core-0.3.60.tar.gz", hash = "sha256:63dd1bdf7939816115399522661ca85a2f3686a61440f2f46ebd86d1b028595b"}, +] + +[[package]] +name = "langchain-openai" +version = "0.3.17" +requires_python = ">=3.9" +summary = "An integration package connecting OpenAI and LangChain" +groups = ["default"] +dependencies = [ + "langchain-core<1.0.0,>=0.3.59", + "openai<2.0.0,>=1.68.2", + "tiktoken<1,>=0.7", +] +files = [ + {file = "langchain_openai-0.3.17-py3-none-any.whl", hash = "sha256:d4d9cf945e2453ee5895ccd12fd8a3ea9131a0f6130dcc21427c77cc2206b1c0"}, + {file = "langchain_openai-0.3.17.tar.gz", hash = "sha256:10bcdfac3edb3dea4a8aabb12f01566e5ff8756634cc52aa169c62e4c4b73801"}, +] + +[[package]] +name = "langchain-text-splitters" 
+version = "0.3.8" +requires_python = "<4.0,>=3.9" +summary = "LangChain text splitting utilities" +groups = ["default"] +dependencies = [ + "langchain-core<1.0.0,>=0.3.51", +] +files = [ + {file = "langchain_text_splitters-0.3.8-py3-none-any.whl", hash = "sha256:e75cc0f4ae58dcf07d9f18776400cf8ade27fadd4ff6d264df6278bb302f6f02"}, + {file = "langchain_text_splitters-0.3.8.tar.gz", hash = "sha256:116d4b9f2a22dda357d0b79e30acf005c5518177971c66a9f1ab0edfdb0f912e"}, +] + +[[package]] +name = "langsmith" +version = "0.3.42" +requires_python = ">=3.9" +summary = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +groups = ["default"] +dependencies = [ + "httpx<1,>=0.23.0", + "orjson<4.0.0,>=3.9.14; platform_python_implementation != \"PyPy\"", + "packaging>=23.2", + "pydantic<3,>=1; python_full_version < \"3.12.4\"", + "pydantic<3.0.0,>=2.7.4; python_full_version >= \"3.12.4\"", + "requests-toolbelt<2.0.0,>=1.0.0", + "requests<3,>=2", + "zstandard<0.24.0,>=0.23.0", +] +files = [ + {file = "langsmith-0.3.42-py3-none-any.whl", hash = "sha256:18114327f3364385dae4026ebfd57d1c1cb46d8f80931098f0f10abe533475ff"}, + {file = "langsmith-0.3.42.tar.gz", hash = "sha256:2b5cbc450ab808b992362aac6943bb1d285579aa68a3a8be901d30a393458f25"}, +] + [[package]] name = "language-tags" version = "1.2.0" @@ -1607,6 +1779,20 @@ files = [ {file = "lxml-5.4.0.tar.gz", hash = "sha256:d12832e1dbea4be280b22fd0ea7c9b87f0d8fc51ba06e92dc62d52f804f78ebd"}, ] +[[package]] +name = "marshmallow" +version = "3.26.1" +requires_python = ">=3.9" +summary = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+groups = ["default"] +dependencies = [ + "packaging>=17.0", +] +files = [ + {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"}, + {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"}, +] + [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -1847,6 +2033,17 @@ files = [ {file = "mypy_boto3_sqs-1.38.0.tar.gz", hash = "sha256:39aebc121a2fe20f962fd83b617fd916003605d6f6851fdf195337a0aa428fe1"}, ] +[[package]] +name = "mypy-extensions" +version = "1.1.0" +requires_python = ">=3.8" +summary = "Type system extensions for programs checked with the mypy type checker." +groups = ["default"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + [[package]] name = "numpy" version = "2.2.5" @@ -2045,13 +2242,13 @@ files = [ [[package]] name = "packaging" -version = "25.0" +version = "24.2" requires_python = ">=3.8" summary = "Core utilities for Python packages" groups = ["default", "dev"] files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -2473,6 +2670,22 @@ files = [ {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, ] +[[package]] +name = "pydantic-settings" +version = "2.9.1" 
+requires_python = ">=3.9" +summary = "Settings management using Pydantic" +groups = ["default"] +dependencies = [ + "pydantic>=2.7.0", + "python-dotenv>=0.21.0", + "typing-inspection>=0.4.0", +] +files = [ + {file = "pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef"}, + {file = "pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268"}, +] + [[package]] name = "pydantic" version = "2.11.3" @@ -2887,6 +3100,77 @@ files = [ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] +[[package]] +name = "regex" +version = "2024.11.6" +requires_python = ">=3.8" +summary = "Alternative regular expression module, to replace re." +groups = ["default"] +files = [ + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"}, + {file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"}, + {file = 
"regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"}, + {file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"}, + {file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"}, + {file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"}, + {file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"}, + {file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"}, + {file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"}, + {file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"}, + {file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"}, + {file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"}, + {file = 
"regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"}, + {file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"}, + {file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"}, + {file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"}, + {file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = 
"sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"}, + {file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"}, + {file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"}, + {file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"}, + 
{file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"}, + {file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"}, + {file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"}, + {file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"}, + {file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"}, +] + [[package]] name = "requests" version = "2.32.3" @@ -3083,6 +3367,54 @@ files = [ {file = "soupsieve-2.7.tar.gz", hash = "sha256:ad282f9b6926286d2ead4750552c8a6142bc4c783fd66b0293547c8fe6ae126a"}, ] +[[package]] +name = "sqlalchemy" +version = "2.0.41" +requires_python = ">=3.7" +summary = "Database Abstraction Library" +groups = ["default"] +dependencies = [ + "greenlet>=1; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.14\"", + "importlib-metadata; python_version < \"3.8\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = "sqlalchemy-2.0.41-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b1f09b6821406ea1f94053f346f28f8215e293344209129a9c0fcc3578598d7b"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1936af879e3db023601196a1684d28e12f19ccf93af01bf3280a3262c4b6b4e5"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2ac41acfc8d965fb0c464eb8f44995770239668956dc4cdf502d1b1ffe0d747"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:81c24e0c0fde47a9723c81d5806569cddef103aebbf79dbc9fcbb617153dea30"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23a8825495d8b195c4aa9ff1c430c28f2c821e8c5e2d98089228af887e5d7e29"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:60c578c45c949f909a4026b7807044e7e564adf793537fc762b2489d522f3d11"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-win32.whl", hash = "sha256:118c16cd3f1b00c76d69343e38602006c9cfb9998fa4f798606d28d63f23beda"}, + {file = "sqlalchemy-2.0.41-cp310-cp310-win_amd64.whl", hash = "sha256:7492967c3386df69f80cf67efd665c0f667cee67032090fe01d7d74b0e19bb08"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6375cd674fe82d7aa9816d1cb96ec592bac1726c11e0cafbf40eeee9a4516b5f"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f8c9fdd15a55d9465e590a402f42082705d66b05afc3ffd2d2eb3c6ba919560"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32f9dc8c44acdee06c8fc6440db9eae8b4af8b01e4b1aee7bdd7241c22edff4f"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c11ceb9a1f482c752a71f203a81858625d8df5746d787a4786bca4ffdf71c6"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:911cc493ebd60de5f285bcae0491a60b4f2a9f0f5c270edd1c4dbaef7a38fc04"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03968a349db483936c249f4d9cd14ff2c296adfa1290b660ba6516f973139582"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-win32.whl", hash = "sha256:293cd444d82b18da48c9f71cd7005844dbbd06ca19be1ccf6779154439eec0b8"}, + {file = "sqlalchemy-2.0.41-cp311-cp311-win_amd64.whl", hash = "sha256:3d3549fc3e40667ec7199033a4e40a2f669898a00a7b18a931d3efb4c7900504"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:81f413674d85cfd0dfcd6512e10e0f33c19c21860342a4890c3a2b59479929f9"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:598d9ebc1e796431bbd068e41e4de4dc34312b7aa3292571bb3674a0cb415dd1"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a104c5694dfd2d864a6f91b0956eb5d5883234119cb40010115fd45a16da5e70"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6145afea51ff0af7f2564a05fa95eb46f542919e6523729663a5d285ecb3cf5e"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b46fa6eae1cd1c20e6e6f44e19984d438b6b2d8616d21d783d150df714f44078"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41836fe661cc98abfae476e14ba1906220f92c4e528771a8a3ae6a151242d2ae"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-win32.whl", hash = "sha256:a8808d5cf866c781150d36a3c8eb3adccfa41a8105d031bf27e92c251e3969d6"}, + {file = "sqlalchemy-2.0.41-cp312-cp312-win_amd64.whl", hash = "sha256:5b14e97886199c1f52c14629c11d90c11fbb09e9334fa7bb5f6d068d9ced0ce0"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eeb195cdedaf17aab6b247894ff2734dcead6c08f748e617bfe05bd5a218443"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4ae769b9c1c7757e4ccce94b0641bc203bbdf43ba7a2413ab2523d8d047d8dc"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a62448526dd9ed3e3beedc93df9bb6b55a436ed1474db31a2af13b313a70a7e1"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc56c9788617b8964ad02e8fcfeed4001c1f8ba91a9e1f31483c0dffb207002a"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c153265408d18de4cc5ded1941dcd8315894572cddd3c58df5d5b5705b3fa28d"}, + {file = 
"sqlalchemy-2.0.41-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f67766965996e63bb46cfbf2ce5355fc32d9dd3b8ad7e536a920ff9ee422e23"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-win32.whl", hash = "sha256:bfc9064f6658a3d1cadeaa0ba07570b83ce6801a1314985bf98ec9b95d74e15f"}, + {file = "sqlalchemy-2.0.41-cp313-cp313-win_amd64.whl", hash = "sha256:82ca366a844eb551daff9d2e6e7a9e5e76d2612c8564f58db6c19a726869c1df"}, + {file = "sqlalchemy-2.0.41-py3-none-any.whl", hash = "sha256:57df5dc6fdb5ed1a88a1ed2195fd31927e705cad62dedd86b46972752a80f576"}, + {file = "sqlalchemy-2.0.41.tar.gz", hash = "sha256:edba70118c4be3c2b1f90754d308d0b79c6fe2c0fdc52d8ddf603916f83f4db9"}, +] + [[package]] name = "stack-data" version = "0.6.3" @@ -3113,6 +3445,55 @@ files = [ {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, ] +[[package]] +name = "tenacity" +version = "9.1.2" +requires_python = ">=3.9" +summary = "Retry code until it succeeds" +groups = ["default"] +files = [ + {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, + {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, +] + +[[package]] +name = "tiktoken" +version = "0.9.0" +requires_python = ">=3.9" +summary = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +groups = ["default"] +dependencies = [ + "regex>=2022.1.18", + "requests>=2.26.0", +] +files = [ + {file = "tiktoken-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:586c16358138b96ea804c034b8acf3f5d3f0258bd2bc3b0227af4af5d622e382"}, + {file = "tiktoken-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9c59ccc528c6c5dd51820b3474402f69d9a9e1d656226848ad68a8d5b2e5108"}, + {file = "tiktoken-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0968d5beeafbca2a72c595e8385a1a1f8af58feaebb02b227229b69ca5357fd"}, + 
{file = "tiktoken-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a5fb085a6a3b7350b8fc838baf493317ca0e17bd95e8642f95fc69ecfed1de"}, + {file = "tiktoken-0.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15a2752dea63d93b0332fb0ddb05dd909371ededa145fe6a3242f46724fa7990"}, + {file = "tiktoken-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:26113fec3bd7a352e4b33dbaf1bd8948de2507e30bd95a44e2b1156647bc01b4"}, + {file = "tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e"}, + {file = "tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348"}, + {file = "tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33"}, + {file = "tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136"}, + {file = "tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336"}, + {file = "tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb"}, + {file = "tiktoken-0.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e88f121c1c22b726649ce67c089b90ddda8b9662545a8aeb03cfef15967ddd03"}, + {file = "tiktoken-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a6600660f2f72369acb13a57fb3e212434ed38b045fd8cc6cdd74947b4b5d210"}, + {file = "tiktoken-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e811743b5dfa74f4b227927ed86cbc57cad4df859cb3b643be797914e41794"}, + {file = "tiktoken-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:99376e1370d59bcf6935c933cb9ba64adc29033b7e73f5f7569f3aad86552b22"}, + {file = "tiktoken-0.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:badb947c32739fb6ddde173e14885fb3de4d32ab9d8c591cbd013c22b4c31dd2"}, + {file = "tiktoken-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:5a62d7a25225bafed786a524c1b9f0910a1128f4232615bf3f8257a73aaa3b16"}, + {file = "tiktoken-0.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:2b0e8e05a26eda1249e824156d537015480af7ae222ccb798e5234ae0285dbdb"}, + {file = "tiktoken-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:27d457f096f87685195eea0165a1807fae87b97b2161fe8c9b1df5bd74ca6f63"}, + {file = "tiktoken-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf8ded49cddf825390e36dd1ad35cd49589e8161fdcb52aa25f0583e90a3e01"}, + {file = "tiktoken-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc156cb314119a8bb9748257a2eaebd5cc0753b6cb491d26694ed42fc7cb3139"}, + {file = "tiktoken-0.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cd69372e8c9dd761f0ab873112aba55a0e3e506332dd9f7522ca466e817b1b7a"}, + {file = "tiktoken-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:5ea0edb6f83dc56d794723286215918c1cde03712cbbafa0348b33448faf5b95"}, + {file = "tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d"}, +] + [[package]] name = "tomli" version = "2.2.1" @@ -3250,6 +3631,21 @@ files = [ {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] +[[package]] +name = "typing-inspect" +version = "0.9.0" +summary = "Runtime inspection utilities for typing module." 
+groups = ["default"] +dependencies = [ + "mypy-extensions>=0.3.0", + "typing-extensions>=3.7.4", + "typing>=3.7.4; python_version < \"3.5\"", +] +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + [[package]] name = "typing-inspection" version = "0.4.0" diff --git a/pyproject.toml b/pyproject.toml index b66dc43..4a7ad8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,6 +41,9 @@ dependencies = [ "apscheduler>=3.11.0", "playwright>=1.52.0", "camoufox>=0.4.11", + "langchain>=0.3.25", + "langchain-openai>=0.3.17", + "langchain-community>=0.3.24", "html2text>=2025.4.15", "proxy-py>=2.4.10", ] diff --git a/src/components/pages/chat/chat.tsx b/src/components/pages/chat/chat.tsx index a41f1f6..05e5a90 100644 --- a/src/components/pages/chat/chat.tsx +++ b/src/components/pages/chat/chat.tsx @@ -343,7 +343,7 @@ export const AI: React.FC = () => { boxShadow: "0 4px 8px rgba(0, 0, 0, 0.2)", }} > - Must set either OPENAI_KEY or OLLAMA_MODEL to use AI features. + Must set either OPENAI_API_KEY or OLLAMA_MODEL to use AI features. )}