Merge pull request #266 from kyegomez/revert-250-master
Revert "pydantic bump fix for #249 "
kyegomez authored Dec 5, 2023
2 parents 574c1ae + 43198ef commit fa7e1c7
Showing 22 changed files with 166 additions and 134 deletions.
40 changes: 0 additions & 40 deletions .github/workflows/docker-compose.yml

This file was deleted.

3 changes: 1 addition & 2 deletions .github/workflows/test.yml
@@ -10,7 +10,6 @@ on:
env:
POETRY_VERSION: "1.4.2"

jobs:
test:
runs-on: ubuntu-latest
strategy:
@@ -47,7 +46,7 @@ jobs:
make extended_tests
fi
shell: bash

name: Python ${{ matrix.python-version }} ${{ matrix.test_type }}
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
2 changes: 1 addition & 1 deletion .github/workflows/testing.yml
@@ -16,7 +16,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.11
python-version: 3.x

- name: Install dependencies
run: |
2 changes: 0 additions & 2 deletions Dockerfile
@@ -2,8 +2,6 @@
# ==================================
# Use an official Python runtime as a parent image
FROM python:3.9-slim
RUN apt-get update && apt-get -y install libgl1-mesa-dev libglib2.0-0; apt-get clean
RUN pip install opencv-contrib-python-headless

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "swarms"
version = "2.5.8"
version = "2.5.7"
description = "Swarms - Pytorch"
license = "MIT"
authors = ["Kye Gomez <[email protected]>"]
@@ -52,11 +52,11 @@ ratelimit = "*"
beautifulsoup4 = "*"
cohere = "*"
huggingface-hub = "*"
pydantic = "2.*"
pydantic = "1.10.12"
tenacity = "*"
Pillow = "*"
chromadb = "*"
opencv-python-headless
opencv-python-headless = "*"
tabulate = "*"
termcolor = "*"
black = "*"
2 changes: 1 addition & 1 deletion requirements.txt
@@ -17,7 +17,7 @@ faiss-cpu
openai==0.28.0
attrs
datasets
pydantic>2
pydantic==1.10.12
soundfile
huggingface-hub
google-generativeai
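
Both pyproject.toml and requirements.txt pin pydantic back to the 1.10.12 release, so the v1-style API used throughout the codebase (Field's example keyword, root_validator, class-based Config) resolves again. A minimal check, not part of this commit, for confirming which major version an environment actually has installed:

import pydantic

# pydantic exposes its version string under the same name in both major lines.
print(pydantic.VERSION)

# After this revert the 1.x line is expected; the 2.x-only API surface
# (model_validator, ConfigDict, Field(examples=[...])) is no longer used.
assert pydantic.VERSION.startswith("1."), "expected pydantic 1.x after the pin"
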
36 changes: 18 additions & 18 deletions swarms/memory/schemas.py
@@ -12,25 +12,25 @@ class TaskInput(BaseModel):
description=(
"The input parameters for the task. Any value is allowed."
),
examples=['{\n"debug": false,\n"mode": "benchmarks"\n}'],
example='{\n"debug": false,\n"mode": "benchmarks"\n}',
)


class Artifact(BaseModel):
artifact_id: str = Field(
...,
description="Id of the artifact",
examples=["b225e278-8b4c-4f99-a696-8facf19f0e56"],
example="b225e278-8b4c-4f99-a696-8facf19f0e56",
)
file_name: str = Field(
..., description="Filename of the artifact", examples=["main.py"]
..., description="Filename of the artifact", example="main.py"
)
relative_path: Optional[str] = Field(
None,
description=(
"Relative path of the artifact in the agent's workspace"
),
examples=["python/code/"],
example="python/code/",
)


@@ -41,7 +41,7 @@ class ArtifactUpload(BaseModel):
description=(
"Relative path of the artifact in the agent's workspace"
),
examples=["python/code/"],
example="python/code/",
)


@@ -52,7 +52,7 @@ class StepInput(BaseModel):
"Input parameters for the task step. Any value is"
" allowed."
),
examples=['{\n"file_to_refactor": "models.py"\n}'],
example='{\n"file_to_refactor": "models.py"\n}',
)


@@ -63,17 +63,17 @@ class StepOutput(BaseModel):
"Output that the task step has produced. Any value is"
" allowed."
),
examples=['{\n"tokens": 7894,\n"estimated_cost": "0,24$"\n}'],
example='{\n"tokens": 7894,\n"estimated_cost": "0,24$"\n}',
)


class TaskRequestBody(BaseModel):
input: Optional[str] = Field(
None,
description="Input prompt for the task.",
examples=[(
example=(
"Write the words you receive to the file 'output.txt'."
)],
),
)
additional_input: Optional[TaskInput] = None

@@ -82,23 +82,23 @@ class Task(TaskRequestBody):
task_id: str = Field(
...,
description="The ID of the task.",
examples=["50da533e-3904-4401-8a07-c49adf88b5eb"],
example="50da533e-3904-4401-8a07-c49adf88b5eb",
)
artifacts: List[Artifact] = Field(
[],
description="A list of artifacts that the task has produced.",
examples=[[
example=[
"7a49f31c-f9c6-4346-a22c-e32bc5af4d8e",
"ab7b4091-2560-4692-a4fe-d831ea3ca7d6",
]],
],
)


class StepRequestBody(BaseModel):
input: Optional[str] = Field(
None,
description="Input prompt for the step.",
examples=["Washington"],
example="Washington",
)
additional_input: Optional[StepInput] = None

@@ -113,29 +113,29 @@ class Step(StepRequestBody):
task_id: str = Field(
...,
description="The ID of the task this step belongs to.",
examples=["50da533e-3904-4401-8a07-c49adf88b5eb"],
example="50da533e-3904-4401-8a07-c49adf88b5eb",
)
step_id: str = Field(
...,
description="The ID of the task step.",
examples=["6bb1801a-fd80-45e8-899a-4dd723cc602e"],
example="6bb1801a-fd80-45e8-899a-4dd723cc602e",
)
name: Optional[str] = Field(
None,
description="The name of the task step.",
examples=["Write to file"],
example="Write to file",
)
status: Status = Field(
..., description="The status of the task step."
)
output: Optional[str] = Field(
None,
description="Output of the task step.",
examples=[(
example=(
"I am going to use the write_to_file command and write"
" Washington to a file called output.txt"
" <write_to_file('output.txt', 'Washington')"
)],
),
)
additional_output: Optional[StepOutput] = None
artifacts: List[Artifact] = Field(
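
Every hunk in schemas.py is the same mechanical change: the pydantic 2.x Field(examples=[...]) keyword is rolled back to the 1.x Field(example=...) spelling. A standalone sketch, not taken from the repository, showing the two spellings of one field:

from pydantic import BaseModel, Field


class ArtifactSketch(BaseModel):
    # pydantic 1.x: `example` carries a single illustrative value into the
    # generated JSON schema.
    artifact_id: str = Field(
        ...,
        description="Id of the artifact",
        example="b225e278-8b4c-4f99-a696-8facf19f0e56",
    )

# The pydantic 2.x form this commit reverts away from would read:
#     artifact_id: str = Field(
#         ...,
#         description="Id of the artifact",
#         examples=["b225e278-8b4c-4f99-a696-8facf19f0e56"],
#     )
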
36 changes: 26 additions & 10 deletions swarms/models/anthropic.py
@@ -24,7 +24,7 @@
CallbackManagerForLLMRun,
)
from langchain.llms.base import LLM

from pydantic import Field, SecretStr, root_validator
from langchain.schema.language_model import BaseLanguageModel
from langchain.schema.output import GenerationChunk
from langchain.schema.prompt import PromptValue
@@ -219,13 +219,21 @@ def build_extra_kwargs(

return extra_kwargs


def convert_to_secret_str(value: Union[SecretStr, str]) -> SecretStr:
"""Convert a string to a SecretStr if needed."""
if isinstance(value, SecretStr):
return value
return SecretStr(value)


class _AnthropicCommon(BaseLanguageModel):
client: Any = None #: :meta private:
async_client: Any = None #: :meta private:
model: str ="claude-2"
model: str = Field(default="claude-2", alias="model_name")
"""Model name to use."""

max_tokens_to_sample: int =256
max_tokens_to_sample: int = Field(default=256, alias="max_tokens")
"""Denotes the number of tokens to predict per generation."""

temperature: Optional[float] = None
@@ -245,14 +253,14 @@ class _AnthropicCommon(BaseLanguageModel):

anthropic_api_url: Optional[str] = None

anthropic_api_key: Optional[str] = None
anthropic_api_key: Optional[SecretStr] = None

HUMAN_PROMPT: Optional[str] = None
AI_PROMPT: Optional[str] = None
count_tokens: Optional[Callable[[str], int]] = None
model_kwargs: Dict[str, Any] = {}
model_kwargs: Dict[str, Any] = Field(default_factory=dict)

@classmethod
@root_validator(pre=True)
def build_extra(cls, values: Dict) -> Dict:
extra = values.get("model_kwargs", {})
all_required_field_names = get_pydantic_field_names(cls)
@@ -261,11 +269,13 @@ def build_extra(cls, values: Dict) -> Dict:
)
return values

@classmethod
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
values["anthropic_api_key"] = get_from_dict_or_env(
values["anthropic_api_key"] = convert_to_secret_str(
get_from_dict_or_env(
values, "anthropic_api_key", "ANTHROPIC_API_KEY"
)
)
# Get custom api url from environment.
values["anthropic_api_url"] = get_from_dict_or_env(
@@ -366,8 +376,14 @@ class Anthropic(LLM, _AnthropicCommon):
prompt = f"{anthropic.HUMAN_PROMPT} {prompt}{anthropic.AI_PROMPT}"
response = model(prompt)
"""

@classmethod

class Config:
"""Configuration for this pydantic object."""

allow_population_by_field_name = True
arbitrary_types_allowed = True

@root_validator()
def raise_warning(cls, values: Dict) -> Dict:
"""Raise warning that this class is deprecated."""
warnings.warn(
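
The anthropic.py hunks restore three pydantic 1.x idioms at once: Field(default=..., alias=...) declarations, a class-based Config with allow_population_by_field_name, and root_validator hooks that wrap the API key in a SecretStr. A self-contained sketch of that combination under pydantic 1.x (class and field names here are illustrative, not the repository's):

from typing import Dict, Optional, Union

from pydantic import BaseModel, Field, SecretStr, root_validator


def convert_to_secret_str(value: Union[SecretStr, str]) -> SecretStr:
    """Wrap a plain string in SecretStr so it is masked when logged."""
    if isinstance(value, SecretStr):
        return value
    return SecretStr(value)


class CommonSettings(BaseModel):
    model: str = Field(default="claude-2", alias="model_name")
    api_key: Optional[SecretStr] = None
    model_kwargs: Dict[str, str] = Field(default_factory=dict)

    class Config:
        # Accept either the field name or its alias at construction time.
        allow_population_by_field_name = True

    @root_validator(pre=True)
    def wrap_api_key(cls, values: Dict) -> Dict:
        # Runs before field validation; pydantic 2.x expresses the same hook
        # as @model_validator(mode="before").
        values["api_key"] = convert_to_secret_str(
            values.get("api_key") or "sk-placeholder"
        )
        return values


settings = CommonSettings(model_name="claude-2")
print(settings.api_key)  # prints '**********'; the secret stays masked
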
11 changes: 7 additions & 4 deletions swarms/models/cohere_chat.py
@@ -16,7 +16,7 @@
from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
from langchain.load.serializable import Serializable
from pydantic import model_validator, ConfigDict, Field
from pydantic import Extra, Field, root_validator
from langchain.utils import get_from_dict_or_env

logger = logging.getLogger(__name__)
@@ -85,8 +85,7 @@ class BaseCohere(Serializable):
user_agent: str = "langchain"
"""Identifier for the application making the request."""

@model_validator()
@classmethod
@root_validator()
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key and python package exists in environment."""
try:
@@ -146,7 +145,11 @@ class Cohere(LLM, BaseCohere):

max_retries: int = 10
"""Maximum number of retries to make when generating."""
model_config = ConfigDict(extra="forbid")

class Config:
"""Configuration for this pydantic object."""

extra = Extra.forbid

@property
def _default_params(self) -> Dict[str, Any]:
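
In cohere_chat.py the revert swaps pydantic 2.x's model_config = ConfigDict(extra="forbid") and @model_validator back to the 1.x class-based Config and @root_validator. A minimal sketch of the restored Config style (illustrative model, not the repository's class):

from pydantic import BaseModel, Extra


class StrictParams(BaseModel):
    temperature: float = 0.75

    class Config:
        # pydantic 1.x spelling of "reject unknown fields"; under 2.x the
        # same intent is written as model_config = ConfigDict(extra="forbid").
        extra = Extra.forbid


StrictParams(temperature=0.5)    # accepted
# StrictParams(temprature=0.5)   # the typo would raise a ValidationError
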
5 changes: 2 additions & 3 deletions swarms/models/dalle3.py
@@ -13,7 +13,7 @@
from dotenv import load_dotenv
from openai import OpenAI
from PIL import Image
from pydantic import field_validator
from pydantic import validator
from termcolor import colored

load_dotenv()
@@ -92,8 +92,7 @@ class Config:

arbitrary_types_allowed = True

@field_validator("max_retries", "time_seconds")
@classmethod
@validator("max_retries", "time_seconds")
def must_be_positive(cls, value):
if value <= 0:
raise ValueError("Must be positive")
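
dalle3.py returns to pydantic 1.x's @validator decorator, which already acts as a classmethod and therefore drops the explicit @classmethod stacking used with 2.x's @field_validator. A short sketch of the restored form:

from pydantic import BaseModel, validator


class RetrySettings(BaseModel):
    max_retries: int = 3
    time_seconds: int = 60

    @validator("max_retries", "time_seconds")
    def must_be_positive(cls, value):
        # Shared check for both fields; returning the value keeps it unchanged.
        if value <= 0:
            raise ValueError("Must be positive")
        return value


RetrySettings(max_retries=5, time_seconds=120)    # valid
# RetrySettings(max_retries=0, time_seconds=120)  # would raise "Must be positive"
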
5 changes: 2 additions & 3 deletions swarms/models/eleven_labs.py
@@ -3,7 +3,7 @@
from typing import Any, Dict, Union

from langchain.utils import get_from_dict_or_env
from pydantic import model_validator
from pydantic import root_validator

from swarms.tools.tool import BaseTool

@@ -59,8 +59,7 @@ class ElevenLabsText2SpeechTool(BaseTool):
" Italian, French, Portuguese, and Hindi. "
)

@model_validator(mode="before")
@classmethod
@root_validator(pre=True)
def validate_environment(cls, values: Dict) -> Dict:
"""Validate that api key exists in environment."""
_ = get_from_dict_or_env(
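
eleven_labs.py makes the same kind of swap: @model_validator(mode="before") plus @classmethod becomes a single pydantic 1.x @root_validator(pre=True). A sketch of using that pre-validation hook to require an environment variable before any fields are validated (the variable and default model names are illustrative, not necessarily what the tool reads):

import os
from typing import Dict

from pydantic import BaseModel, root_validator


class SpeechToolConfig(BaseModel):
    model: str = "example-voice-model"  # illustrative default

    @root_validator(pre=True)
    def validate_environment(cls, values: Dict) -> Dict:
        # Fail fast if the key is missing, before field validation runs.
        if "ELEVEN_API_KEY" not in os.environ:
            raise ValueError("ELEVEN_API_KEY environment variable is not set")
        return values
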