Skip to content

Commit

Permalink
Added an update of the endpoint, Zapier flow, updated Readme, added A…
Browse files Browse the repository at this point in the history
…rchitecture diagram
  • Loading branch information
Vasilije1990 committed Jun 23, 2023
1 parent 2affd0c commit ebaf004
Show file tree
Hide file tree
Showing 11 changed files with 116 additions and 83 deletions.
5 changes: 5 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,11 @@
<a href="https://prometh.ai">Visit and sign up for the closed Beta at prometh.ai</a>
</p>

## 🏗 Architecture
<p align="center">
<img src="assets/PromethAI_infra.png" alt="PromethAI Architecture" width="50%" height="50%">
</p>


## 🛣 Roadmap
<p align="center">
Expand Down
16 changes: 15 additions & 1 deletion api.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import logging
import os
import uvicorn

from fastapi import Request
CANNED_RESPONSES=False

# Set up logging
Expand Down Expand Up @@ -69,6 +69,20 @@ async def clear_cache(request_data: Payload) -> dict:
agent.clear_cache()
return JSONResponse(content={"response":"Cache cleared"})

@app.post("/action-add-zapier-calendar-action", response_model=dict)
async def action_add_zapier_calendar_action(request: Request, request_data: Payload) -> dict:
    """Create a calendar event through the agent's Zapier NLA integration.

    Expects ``request_data.payload`` to contain ``user_id``, ``session_id``,
    ``prompt_base`` and ``model_speed``.  An optional ``Authorization:
    Bearer <token>`` header carries the caller's Zapier NLA OAuth access
    token; when absent, ``None`` is forwarded and the agent falls back to
    its server-side Zapier credentials.

    Returns:
        JSONResponse with ``{"response": <agent outcome>}``.
    """
    json_payload = request_data.payload
    agent = Agent()
    agent.set_user_session(json_payload["user_id"], json_payload["session_id"])
    # Extract the bearer token only when the header actually uses the
    # Bearer scheme.  The previous str.replace() stripped "Bearer " from
    # anywhere in the header and passed non-Bearer headers through as the
    # token verbatim.
    auth_header = request.headers.get("Authorization")
    bearer_token = None
    if auth_header and auth_header.startswith("Bearer "):
        bearer_token = auth_header[len("Bearer "):]
    outcome = agent.add_zapier_calendar_action(
        prompt_base=json_payload["prompt_base"],
        token=bearer_token,
        model_speed=json_payload["model_speed"],
    )
    return JSONResponse(content={"response": outcome})

@app.post("/prompt-to-choose-meal-tree", response_model=dict)
async def prompt_to_choose_meal_tree(request_data: Payload) -> dict:
json_payload = request_data.payload
Expand Down
Binary file added assets/PromethAI_infra.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified assets/img_1.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified assets/img_2.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified assets/img_3.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified assets/img_4.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file modified assets/img_5.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
56 changes: 36 additions & 20 deletions chains.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,14 +46,14 @@
from langchain.cache import RedisCache
import os
from langchain import llm_cache
if os.getenv("LOCAL_DEV", "") != "True":
REDIS_HOST = os.getenv("REDIS_HOST", "promethai-dev-backend-redis-repl-gr.60qtmk.ng.0001.euw1.cache.amazonaws.com")
langchain.llm_cache = RedisCache(redis_=Redis(host=REDIS_HOST, port=6379, db=0))
logging.info("Using redis cache")
else:
REDIS_HOST = os.getenv("0.0.0.0", "promethai-dev-backend-redis-repl-gr.60qtmk.ng.0001.euw1.cache.amazonaws.com")
langchain.llm_cache = RedisCache(redis_=Redis(host=REDIS_HOST, port=6379, db=0))
logging.info("Using localredis cache")
# if os.getenv("LOCAL_DEV", "") != "True":
# REDIS_HOST = os.getenv("REDIS_HOST", "promethai-dev-backend-redis-repl-gr.60qtmk.ng.0001.euw1.cache.amazonaws.com")
# langchain.llm_cache = RedisCache(redis_=Redis(host=REDIS_HOST, port=6379, db=0))
# logging.info("Using redis cache")
# else:
# REDIS_HOST = os.getenv("0.0.0.0", "promethai-dev-backend-redis-repl-gr.60qtmk.ng.0001.euw1.cache.amazonaws.com")
# langchain.llm_cache = RedisCache(redis_=Redis(host=REDIS_HOST, port=6379, db=0))
# logging.info("Using localredis cache")


class Agent():
Expand Down Expand Up @@ -573,15 +573,31 @@ async def delivery_generation(self, prompt: str, zipcode:str, model_speed:str):



def add_zapier_calendar_action(self, prompt_base, token, model_speed: str):
    """Add a calendar event to the user's Google Calendar via Zapier NLA.

    Args:
        prompt_base: Free-form user statement describing the meeting
            (time, title, details) to be scheduled.
        token: Zapier NLA OAuth access token for the calling user, or
            ``None`` to use the server-side Zapier credentials.
        model_speed: Accepted for interface consistency with the other
            agent actions; not used by this method — TODO confirm.

    Returns:
        The final output of the sequential chain (the Zapier agent's
        result for the formulated calendar request).
    """
    try:
        # Prefer the caller's OAuth token so the action runs against
        # their own Zapier/Google Calendar account.
        zapier = ZapierNLAWrapper(zapier_nla_oauth_access_token=token)
        toolkit = ZapierToolkit.from_zapier_nla_wrapper(zapier)
        zapier_agent = initialize_agent(
            toolkit.get_tools(),
            self.llm_fast,
            agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
            verbose=True,
        )
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate.  Fall back to the server-side Zapier
        # credentials when the user token is missing or invalid.
        logging.exception("Zapier OAuth setup failed; falling back to server credentials")
        zapier = ZapierNLAWrapper()
        toolkit = ZapierToolkit.from_zapier_nla_wrapper(zapier)
        zapier_agent = initialize_agent(
            toolkit.get_tools(),
            self.llm_fast,
            agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
            verbose=True,
        )

    # Step 1: rephrase the raw user statement into an explicit calendar
    # request; step 2: hand that request to the Zapier agent to execute.
    template = """ Formulate the following statement into a calendar request containing time, title, details of the meeting: {prompt} """
    prompt_template = PromptTemplate(
        input_variables=["prompt"], template=template
    )
    chain = LLMChain(llm=self.llm, prompt=prompt_template, verbose=self.verbose)
    overall_chain = SimpleSequentialChain(chains=[chain, zapier_agent], verbose=True)
    outcome = overall_chain.run(prompt_base)
    # Replaced the leftover debug print with structured logging.
    logging.info("Zapier calendar action outcome: %s", outcome)
    return outcome
def voice_text_input(self, query: str, model_speed: str):

"""Serves to generate sub goals for the user and or update the user's preferences"""
Expand Down Expand Up @@ -670,12 +686,12 @@ def _retrieve_summary(self):
# agent._update_memories("lazy, stupid and hungry", "TRAITS")
# agent.update_agent_traits("His personality is greedy")
# agent.update_agent_preferences("Alergic to corn")
# agent.update_agent_taboos("Dislike is brocolli")
agent.add_zapier_calendar_action("I would like to schedule 1 hour meeting tomorrow at 12 about brocolli", 'bla', 'BLA')
#agent.update_agent_summary(model_speed="slow")
#agent.recipe_generation(prompt="I would like a healthy chicken meal over 125$", model_speed="slow")
loop = asyncio.get_event_loop()
loop.run_until_complete(agent.prompt_decompose_to_meal_tree_categories("diet=vegan;availability=cheap", "food", model_speed="slow"))
loop.close()
# loop = asyncio.get_event_loop()
# loop.run_until_complete(agent.prompt_decompose_to_meal_tree_categories("diet=vegan;availability=cheap", "food", model_speed="slow"))
# loop.close()
# #agent.prompt_to_choose_meal_tree(prompt="I want would like a quick meal vietnamese cuisine", assistant_category="food", model_speed="slow")

#print(result)
Expand Down
Loading

0 comments on commit ebaf004

Please sign in to comment.