Skip to content

Commit

Permalink
Refactored action router to use semantic routing (#58)
Browse files Browse the repository at this point in the history
  • Loading branch information
judahpaul16 authored Jul 13, 2024
1 parent 1b6880d commit 4eb12b9
Show file tree
Hide file tree
Showing 6 changed files with 112 additions and 61 deletions.
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -826,3 +826,6 @@ Contributions are certainly welcome! Please read the [`contributing guidelines`]
## 📜 License
This project is licensed under the GNU GPL v3.0 License - see the [`LICENSE`](LICENSE) file for details.
## 🌟 Star History
[![Star History Chart](https://api.star-history.com/svg?repos=judahpaul16/gpt-home&type=Date&theme=dark)](https://star-history.com/#judahpaul16/gpt-home)
28 changes: 6 additions & 22 deletions src/actions.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,15 +225,15 @@ async def philips_hue_action(text: str):

raise Exception("No philips hue bridge IP found. Please enter your bridge IP for Phillips Hue in the web interface or try reconnecting the service.")

async def query_openai(text, display, retries=3):
async def query_openai(text, retries=3):
# Load settings from settings.json
settings = load_settings()
max_tokens = settings.get("max_tokens")
temperature = settings.get("temperature")

for i in range(retries):
try:
response = openai.ChatCompletion.create(
response = openai.chat.completions.create(
model=settings.get("model"),
messages=[
{"role": "system", "content": f"You are a helpful assistant. {settings.get('custom_instructions')}"},
Expand All @@ -242,31 +242,15 @@ async def query_openai(text, display, retries=3):
max_tokens=max_tokens,
temperature=temperature
)
response_content = response['choices'][0]['message']['content'].strip()
response_content = response.choices[0].message.content.strip()
if response_content: # Check if the response is not empty
message = response_content
return message
return response_content
else:
logger.warning(f"Retry {i+1}: Received empty response from OpenAI.")
except Exception as e:
if 'Did you mean to use v1/completions?' in str(e):
# Re-query using v1/completions
prompt = f"You are a helpful assistant. {settings.get('custom_instructions')}\nHuman: {text}"
response = openai.Completion.create(
model=settings.get("model"),
prompt=prompt,
max_tokens=max_tokens,
temperature=temperature
)
response_content = response['choices'][0]['text'].strip()
if response_content:
message = response_content
return message
else:
logger.error(f"Error on try {i+1}: {e}")
logger.error(f"Error on try {i+1}: {e}")
if i == retries - 1: # If this was the last retry
error_message = f"Something went wrong after {retries} retries: {e}"
raise Exception(error_message)
raise Exception(f"Something went wrong after {retries} retries: {e}\n{traceback.format_exc()}")
await asyncio.sleep(0.5) # Wait before retrying

alarms = {}
Expand Down
3 changes: 2 additions & 1 deletion src/app.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from common import *
from routes import *

async def main():
state_task = None
Expand Down Expand Up @@ -38,7 +39,7 @@ async def main():
delay_heard = await calculate_delay(heard_message)

# Create a task for OpenAI query, don't await it yet
query_task = asyncio.create_task(action_router(actual_text, display))
query_task = asyncio.create_task(action_router(actual_text))

await asyncio.gather(
speak(heard_message, stop_event_heard),
Expand Down
34 changes: 0 additions & 34 deletions src/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,40 +288,6 @@ def _speak():
await loop.run_in_executor(executor, _speak)
stop_event.set()

from actions import \
alarm_reminder_action, \
spotify_action, \
open_weather_action, \
philips_hue_action, \
caldav_action, \
query_openai

async def action_router(text: str, display):
# Alarm and Reminder actions
if re.search(r'\b(alarm|timer|reminder|remind me|^wake me up)\b', text, re.IGNORECASE):
return await alarm_reminder_action(text)

# For Spotify actions
if re.search(r'\b(^play|resume|next song|go back|pause|stop|shuffle|repeat|volume)(\s.*)?(\bon\b\sSpotify)?\b', text, re.IGNORECASE):
return await spotify_action(text)

# For Open Weather actions
elif re.search(r'\b(weather|forecast|temperature)\b.*\b(in|for|at)?\b(\w+)?', text, re.IGNORECASE) or \
re.search(r'\b(is|will)\sit\b.*\b(hot|cold|rain(ing|y)?|sun(ny|ning)?|cloud(y|ing)?|wind(y|ing)?|storm(y|ing)?|snow(ing)?)\b', text, re.IGNORECASE):
return await open_weather_action(text)

# For Philips Hue actions
elif re.search(r'\b(turn\s)?(the\s)?lights?\s?(on|off)?\b', text, re.IGNORECASE):
return await philips_hue_action(text)

# For Calendar & To-Do List actions (CalDAV)
elif re.search(r'\b(event|calendar|schedule|appointment|task|todo|to-do|task\slist|to-do\slist|to do)\b', text, re.IGNORECASE):
return await caldav_action(text)

# If no pattern matches, query OpenAI
else:
return await query_openai(text, display)

async def handle_error(message, state_task, display):
if state_task:
state_task.cancel()
Expand Down
9 changes: 5 additions & 4 deletions src/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ adafruit-circuitpython-typing==1.9.4
Adafruit-GPIO==1.0.3
Adafruit-PlatformDetect==3.52.0
Adafruit-PureIO==1.1.11
aiohttp==3.8.5
aiohttp==3.9.5
aiosignal==1.3.1
annotated-types==0.5.0
anyio==3.7.1
Expand All @@ -28,15 +28,15 @@ idna==3.4
multidict==6.0.4
ninja==1.11.1
oauthlib==3.2.2
openai==0.28.0
openai==1.10.0
phue==1.1
Pillow==10.0.0
protobuf==4.24.3
pyasn1==0.5.0
pyasn1-modules==0.3.0
PyAudio==0.2.13
pydantic==2.3.0
pydantic_core==2.6.3
pydantic==2.5.3
pydantic_core==2.14.6
pyftdi==0.55.0
pyparsing==3.1.1
pyserial==3.5
Expand All @@ -47,6 +47,7 @@ requests==2.31.0
requests-oauthlib==1.3.1
RPi.GPIO==0.7.1
rsa==4.9
semantic-router==0.0.51
sniffio==1.3.0
SpeechRecognition==3.10.0
spidev==3.6
Expand Down
96 changes: 96 additions & 0 deletions src/routes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,96 @@
from semantic_router import Route
from semantic_router.encoders import OpenAIEncoder
from semantic_router.layer import RouteLayer

from actions import *

# --- Semantic routes -----------------------------------------------------
# Each Route maps a handful of example utterances to the NAME of an async
# action coroutine (the coroutines themselves come from `from actions
# import *` above). At runtime the RouteLayer embeds incoming text and
# returns the closest matching route, replacing the old regex dispatch.

# Alarm / timer / reminder requests -> alarm_reminder_action
alarm_route = Route(
    name="alarm_reminder_action",
    utterances=[
        "set an alarm",
        "wake me up",
        "remind me in"
    ]
)

# Music playback requests -> spotify_action
spotify_route = Route(
    name="spotify_action",
    utterances=[
        "play some music",
        "open spotify",
        "play my playlist"
    ]
)

# Weather / temperature questions -> open_weather_action
weather_route = Route(
    name="open_weather_action",
    utterances=[
        "what's the weather",
        "tell me the weather",
        "what is the temperature"
    ]
)

# Smart-light commands -> philips_hue_action
lights_route = Route(
    name="philips_hue_action",
    utterances=[
        "turn on the lights",
        "switch off the lights",
        "dim the lights"
    ]
)

# Calendar / to-do requests -> caldav_action
calendar_route = Route(
    name="caldav_action",
    utterances=[
        "schedule a meeting",
        "what's on my calendar",
        "add an event"
    ]
)

# Catch-all conversational queries -> query_openai (also the fallback
# used by ActionRouter.resolve when no route matches at all).
general_route = Route(
    name="query_openai",
    utterances=[
        "tell me a joke",
        "what's the time",
        "how are you",
        "what is the meaning of life",
        "what is the capital of France",
        "what is the difference between Python 2 and Python 3",
        "what is the best programming language",
        "who was the first president of the United States",
        "what is the largest mammal"
    ]
)

routes = [alarm_route, spotify_route, weather_route, lights_route, calendar_route, general_route]
# NOTE(review): OpenAIEncoder presumably reads OPENAI_API_KEY from the
# environment -- confirm credentials are available before import time.
encoder = OpenAIEncoder()
# Built once at module import and shared by every ActionRouter instance.
rl = RouteLayer(encoder=encoder, routes=routes)

class ActionRouter:
    """Resolve free-form text to the name of an action coroutine.

    Thin wrapper around the module-level ``RouteLayer`` so the encoder
    and routes are built only once and shared by every instance.
    """

    def __init__(self):
        # Reuse the shared layer built at import time.
        self.route_layer = rl

    def resolve(self, text):
        """Return the matched route name, or ``"query_openai"`` if none.

        The route layer returns a ``RouteChoice`` object even when nothing
        matched; that object is truthy but its ``name`` is ``None``, so the
        fallback must test the name itself, not the result's truthiness
        (the original ``result.name if result else ...`` returned ``None``
        on a miss instead of falling back).
        """
        result = self.route_layer(text)
        name = getattr(result, "name", None)
        return name if name else "query_openai"

class Action:
    """Bind a resolved action name to the input text and execute it."""

    def __init__(self, action_name, text):
        # Name of an async action coroutine expected in module globals.
        self.action_name = action_name
        # Raw user utterance forwarded to the action.
        self.text = text

    async def perform(self, **kwargs):
        """Look up the action coroutine by name and await it.

        Falls back to ``query_openai`` when the name is unknown. Only the
        dictionary lookup sits inside the ``try`` so a ``KeyError`` raised
        *inside* the action itself propagates instead of being silently
        rerouted to the fallback (the original wrapped the whole call).
        The original also had an unreachable ``return "Action not found."``
        after the except-branch return; it is removed here.
        """
        try:
            action_func = globals()[self.action_name]
        except KeyError:
            # Unknown action name: degrade gracefully to a general query.
            action_func = globals()["query_openai"]
        return await action_func(self.text, **kwargs)

async def action_router(text: str, router=None):
    """Route *text* to the best-matching action and return its result.

    Args:
        text: The user's transcribed utterance.
        router: Optional ``ActionRouter`` override (useful for testing).
            Defaults to ``None``; a router is created lazily per call.
            The original default of ``router=ActionRouter()`` was evaluated
            once at definition time (the mutable-default pitfall), pinning
            a single hidden instance forever; lazy creation is cheap since
            ``ActionRouter`` only references the shared module-level layer.

    Returns:
        Whatever the resolved action coroutine returns.
    """
    if router is None:
        router = ActionRouter()
    action_name = router.resolve(text)
    # Action.perform falls back to query_openai for unknown names.
    return await Action(action_name, text).perform()

0 comments on commit 4eb12b9

Please sign in to comment.