Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 7 additions & 11 deletions lib/lib_llm.metta
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
;Import Python helpers for OpenRouter
!(import! &self (library lib_llm.py))

;Create string by appending the list entries
(= (py-str-helper () $outp) $outp)
(= (py-str-helper $L $outp)
Expand All @@ -17,11 +20,6 @@
;Add to builtins a function that can access by index:
!(py-eval "setattr(__import__('builtins'),'index', (lambda o,i: o[i])) or 0")

;Add to builtins a function that POST JSON to OpenRouter and return the assistant reply text
!(py-eval "setattr(__import__('builtins'),'or_post',(lambda model,prompt,key: __import__('json').loads(__import__('urllib.request',fromlist=['urlopen','Request']).urlopen(__import__('urllib.request',fromlist=['Request']).Request('https://openrouter.ai/api/v1/chat/completions',__import__('json').dumps({'model':model,'messages':[{'role':'user','content':prompt}]}).encode('utf-8'),{'Authorization':'Bearer '+key,'Content-Type':'application/json'})).read())['choices'][0]['message']['content'])) or 0")
;Add to builtins a function that POST to OpenRouter embeddings endpoint and return the embedding vector
!(py-eval "setattr(__import__('builtins'),'or_embed',(lambda model,text,key: __import__('json').loads(__import__('urllib.request',fromlist=['urlopen','Request']).urlopen(__import__('urllib.request',fromlist=['Request']).Request('https://openrouter.ai/api/v1/embeddings',__import__('json').dumps({'model':model,'input':text}).encode('utf-8'),{'Authorization':'Bearer '+key,'Content-Type':'application/json'})).read())['data'][0]['embedding'])) or 0")

;Function to prompt GPT via OpenAI API:
(= (useGPT $prompt)
(useGPT gpt-5.2 1000000 medium $prompt))
Expand All @@ -39,9 +37,9 @@

;Function to prompt a model via OpenRouter API:
(= (useOpenRouter $model $prompt)
(let* (($apikey (once (py-eval "__import__('os').environ.get('OPENROUTER_API_KEY','')")))
($text (once (py-call (builtins.or_post $model $prompt $apikey)))))
$text))
(useOpenRouter $model 100000 medium $prompt))
(= (useOpenRouter $model $max_tokens $effort $prompt)
(py-call (lib_llm.openrouter_chat $model $prompt $max_tokens $effort)))

(= (sread-safe $w)
(case (catch (sread $w))
Expand All @@ -62,6 +60,4 @@
$vector))

(= (useOpenRouterEmbedding $model $text)
(let* (($apikey (once (py-eval "__import__('os').environ.get('OPENROUTER_API_KEY','')")))
($vector (once (py-call (builtins.or_embed $model $text $apikey)))))
$vector))
(py-call (lib_llm.openrouter_embed $model $text)))
33 changes: 33 additions & 0 deletions lib/lib_llm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import json
import os
import urllib.request

def openrouter_chat(model, prompt, max_tokens, effort):
    """Send a single-turn chat completion request to the OpenRouter API.

    Args:
        model: OpenRouter model identifier (e.g. 'openai/gpt-4o').
        prompt: The user message content.
        max_tokens: Maximum number of tokens allowed in the completion.
        effort: Reasoning effort level passed through as-is
            (presumably 'low' / 'medium' / 'high' -- see OpenRouter docs).

    Returns:
        The assistant reply text (str) from the first choice.

    Raises:
        RuntimeError: If the OPENROUTER_API_KEY environment variable is unset.
        urllib.error.HTTPError / URLError: On API or network failure.
    """
    key = os.environ.get('OPENROUTER_API_KEY', '')
    if not key:
        # Fail fast with a clear message instead of an opaque HTTP 401 later.
        raise RuntimeError('OPENROUTER_API_KEY environment variable is not set')
    data = json.dumps({
        'model': model,
        'messages': [{'role': 'user', 'content': prompt}],
        'max_tokens': max_tokens,
        'reasoning': {'effort': effort}
    }).encode('utf-8')
    req = urllib.request.Request(
        'https://openrouter.ai/api/v1/chat/completions',
        data,
        {'Authorization': 'Bearer ' + key, 'Content-Type': 'application/json'}
    )
    # Context manager closes the HTTP response even on error; the timeout
    # prevents hanging forever on a stalled connection.
    with urllib.request.urlopen(req, timeout=300) as response:
        payload = json.loads(response.read().decode('utf-8'))
    return payload['choices'][0]['message']['content']

def openrouter_embed(model, text):
    """Request an embedding vector for *text* from the OpenRouter API.

    Args:
        model: OpenRouter embedding model identifier.
        text: The input string to embed.

    Returns:
        The embedding vector (list of floats) for the first data item.

    Raises:
        RuntimeError: If the OPENROUTER_API_KEY environment variable is unset.
        urllib.error.HTTPError / URLError: On API or network failure.
    """
    key = os.environ.get('OPENROUTER_API_KEY', '')
    if not key:
        # Fail fast with a clear message instead of an opaque HTTP 401 later.
        raise RuntimeError('OPENROUTER_API_KEY environment variable is not set')
    data = json.dumps({
        'model': model,
        'input': text
    }).encode('utf-8')
    req = urllib.request.Request(
        'https://openrouter.ai/api/v1/embeddings',
        data,
        {'Authorization': 'Bearer ' + key, 'Content-Type': 'application/json'}
    )
    # Context manager closes the HTTP response even on error; the timeout
    # prevents hanging forever on a stalled connection.
    with urllib.request.urlopen(req, timeout=300) as response:
        payload = json.loads(response.read().decode('utf-8'))
    return payload['data'][0]['embedding']