-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathagent_node.py
More file actions
executable file
·52 lines (42 loc) · 1.69 KB
/
agent_node.py
File metadata and controls
executable file
·52 lines (42 loc) · 1.69 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
#!/usr/bin/env python3
from dora import Node
import pyarrow as pa
import urllib.request
import json
import time
def ask_ollama(prompt, model="llama3.2:1b", timeout=60):
    """Send *prompt* to the local Ollama server and return the model's reply.

    Parameters:
        prompt: Full prompt text (system + user instruction) to submit.
        model: Ollama model tag to query (default "llama3.2:1b", the
            original hard-coded value).
        timeout: Seconds to wait for the HTTP response before giving up.

    Returns:
        The 'response' field of Ollama's JSON reply on success, or a
        JSON object string of the form {"error": "..."} on any failure.
    """
    url = "http://localhost:11434/api/generate"
    data = json.dumps({
        "model": model,
        "prompt": prompt,
        "stream": False,   # one blocking reply rather than a token stream
        "format": "json"   # ask Ollama to constrain its output to JSON
    }).encode('utf-8')
    req = urllib.request.Request(
        url, data=data, headers={'Content-Type': 'application/json'})
    try:
        # Bound the request so a wedged/absent server cannot hang the node.
        with urllib.request.urlopen(req, timeout=timeout) as response:
            return json.loads(response.read().decode())['response']
    except Exception as e:
        # json.dumps escapes quotes/backslashes in the message, so this is
        # always valid JSON -- the previous f-string template broke whenever
        # str(e) itself contained a double quote.
        return json.dumps({"error": str(e)})
def main():
    """Agent loop: ask the local LLM for a robot command twice, publishing
    each raw decision to the Dora dataflow as the 'robot_command' output."""
    node = Node()
    print("[Agent] Node started. Waking up LLM...")
    # The high-level instruction we want the agent to execute
    user_instruction = "We need to pick up the object directly in front of the robot. Generate the target coordinates to move the arm forward."
    system_prompt = f"""
You are a robotic control agent.
User Instruction: {user_instruction}
Output ONLY a valid JSON object with an 'action' (string) and 'target' (array of 3 floats: x, y, z).
Do not output any markdown or explanation.
"""
    for attempt in range(2):  # Test it twice
        print(f"\n[Agent] Thinking about instruction: '{user_instruction}'...")
        # 1. Get dynamic JSON from local LLM
        decision = ask_ollama(system_prompt)
        print(f"[Agent] LLM Decision: {decision}")
        # 2. Send the LLM's raw output to the Dora dataflow
        node.send_output("robot_command", pa.array([decision]))
        time.sleep(3)
    print("\n[Agent] Finished tasks.")


if __name__ == '__main__':
    main()