rahul7star committed on
Commit
20cf1e1
·
verified ·
1 Parent(s): 6a0e283

Update pygmyclaw.py

Browse files
Files changed (1) hide show
  1. pygmyclaw.py +70 -91
pygmyclaw.py CHANGED
@@ -1,7 +1,6 @@
1
  #!/usr/bin/env python3
2
  """
3
- PygmyClaw – Compact AI Agent with multi-instance speculative decoding,
4
- persistent queue, and Hugging Face-backed persistent storage.
5
  """
6
 
7
  import os
@@ -10,80 +9,69 @@ import json
10
  import time
11
  import queue
12
  import threading
13
- from pathlib import Path
14
  import urllib.request
15
-
16
  from huggingface_hub import hf_hub_download, upload_file
17
 
18
- # Optional Redis support
19
- try:
20
- import redis
21
- REDIS_AVAILABLE = True
22
- except ImportError:
23
- REDIS_AVAILABLE = False
24
-
25
  # -------------------- Globals --------------------
26
  SCRIPT_DIR = Path(__file__).parent.resolve()
27
  DEFAULT_MODEL = "qwen2.5:0.5b"
28
  DEFAULT_ENDPOINT = "http://localhost:11434/api/generate"
29
 
30
- TASK_QUEUE = queue.Queue()
31
- QUEUE_PROCESSOR_EVENT = threading.Event()
32
-
33
- # HF storage
34
  HF_TOKEN = os.environ.get("HF_TOKEN")
35
  HF_REPO = "rahul7star/pyclaw"
36
  HF_LOCAL_DIR = SCRIPT_DIR / "pyclaw_hf"
37
  FILES_TO_DOWNLOAD = ["memory.json", "tools.json"]
38
 
 
 
 
39
  # -------------------- HF Download --------------------
40
  def download_hf_files():
41
  HF_LOCAL_DIR.mkdir(parents=True, exist_ok=True)
42
- for fname in FILES_TO_DOWNLOAD:
43
- local_path = HF_LOCAL_DIR / fname
44
  if not local_path.exists() or local_path.stat().st_size == 0:
45
  try:
46
- path = hf_hub_download(
47
  repo_id=HF_REPO,
48
- filename=fname,
49
  token=HF_TOKEN,
50
  local_dir=str(HF_LOCAL_DIR)
51
  )
52
- print(f"Downloaded {fname} -> {path}")
53
  except Exception as e:
54
- print(f"⚠️ Failed to download {fname}: {e}")
55
  local_path.write_text("{}")
56
- print(f"Created empty {fname}")
57
 
58
- # -------------------- PygmyClaw Class --------------------
59
  class PygmyClaw:
60
  def __init__(self):
61
-
62
- # Use given workspace or default
63
- if workspace:
64
- self.workspace = Path(workspace).resolve()
65
- else:
66
- self.workspace = Path("/workspace/data").resolve()
67
- self.workspace.mkdir(parents=True, exist_ok=True)
68
  self.model = DEFAULT_MODEL
69
  self.endpoint = DEFAULT_ENDPOINT
70
  self.memory_data = {}
71
  self.tools_data = {}
 
72
 
73
  # Ensure HF files exist
74
  download_hf_files()
75
  self._load_hf_memory()
76
  self._load_hf_tools()
77
 
 
 
 
 
78
  # Ensure model is ready
79
  self._ensure_model_ready()
80
  self._warmup_model()
81
 
82
- # Start queue processor
83
  QUEUE_PROCESSOR_EVENT.set()
84
  threading.Thread(target=self._process_queue, daemon=True).start()
85
 
86
- # -------------------- HF Memory / Tools --------------------
87
  def _load_hf_memory(self):
88
  mem_file = HF_LOCAL_DIR / "memory.json"
89
  mem_file.parent.mkdir(parents=True, exist_ok=True)
@@ -94,26 +82,22 @@ class PygmyClaw:
94
  self.memory_data = json.load(f)
95
  except json.JSONDecodeError:
96
  self.memory_data = {}
97
- print("⚠️ memory.json was empty or invalid, initialized with {}")
98
 
99
  def _save_hf_memory(self):
100
  mem_file = HF_LOCAL_DIR / "memory.json"
101
  with open(mem_file, "w") as f:
102
  json.dump(self.memory_data, f, indent=2)
103
-
104
- # Upload to Hugging Face if token provided
105
- if HF_TOKEN:
106
- try:
107
- upload_file(
108
- path_or_fileobj=str(mem_file),
109
- path_in_repo="memory.json",
110
- repo_id=HF_REPO,
111
- token=HF_TOKEN,
112
- repo_type="model"
113
- )
114
- print("💾 Memory saved to HF repo")
115
- except Exception as e:
116
- print(f"⚠️ Failed to push memory to HF: {e}")
117
 
118
  def _load_hf_tools(self):
119
  tools_file = HF_LOCAL_DIR / "tools.json"
@@ -125,13 +109,13 @@ class PygmyClaw:
125
  self.tools_data = json.load(f)
126
  except json.JSONDecodeError:
127
  self.tools_data = {}
128
- print("⚠️ tools.json was empty or invalid, initialized with {}")
129
 
130
  # -------------------- Model --------------------
131
  def _ensure_model_ready(self):
132
  print(f"⏳ Ensuring model '{self.model}' is ready...")
 
133
  try:
134
- payload = {"model": self.model, "prompt": "hello", "stream": False, "options": {"num_predict": 1}}
135
  req = urllib.request.Request(
136
  self.endpoint,
137
  data=json.dumps(payload).encode("utf-8"),
@@ -155,18 +139,16 @@ class PygmyClaw:
155
  headers={"Content-Type": "application/json"},
156
  method="POST"
157
  )
158
- with urllib.request.urlopen(req, timeout=5) as resp:
159
  pass
160
  except Exception:
161
  pass
162
 
163
- # -------------------- Queue Processor --------------------
164
- def add_task(self, prompt, tool=None, callback=None):
165
- """Add a task to the queue with optional tool and callback"""
166
  task_id = str(time.time())
167
- task = {"id": task_id, "prompt": prompt, "tool": tool or "AI Agent", "callback": callback}
168
- TASK_QUEUE.put(task)
169
- print(f"Task {task_id} queued with tool={task['tool']}.")
170
  return task_id
171
 
172
  def _process_queue(self):
@@ -177,59 +159,56 @@ class PygmyClaw:
177
  except queue.Empty:
178
  continue
179
 
180
- print(f"Processing task {task['id']} -> {task['prompt']}")
 
 
 
 
 
181
  try:
182
- result = self.generate_with_ssd(task["prompt"])
183
- self.memory_data[task["id"]] = {
184
- "prompt": task["prompt"],
 
 
 
 
 
 
 
 
185
  "response": result,
186
  "timestamp": time.time(),
187
- "tool": task.get("tool", "AI Agent")
188
  }
189
  self._save_hf_memory()
190
- print(f"✅ Task {task['id']} processed.")
191
-
192
- # Call callback if present
193
- if task.get("callback"):
194
- try:
195
- task["callback"](result)
196
- except Exception as e:
197
- print(f"⚠️ Callback failed for task {task['id']}: {e}")
198
-
199
  except Exception as e:
200
- print(f"❌ Failed to process task {task['id']}: {e}")
201
  finally:
202
  TASK_QUEUE.task_done()
203
 
204
  # -------------------- Submit Prompt --------------------
205
- def submit_prompt(self, prompt):
206
- """Add prompt to queue and wait for completion."""
207
- task_id = self.add_task(prompt)
208
  while task_id not in self.memory_data:
209
  time.sleep(0.1)
210
  return self.memory_data[task_id]["response"]
211
 
212
- # -------------------- Model Call --------------------
213
  def generate_with_ssd(self, prompt):
214
- payload = {"model": self.model, "prompt": prompt, "stream": False, "options": {"num_predict": 1}}
215
  req = urllib.request.Request(
216
  self.endpoint,
217
  data=json.dumps(payload).encode("utf-8"),
218
  headers={"Content-Type": "application/json"},
219
  method="POST"
220
  )
221
- with urllib.request.urlopen(req, timeout=60) as resp:
222
- resp_data = json.loads(resp.read())
223
- return resp_data.get("response", "")
224
-
225
- # -------------------- Main --------------------
226
- def main():
227
- agent = PygmyClaw()
228
- prompt = input("Enter prompt: ")
229
- outcome = agent.submit_prompt(prompt)
230
- print("\n=== Outcome ===")
231
- print(outcome)
232
-
233
-
234
- if __name__ == "__main__":
235
- main()
 
1
  #!/usr/bin/env python3
2
  """
3
+ PygmyClaw – Compact AI Agent with queue, AI Agent + HF tools support.
 
4
  """
5
 
6
  import os
 
9
  import time
10
  import queue
11
  import threading
 
12
  import urllib.request
13
+ from pathlib import Path
14
  from huggingface_hub import hf_hub_download, upload_file
15
 
 
 
 
 
 
 
 
16
  # -------------------- Globals --------------------
17
  SCRIPT_DIR = Path(__file__).parent.resolve()
18
  DEFAULT_MODEL = "qwen2.5:0.5b"
19
  DEFAULT_ENDPOINT = "http://localhost:11434/api/generate"
20
 
 
 
 
 
21
  HF_TOKEN = os.environ.get("HF_TOKEN")
22
  HF_REPO = "rahul7star/pyclaw"
23
  HF_LOCAL_DIR = SCRIPT_DIR / "pyclaw_hf"
24
  FILES_TO_DOWNLOAD = ["memory.json", "tools.json"]
25
 
26
+ TASK_QUEUE = queue.Queue()
27
+ QUEUE_PROCESSOR_EVENT = threading.Event()
28
+
29
  # -------------------- HF Download --------------------
30
def download_hf_files():
    """Ensure the persistent HF-backed files exist locally, fetching on demand.

    For each file in FILES_TO_DOWNLOAD, download it from HF_REPO into
    HF_LOCAL_DIR unless a non-empty local copy already exists.  When a
    download fails, fall back to writing an empty JSON object so the rest
    of the agent can still load the file.
    """
    HF_LOCAL_DIR.mkdir(parents=True, exist_ok=True)
    for name in FILES_TO_DOWNLOAD:
        target = HF_LOCAL_DIR / name
        # A missing or zero-byte file counts as "not downloaded yet".
        if target.exists() and target.stat().st_size > 0:
            continue
        try:
            hf_hub_download(
                repo_id=HF_REPO,
                filename=name,
                token=HF_TOKEN,
                local_dir=str(HF_LOCAL_DIR),
            )
            print(f"Downloaded {name}")
        except Exception as e:
            print(f"⚠️ Failed to download {name}: {e}")
            target.write_text("{}")
            print(f"Created empty {name}")
47
 
48
+ # -------------------- PygmyClaw Agent --------------------
49
  class PygmyClaw:
50
  def __init__(self):
 
 
 
 
 
 
 
51
  self.model = DEFAULT_MODEL
52
  self.endpoint = DEFAULT_ENDPOINT
53
  self.memory_data = {}
54
  self.tools_data = {}
55
+ self.python_tools = ["Python Script"] # default code execution tool
56
 
57
  # Ensure HF files exist
58
  download_hf_files()
59
  self._load_hf_memory()
60
  self._load_hf_tools()
61
 
62
+ # Combine tools from HF + Python + AI Agent
63
+ self.python_tools += list(self.tools_data.keys())
64
+ self.python_tools.append("AI Agent") # default AI Agent
65
+
66
  # Ensure model is ready
67
  self._ensure_model_ready()
68
  self._warmup_model()
69
 
70
+ # Start queue processor in background
71
  QUEUE_PROCESSOR_EVENT.set()
72
  threading.Thread(target=self._process_queue, daemon=True).start()
73
 
74
+ # -------------------- Memory & Tools --------------------
75
  def _load_hf_memory(self):
76
  mem_file = HF_LOCAL_DIR / "memory.json"
77
  mem_file.parent.mkdir(parents=True, exist_ok=True)
 
82
  self.memory_data = json.load(f)
83
  except json.JSONDecodeError:
84
  self.memory_data = {}
85
+ print("⚠️ memory.json invalid, initialized with {}")
86
 
87
  def _save_hf_memory(self):
88
  mem_file = HF_LOCAL_DIR / "memory.json"
89
  with open(mem_file, "w") as f:
90
  json.dump(self.memory_data, f, indent=2)
91
+ try:
92
+ upload_file(
93
+ path_or_fileobj=str(mem_file),
94
+ path_in_repo="memory.json",
95
+ repo_id=HF_REPO,
96
+ token=HF_TOKEN,
97
+ repo_type="model"
98
+ )
99
+ except Exception as e:
100
+ print(f"⚠️ Failed to push memory to HF: {e}")
 
 
 
 
101
 
102
  def _load_hf_tools(self):
103
  tools_file = HF_LOCAL_DIR / "tools.json"
 
109
  self.tools_data = json.load(f)
110
  except json.JSONDecodeError:
111
  self.tools_data = {}
112
+ print("⚠️ tools.json invalid, initialized with {}")
113
 
114
  # -------------------- Model --------------------
115
  def _ensure_model_ready(self):
116
  print(f"⏳ Ensuring model '{self.model}' is ready...")
117
+ payload = {"model": self.model, "prompt": "hello", "stream": False, "options": {"num_predict": 1}}
118
  try:
 
119
  req = urllib.request.Request(
120
  self.endpoint,
121
  data=json.dumps(payload).encode("utf-8"),
 
139
  headers={"Content-Type": "application/json"},
140
  method="POST"
141
  )
142
+ with urllib.request.urlopen(req, timeout=5):
143
  pass
144
  except Exception:
145
  pass
146
 
147
+ # -------------------- Queue --------------------
148
+ def add_task(self, prompt, tool="AI Agent", callback=None):
 
149
  task_id = str(time.time())
150
+ TASK_QUEUE.put({"id": task_id, "prompt": prompt, "tool": tool, "callback": callback})
151
+ print(f"Task {task_id} queued with tool={tool}")
 
152
  return task_id
153
 
154
  def _process_queue(self):
 
159
  except queue.Empty:
160
  continue
161
 
162
+ task_id = task["id"]
163
+ prompt = task["prompt"]
164
+ tool = task.get("tool", "AI Agent")
165
+ callback = task.get("callback", None)
166
+
167
+ print(f"Processing task {task_id} with tool={tool} -> {prompt}")
168
  try:
169
+ if tool == "Python Script":
170
+ # Run code dynamically
171
+ local_vars = {}
172
+ exec(prompt, {}, local_vars)
173
+ result = str(local_vars)
174
+ else:
175
+ # AI Agent or HF tool
176
+ result = self.generate_with_ssd(prompt)
177
+
178
+ self.memory_data[task_id] = {
179
+ "prompt": prompt,
180
  "response": result,
181
  "timestamp": time.time(),
182
+ "tool": tool
183
  }
184
  self._save_hf_memory()
185
+ if callback:
186
+ callback(result)
187
+ print(f"✅ Task {task_id} done.")
 
 
 
 
 
 
188
  except Exception as e:
189
+ print(f"❌ Task {task_id} failed: {e}")
190
  finally:
191
  TASK_QUEUE.task_done()
192
 
193
  # -------------------- Submit Prompt --------------------
194
+ def submit_prompt(self, prompt, tool="AI Agent"):
195
+ task_id = self.add_task(prompt, tool=tool)
 
196
  while task_id not in self.memory_data:
197
  time.sleep(0.1)
198
  return self.memory_data[task_id]["response"]
199
 
200
+ # -------------------- Model / Ollama call --------------------
201
  def generate_with_ssd(self, prompt):
202
+ payload = {"model": self.model, "prompt": prompt, "stream": False, "options": {"num_predict": 50}}
203
  req = urllib.request.Request(
204
  self.endpoint,
205
  data=json.dumps(payload).encode("utf-8"),
206
  headers={"Content-Type": "application/json"},
207
  method="POST"
208
  )
209
+ try:
210
+ with urllib.request.urlopen(req, timeout=60) as resp:
211
+ data = json.loads(resp.read())
212
+ return data.get("response", "No response")
213
+ except Exception as e:
214
+ return f"❌ Model request failed: {e}"