Executor-Tyrant-Framework committed
Commit dede8c6 · verified · 1 parent: 8682d5a

Update requirements.txt

Files changed (1):
  1. requirements.txt (+18 −177)
requirements.txt CHANGED
@@ -1,182 +1,23 @@
- """
- Recursive Context Manager for Clawdbot
-
- CHANGELOG [2026-01-31 - Gemini]
- ADDED: Phase 1 Orchestrator tools: create_shadow_branch, write_file, shell_execute.
- ADDED: Documentation Scanner to mandate Living Changelog headers.
- FIXED: PermissionError on /.cache by forcing ONNXMiniLM_L6_V2.DOWNLOAD_PATH.
- """
-
- from pathlib import Path
- from typing import List, Dict, Optional, Tuple
- import chromadb
- from chromadb.config import Settings
- from chromadb.utils.embedding_functions import ONNXMiniLM_L6_V2
- import hashlib
- import json
- import os
- import time
- import threading
- import subprocess
- import re
-
- def _select_chroma_path():
-     """HF Spaces Docker containers wipe everything EXCEPT /data on restart."""
-     data_path = Path("/data/chroma_db")
-     try:
-         data_path.mkdir(parents=True, exist_ok=True)
-         test_file = data_path / ".write_test"
-         test_file.write_text("test")
-         test_file.unlink()
-         return str(data_path)
-     except (OSError, PermissionError):
-         workspace_path = Path("/workspace/chroma_db")
-         workspace_path.mkdir(parents=True, exist_ok=True)
-         return str(workspace_path)
-
- CHROMA_DB_PATH = _select_chroma_path()
-
- class HFDatasetPersistence:
-     """Handles durable cloud storage via your 1TB PRO Dataset repository."""
-     def __init__(self, repo_id: str = None):
-         from huggingface_hub import HfApi
-         self.api = HfApi()
-         self.repo_id = repo_id or os.getenv("MEMORY_REPO")
-         self.token = os.getenv("HF_TOKEN") or os.getenv("HUGGINGFACE_TOKEN")
-         self._repo_ready = False
-
-         if self.repo_id and self.token:
-             self._ensure_repo_exists()
-
-     def _ensure_repo_exists(self):
-         if self._repo_ready: return
-         try:
-             self.api.repo_info(repo_id=self.repo_id, repo_type="dataset", token=self.token)
-             self._repo_ready = True
-         except Exception:
-             try:
-                 self.api.create_repo(repo_id=self.repo_id, repo_type="dataset", private=True, token=self.token)
-                 self._repo_ready = True
-             except Exception: pass
-
-     @property
-     def is_configured(self):
-         return bool(self.repo_id and self.token)
-
-     def save_conversations(self, data: List[Dict]):
-         if not self.is_configured: return
-         temp = Path("/tmp/conv_backup.json")
-         temp.write_text(json.dumps(data, indent=2))
-         try:
-             self.api.upload_file(
-                 path_or_fileobj=str(temp),
-                 path_in_repo="conversations.json",
-                 repo_id=self.repo_id,
-                 repo_type="dataset",
-                 token=self.token
-             )
-         except Exception: pass
-
-     def load_conversations(self) -> List[Dict]:
-         if not self.is_configured: return []
-         try:
-             from huggingface_hub import hf_hub_download
-             local_path = hf_hub_download(repo_id=self.repo_id, filename="conversations.json", repo_type="dataset", token=self.token)
-             with open(local_path, 'r') as f: return json.load(f)
-         except Exception: return []
-
- class RecursiveContextManager:
-     """Manages unlimited context and vibe-coding tools for E-T Systems."""
-     def __init__(self, repo_path: str):
-         self.repo_path = Path(repo_path)
-         self.persistence = HFDatasetPersistence()
-
-         # Embedding Config (Fixes /.cache PermissionError)
-         self.embedding_function = ONNXMiniLM_L6_V2()
-         cache_dir = os.getenv("CHROMA_CACHE_DIR", "/tmp/.cache/chroma")
-         self.embedding_function.DOWNLOAD_PATH = cache_dir
-         os.makedirs(cache_dir, exist_ok=True)
-
-         self.chroma_client = chromadb.PersistentClient(
-             path=CHROMA_DB_PATH,
-             settings=Settings(anonymized_telemetry=False, allow_reset=True)
-         )
-
-         c_name = self._get_collection_name()
-         self.collection = self.chroma_client.get_or_create_collection(
-             name=c_name,
-             embedding_function=self.embedding_function
-         )
-         self.conversations = self.chroma_client.get_or_create_collection(
-             name=f"conv_{c_name.split('_')[1]}",
-             embedding_function=self.embedding_function
-         )
-
-         if self.conversations.count() == 0:
-             self._restore_from_cloud()
-
-     def _restore_from_cloud(self):
-         data = self.persistence.load_conversations()
-         for conv in data:
-             try:
-                 self.conversations.add(documents=[conv["document"]], metadatas=[conv["metadata"]], ids=[conv["id"]])
-             except Exception: pass
-
-     def _get_collection_name(self) -> str:
-         path_hash = hashlib.md5(str(self.repo_path).encode()).hexdigest()[:8]
-         return f"codebase_{path_hash}"
-
-     # --- PHASE 1 ORCHESTRATOR TOOLS ---
-
-     def create_shadow_branch(self):
-         """Creates a timestamped backup branch of the E-T Systems Space."""
-         timestamp = time.strftime("%Y%m%d-%H%M%S")
-         branch_name = f"vibe-backup-{timestamp}"
-         try:
-             repo_id = os.getenv("ET_SYSTEMS_SPACE", "Executor-Tyrant-Framework/Executor-Framworks_Full_VDB")
-             self.persistence.api.create_branch(
-                 repo_id=repo_id,
-                 branch=branch_name,
-                 repo_type="space",
-                 token=self.persistence.token
-             )
-             return f"🛡️ Shadow branch created: {branch_name}"
-         except Exception as e:
-             return f"⚠️ Shadow branch failed: {e}"
-
-     def write_file(self, path: str, content: str):
-         """Writes file strictly if valid CHANGELOG is present."""
-         if not re.search(r"CHANGELOG \[\d{4}-\d{2}-\d{2} - \w+\]", content):
-             return "REJECTED: Missing mandatory CHANGELOG [YYYY-MM-DD - AgentName] header."
-
-         try:
-             full_path = self.repo_path / path
-             full_path.parent.mkdir(parents=True, exist_ok=True)
-             full_path.write_text(content)
-             return f"✅ Successfully wrote {path}"
-         except Exception as e:
-             return f"Error writing file: {e}"
-
-     def shell_execute(self, command: str):
-         """Runs shell commands in the /workspace directory."""
-         try:
-             result = subprocess.run(command, shell=True, capture_output=True, text=True, cwd=self.repo_path, timeout=30)
-             return f"STDOUT: {result.stdout}\nSTDERR: {result.stderr}"
-         except Exception as e:
-             return f"Execution Error: {e}"
-
-     # --- RECURSIVE SEARCH TOOLS ---
-     def search_code(self, query: str, n: int = 5):
-         if self.collection.count() == 0: return []
-         res = self.collection.query(query_texts=[query], n_results=min(n, self.collection.count()))
-         return [{"file": m['path'], "snippet": d[:500]} for d, m in zip(res['documents'][0], res['metadatas'][0])]
-
-     def read_file(self, path: str):
-         p = self.repo_path / path
-         return p.read_text() if p.exists() else "File not found."
-
-     def save_conversation_turn(self, u, a, t_id):
-         combined = f"USER: {u}\n\nASSISTANT: {a}"
-         u_id = f"turn_{int(time.time())}"
-         self.conversations.add(documents=[combined], metadatas=[{"turn": t_id}], ids=[u_id])
-         self.persistence.save_conversations([{"document": combined, "metadata": {"turn": t_id}, "id": u_id}])
+ # Python Dependencies for Clawdbot Dev Assistant
+ #
+ # CHANGELOG [2025-01-28 - Josh]
+ # Core dependencies for recursive context + HF inference
+
+ # Gradio for web interface (5.0+ required for type="messages" format)
+ gradio>=5.0.0
+ pytz>=2023.3
+
+ # HuggingFace for model inference
+ huggingface-hub>=0.20.0
+
+ # ChromaDB for vector search (recursive context)
+ chromadb>=0.4.0
+ pydantic>=2.0.0
+ pydantic-settings>=2.0.0
+
+ # Additional utilities
+ requests>=2.31.0
+ gitpython>=3.1.0
+
+ # Performance
+ numpy>=1.24.0
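
For context on the gradio>=5.0.0 pin: Gradio 5's type="messages" chat format passes history as OpenAI-style role/content dicts rather than the legacy tuple pairs. A minimal sketch of that format, assuming only the packages pinned above (the respond() handler is a hypothetical placeholder, not part of this repo):

import gradio as gr

def respond(message, history):
    # With type="messages", history arrives as a list of
    # {"role": "user" | "assistant", "content": ...} dicts.
    # Placeholder logic for illustration only.
    return f"Echo: {message}"

demo = gr.ChatInterface(respond, type="messages")

if __name__ == "__main__":
    demo.launch()

Under a 4.x install this format is not guaranteed to be available, which is what makes the 5.0 floor load-bearing rather than cosmetic.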