Spaces:
Sleeping
Sleeping
Upload folder using huggingface_hub
Browse files
- Dockerfile +3 -6
- README.md +1 -5
- brain.py +7 -29
Dockerfile
CHANGED
|
@@ -1,10 +1,7 @@
|
|
| 1 |
FROM python:3.11-slim
|
| 2 |
-
RUN
|
| 3 |
WORKDIR /app
|
| 4 |
-
|
| 5 |
-
COPY brain.py /app/brain.py
|
| 6 |
-
RUN chmod +x /app/brain.py
|
| 7 |
-
USER user
|
| 8 |
EXPOSE 7860
|
| 9 |
-
HEALTHCHECK --interval=30s --timeout=10s
|
| 10 |
CMD ["python3", "/app/brain.py"]
|
|
|
|
| 1 |
FROM python:3.11-slim

# curl is required by the HEALTHCHECK below.
RUN apt-get update && apt-get install -y --no-install-recommends curl && rm -rf /var/lib/apt/lists/*

WORKDIR /app
COPY brain.py /app/

# FIX: the previous revision ran as a non-root user, which this revision
# dropped (container ran as root). Recreate it explicitly — slim base
# images ship no 'user' account — and make /app (and ./data, which
# brain.py writes into) owned by it so runtime writes still succeed.
RUN useradd -m -u 1000 user && mkdir -p /app/data && chown -R user /app
USER user

EXPOSE 7860
HEALTHCHECK --interval=30s --timeout=10s CMD curl -sf http://localhost:7860/health || exit 1
CMD ["python3", "/app/brain.py"]
|
README.md
CHANGED
|
@@ -1,11 +1,7 @@
|
|
| 1 |
---
|
| 2 |
title: MEGAMIND VECTOR
|
| 3 |
emoji: 🧠
|
| 4 |
-
colorFrom: purple
|
| 5 |
-
colorTo: blue
|
| 6 |
sdk: docker
|
| 7 |
-
pinned: false
|
| 8 |
---
|
| 9 |
-
|
| 10 |
# MEGAMIND VECTOR
|
| 11 |
-
|
|
|
|
| 1 |
---
|
| 2 |
title: MEGAMIND VECTOR
|
| 3 |
emoji: 🧠
|
|
|
|
|
|
|
| 4 |
sdk: docker
|
|
|
|
| 5 |
---
|
|
|
|
| 6 |
# MEGAMIND VECTOR
|
| 7 |
+
Federation Node
|
brain.py
CHANGED
|
@@ -1,16 +1,10 @@
|
|
| 1 |
#!/usr/bin/env python3
|
| 2 |
-
"""MEGAMIND Brain - Python Edition for HuggingFace Spaces"""
|
| 3 |
import os, json, sqlite3, hashlib, time
|
| 4 |
from http.server import HTTPServer, BaseHTTPRequestHandler
|
| 5 |
from urllib.parse import urlparse
|
| 6 |
-
|
| 7 |
PORT = int(os.environ.get('PORT', 7860))
|
| 8 |
-
DATA_DIR = os.environ.get('
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
db = None
|
| 12 |
-
stats = {'tensors': 0, 'patterns': 0, 'queries': 0, 'start': time.time()}
|
| 13 |
-
|
| 14 |
def init_db():
|
| 15 |
global db
|
| 16 |
os.makedirs(DATA_DIR, exist_ok=True)
|
|
@@ -18,45 +12,29 @@ def init_db():
|
|
| 18 |
db.execute('CREATE TABLE IF NOT EXISTS chunks (id INTEGER PRIMARY KEY, hash TEXT UNIQUE, content TEXT, ts REAL)')
|
| 19 |
db.execute('CREATE TABLE IF NOT EXISTS tensors (id INTEGER PRIMARY KEY, name TEXT, source TEXT, meta TEXT, ts REAL)')
|
| 20 |
db.commit()
|
| 21 |
-
# Count existing
|
| 22 |
stats['patterns'] = db.execute('SELECT COUNT(*) FROM chunks').fetchone()[0]
|
| 23 |
stats['tensors'] = db.execute('SELECT COUNT(*) FROM tensors').fetchone()[0]
|
| 24 |
-
|
| 25 |
class Handler(BaseHTTPRequestHandler):
|
| 26 |
def log_message(self, *a): pass
|
| 27 |
def do_GET(self):
|
| 28 |
p = urlparse(self.path).path
|
| 29 |
if p == '/health': self.json({'status': 'healthy'})
|
| 30 |
-
elif p == '/status': self.json({'node': NODE_ID, 'status': 'online', 'tensors_learned': stats['tensors'],
|
| 31 |
-
|
| 32 |
-
elif p == '/': self.json({'name': 'MEGAMIND Brain', 'node': NODE_ID, 'version': '1.0-py'})
|
| 33 |
-
else: self.send_error(404)
|
| 34 |
def do_POST(self):
|
| 35 |
-
p = urlparse(self.path).path
|
| 36 |
body = self.rfile.read(int(self.headers.get('Content-Length', 0))).decode()
|
| 37 |
data = json.loads(body) if body else {}
|
|
|
|
| 38 |
if p == '/learn':
|
| 39 |
c = data.get('content', '')[:10000]
|
| 40 |
h = hashlib.sha256(c.encode()).hexdigest()[:16]
|
| 41 |
db.execute('INSERT OR IGNORE INTO chunks (hash, content, ts) VALUES (?, ?, ?)', (h, c, time.time()))
|
| 42 |
db.commit(); stats['patterns'] += 1
|
| 43 |
-
self.json({'status': 'learned', 'hash': h})
|
| 44 |
-
elif p == '/learn-tensor':
|
| 45 |
-
db.execute('INSERT INTO tensors (name, source, meta, ts) VALUES (?, ?, ?, ?)',
|
| 46 |
-
(data.get('name',''), data.get('source',''), json.dumps(data.get('metadata',{})), time.time()))
|
| 47 |
-
db.commit(); stats['tensors'] += 1
|
| 48 |
self.json({'status': 'learned'})
|
| 49 |
-
|
| 50 |
-
stats['queries'] += 1
|
| 51 |
-
r = [row[0][:500] for row in db.execute('SELECT content FROM chunks WHERE content LIKE ? LIMIT 10', (f"%{data.get('query','')}%",))]
|
| 52 |
-
self.json({'results': r, 'count': len(r)})
|
| 53 |
-
else: self.send_error(404)
|
| 54 |
def json(self, d):
|
| 55 |
self.send_response(200); self.send_header('Content-Type', 'application/json'); self.end_headers()
|
| 56 |
self.wfile.write(json.dumps(d).encode())
|
| 57 |
-
|
| 58 |
if __name__ == '__main__':
|
| 59 |
-
print(f'MEGAMIND Brain [{NODE_ID}]
|
| 60 |
-
init_db()
|
| 61 |
-
print(f'Loaded: {stats["tensors"]} tensors, {stats["patterns"]} patterns')
|
| 62 |
HTTPServer(('0.0.0.0', PORT), Handler).serve_forever()
|
|
|
|
| 1 |
#!/usr/bin/env python3
|
|
|
|
| 2 |
import os, json, sqlite3, hashlib, time
|
| 3 |
from http.server import HTTPServer, BaseHTTPRequestHandler
|
| 4 |
from urllib.parse import urlparse
|
|
|
|
| 5 |
# Port the HTTP server binds to (Spaces may inject PORT; default 7860).
PORT = int(os.environ.get('PORT', 7860))

# Local data directory and a node identifier derived from the Space ID.
DATA_DIR = './data'
NODE_ID = os.environ.get('SPACE_ID', 'hf-brain')

# Module-level SQLite handle (opened later by init_db) and runtime counters.
db = None
stats = {'tensors': 0, 'patterns': 0, 'queries': 0, 'start': time.time()}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
def init_db():
    """Open (or create) the SQLite store under DATA_DIR and resume counters.

    Side effects: rebinds the module-level ``db`` connection, creates the
    ``chunks`` and ``tensors`` tables if absent, and reloads ``stats``
    counts from any previously-persisted rows.
    """
    global db
    os.makedirs(DATA_DIR, exist_ok=True)
    # BUG FIX: as visible in this revision, db was never assigned a
    # connection before the execute() calls below, which would fail on
    # db=None. Open it here. NOTE(review): the original connect line is not
    # visible in this chunk — confirm the database filename ('brain.db' is
    # an assumption). check_same_thread=False keeps the handle usable if
    # the server is ever switched to a threading variant.
    db = sqlite3.connect(os.path.join(DATA_DIR, 'brain.db'), check_same_thread=False)
    db.execute('CREATE TABLE IF NOT EXISTS chunks (id INTEGER PRIMARY KEY, hash TEXT UNIQUE, content TEXT, ts REAL)')
    db.execute('CREATE TABLE IF NOT EXISTS tensors (id INTEGER PRIMARY KEY, name TEXT, source TEXT, meta TEXT, ts REAL)')
    db.commit()
    # Resume counters from persisted data so restarts don't reset them.
    stats['patterns'] = db.execute('SELECT COUNT(*) FROM chunks').fetchone()[0]
    stats['tensors'] = db.execute('SELECT COUNT(*) FROM tensors').fetchone()[0]
|
|
|
|
| 17 |
class Handler(BaseHTTPRequestHandler):
    """Minimal JSON HTTP API.

    GET:  /health (liveness), /status (counters), anything else → node info.
    POST: /learn stores a content chunk, anything else → empty JSON.
    All responses are 200 application/json via self.json().
    """

    def log_message(self, *a):
        # Silence BaseHTTPRequestHandler's per-request stderr logging.
        pass

    def do_GET(self):
        p = urlparse(self.path).path
        if p == '/health':
            self.json({'status': 'healthy'})
        elif p == '/status':
            self.json({'node': NODE_ID, 'status': 'online',
                       'tensors_learned': stats['tensors'],
                       'patterns_learned': stats['patterns']})
        else:
            self.json({'name': 'MEGAMIND', 'node': NODE_ID})

    def do_POST(self):
        body = self.rfile.read(int(self.headers.get('Content-Length', 0))).decode()
        data = json.loads(body) if body else {}
        p = urlparse(self.path).path
        if p == '/learn':
            c = data.get('content', '')[:10000]  # cap stored content at 10k chars
            h = hashlib.sha256(c.encode()).hexdigest()[:16]
            cur = db.execute('INSERT OR IGNORE INTO chunks (hash, content, ts) VALUES (?, ?, ?)', (h, c, time.time()))
            db.commit()
            # BUG FIX: INSERT OR IGNORE skips duplicate hashes; the previous
            # unconditional += 1 drifted the counter upward on duplicates.
            # cursor.rowcount is 1 only when a row was actually inserted.
            stats['patterns'] += cur.rowcount
            self.json({'status': 'learned'})
        else:
            self.json({})

    def json(self, d):
        """Serialize d as a 200 application/json response."""
        self.send_response(200)
        self.send_header('Content-Type', 'application/json')
        self.end_headers()
        self.wfile.write(json.dumps(d).encode())
|
|
|
|
| 38 |
if __name__ == '__main__':
|
| 39 |
+
print(f'MEGAMIND Brain [{NODE_ID}]'); init_db()
|
|
|
|
|
|
|
| 40 |
HTTPServer(('0.0.0.0', PORT), Handler).serve_forever()
|