# NOTE(review): scraped page chrome (Hugging Face Spaces header, file-size line
# and the copied line-number gutter) removed — it was not part of the source file.
from fastapi import FastAPI, HTTPException, BackgroundTasks
from pydantic import BaseModel
import uvicorn
import os
import time
import json
import pandas as pd
from typing import List, Optional, Dict, Any
from neurohack_memory import MemorySystem
from neurohack_memory.utils import load_yaml
# -----------------------------------------------------------------------------
# SETUP
# -----------------------------------------------------------------------------
app = FastAPI(title="NeuroHack Memory Backend", version="2.0.0")

# Process-wide MemorySystem singleton, created lazily by get_system().
_SYSTEM_INSTANCE = None

print("π Server: Loading System Module...")
def get_system():
    """Return the process-wide MemorySystem singleton, creating it on demand.

    First call: loads ``config.yaml``, ensures the artifacts directory and a
    default SQLite storage path, then builds the (expensive) MemorySystem.
    Later calls return the cached instance.

    Returns:
        MemorySystem: the shared, fully initialized memory system.
    """
    global _SYSTEM_INSTANCE
    if _SYSTEM_INSTANCE is None:
        print("π Server: Initializing MemorySystem (This may take time)...")
        # MemorySystem / load_yaml are already imported at module top; the
        # previous duplicate function-local imports were redundant.
        cfg = load_yaml("config.yaml")
        print("π Server: Config Loaded. Creating Instance...")
        # Ensure artifacts directory exists for the SQLite store.
        os.makedirs("artifacts", exist_ok=True)
        # Respect config.yaml storage path if present, else default.
        cfg.setdefault("storage", {}).setdefault("path", "artifacts/memory.sqlite")
        # BUG FIX: the instance was previously bound to a local (shadowing the
        # stdlib `sys` name) and never cached, so a second call hit
        # `return sys` with an unbound local -> NameError.
        _SYSTEM_INSTANCE = MemorySystem(cfg)
        print("β Memory System Online")  # banner emoji was mojibake'd in source
    return _SYSTEM_INSTANCE
# -----------------------------------------------------------------------------
# MODELS
# -----------------------------------------------------------------------------
class QueryRequest(BaseModel):
    """Request body for POST /query."""
    # Natural-language query used for memory retrieval.
    query: str
class InjectRequest(BaseModel):
    """Request body for POST /inject."""
    # Single turn of text to commit into memory.
    text: str
class SeedRequest(BaseModel):
    """Request body for POST /admin/seed."""
    # Batch of turns to ingest sequentially.
    texts: List[str]
# -----------------------------------------------------------------------------
# ENDPOINTS
# -----------------------------------------------------------------------------
@app.on_event("startup")
async def startup_event():
    """Warm the MemorySystem singleton at boot so the first request is fast."""
    get_system()
@app.get("/")
def read_root():
    """Health-check endpoint: reports that the console backend is up."""
    payload = {"status": "online", "system": "NeuroHack Memory Console v2.0"}
    return payload
@app.post("/query")
async def query_memory(req: QueryRequest):
    """Retrieve memories relevant to ``req.query``.

    Returns:
        dict: ``{"retrieved": [...], "context": str}`` where each hit is a
        JSON-safe ``{"score": float, "memory": {...}}`` mapping.

    Raises:
        HTTPException: 500 wrapping any internal failure.
    """
    try:
        t_start = time.perf_counter()
        s = get_system()
        res = s.retrieve(req.query)
        if res and "retrieved" in res:
            # MemoryCell objects are not JSON serializable directly; flatten
            # each hit into plain dicts for transport.
            serialized_hits = [
                {
                    "score": hit.score,
                    "memory": {
                        "value": hit.memory.value,
                        "key": hit.memory.key,
                        "confidence": hit.memory.confidence,
                        "type": hit.memory.type.value,
                        "source_turn": hit.memory.source_turn,
                        "id": hit.memory.memory_id,
                    },
                }
                for hit in res["retrieved"]
            ]
            dur_ms = (time.perf_counter() - t_start) * 1000
            print(f"π Query Processed in {dur_ms:.2f}ms")
            return {
                "retrieved": serialized_hits,
                "context": res.get("context", ""),
            }
        # BUG FIX: keep the response shape consistent — the empty path
        # previously omitted the "context" key entirely.
        return {"retrieved": [], "context": ""}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@app.post("/inject")
async def inject_memory(req: InjectRequest):
    """Commit a single turn of text into long-term memory."""
    try:
        system = get_system()
        # process_turn is an async coroutine on the memory system.
        await system.process_turn(req.text)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
    return {"status": "committed", "text": req.text}
@app.post("/admin/seed")
async def seed_data(req: SeedRequest):
    """Bulk-ingest a batch of turns into memory, one process_turn per text."""
    try:
        system = get_system()
        for entry in req.texts:
            await system.process_turn(entry)
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
    return {"status": "seeded", "count": len(req.texts)}
@app.post("/admin/clear")
def clear_db():
    """Wipe every stored memory and reset in-process state to match."""
    try:
        system = get_system()
        # Delete rows and persist, then drop the in-memory cache and turn
        # counter so the live process agrees with the emptied database.
        system.store.conn.execute("DELETE FROM memories")
        system.store.conn.commit()
        system._memory_cache.clear()
        system.turn = 0
    except Exception as exc:
        raise HTTPException(status_code=500, detail=str(exc))
    return {"status": "cleared"}
@app.get("/stats")
def get_stats():
    """Aggregate memory statistics for the dashboard.

    Returns:
        dict: total memory count, count of memories used at least once
        ("conflicts_resolved"), per-type distribution, and a per-row
        (confidence, type) snapshot for live charting. Falls back to zeroed
        stats on any failure (e.g. locked or empty DB).
    """
    try:
        s = get_system()
        conn = s.store.conn
        # Per-type counts.
        df_types = pd.read_sql_query(
            "SELECT type, COUNT(*) as count FROM memories GROUP BY type", conn
        )
        total = df_types["count"].sum() if not df_types.empty else 0
        # Memories that have actually been retrieved/used at least once.
        cur = conn.cursor()
        cur.execute("SELECT COUNT(*) FROM memories WHERE use_count > 0")
        resolved = cur.fetchone()[0]
        # Live (confidence, type) distribution.
        df_live = pd.read_sql_query("SELECT confidence, type FROM memories", conn)
        return {
            "total_memories": int(total),
            "conflicts_resolved": int(resolved),
            "type_distribution": df_types.to_dict(orient="records"),
            "live_stats": df_live.to_dict(orient="records"),
        }
    except Exception:
        # Deliberate best-effort: return empty safe stats if DB locked or empty.
        # (Unused exception binding removed.)
        return {
            "total_memories": 0,
            "conflicts_resolved": 0,
            "type_distribution": [],
            "live_stats": [],
        }
@app.get("/history/evolution")
def get_evolution(key: Optional[str] = None):
    """Return memory rows newest-first, optionally filtered to a single key.

    Args:
        key: if given, only rows whose ``key`` column matches exactly.

    Returns:
        list[dict]: one record per memory row; empty list on any failure
        (best-effort endpoint).
    """
    try:
        s = get_system()
        conn = s.store.conn
        base = (
            "SELECT memory_id, type, key, value, confidence, source_turn "
            "FROM memories"
        )
        if key:
            # PERF/robustness: filter in SQL with a bound parameter instead of
            # fetching every row and filtering the DataFrame afterwards.
            df = pd.read_sql_query(
                base + " WHERE key = ? ORDER BY source_turn DESC",
                conn,
                params=(key,),
            )
        else:
            df = pd.read_sql_query(base + " ORDER BY source_turn DESC", conn)
        return df.to_dict(orient="records")
    except Exception:
        return []
if __name__ == "__main__":
    # Dev entry point: serve on all interfaces, port 8000, with auto-reload.
    uvicorn.run("server:app", host="0.0.0.0", port=8000, reload=True)
# Trigger Reload