Update app.py
Browse files
app.py
CHANGED
|
@@ -1,26 +1,33 @@
|
|
| 1 |
-
import os,
|
| 2 |
-
from typing import
|
| 3 |
-
from fastapi import FastAPI,
|
| 4 |
-
from fastapi.responses import JSONResponse
|
| 5 |
-
from fastapi.middleware.cors import CORSMiddleware
|
| 6 |
|
| 7 |
-
|
|
|
|
| 8 |
FILES_DIR = os.path.join(BASE_DIR, "files")
|
| 9 |
LOGS_DIR = os.path.join(FILES_DIR, "logs")
|
| 10 |
-
|
| 11 |
-
os.makedirs(d, exist_ok=True)
|
| 12 |
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
with open(os.path.join(LOGS_DIR, "events.jsonl"), "a", encoding="utf-8") as f:
|
| 16 |
-
f.write(json.dumps(rec, ensure_ascii=False) + "\n")
|
| 17 |
|
| 18 |
-
app = FastAPI(title="Brain
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
|
| 25 |
@app.get("/health")
|
| 26 |
def health():
|
|
@@ -32,32 +39,47 @@ def health():
|
|
| 32 |
"logs_dir": LOGS_DIR,
|
| 33 |
}
|
| 34 |
|
| 35 |
-
@app.post("/
|
| 36 |
-
async def
|
| 37 |
-
|
| 38 |
-
log_event("echo_http_in", payload)
|
| 39 |
-
out = {"ok": True, "ts": time.time(), "echo": payload}
|
| 40 |
-
log_event("echo_http_out", out)
|
| 41 |
-
return JSONResponse(out)
|
| 42 |
-
|
| 43 |
-
@app.websocket("/ws/echo")
|
| 44 |
-
async def ws_echo(ws: WebSocket):
|
| 45 |
-
await ws.accept()
|
| 46 |
-
await ws.send_json({"event": "hello", "msg": "echo socket ready"})
|
| 47 |
try:
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 55 |
try:
|
| 56 |
-
|
| 57 |
-
|
|
|
|
|
|
|
|
|
|
| 58 |
pass
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import asyncio
import json
import os
import time
from collections import deque
from typing import Any, AsyncGenerator, Dict

from fastapi import FastAPI, Request, Response
from fastapi.responses import JSONResponse, StreamingResponse
|
|
|
|
| 5 |
|
| 6 |
+
# Directories (HF Spaces writable path)
BASE_DIR = os.environ.get("BASE_DIR", "/tmp/brain_app")
FILES_DIR = os.path.join(BASE_DIR, "files")
LOGS_DIR = os.path.join(FILES_DIR, "logs")
EVENTS_FILE = os.path.join(LOGS_DIR, "events.jsonl")

# Make sure the whole directory chain exists before the app starts serving.
for p in (BASE_DIR, FILES_DIR, LOGS_DIR):
    os.makedirs(p, exist_ok=True)

app = FastAPI(title="Brain Skeleton", version="1.0.0")

# Bounded in-memory queue feeding /stream/logs clients. The bound prevents
# unbounded memory growth when no stream client is connected (an unbounded
# Queue would accumulate every logged event forever); write_event() already
# handles asyncio.QueueFull by dropping the event, so this makes that
# handler meaningful instead of dead code.
# NOTE(review): a single shared Queue hands each item to exactly one
# consumer, so concurrent /stream/logs clients would split the events
# between them rather than each seeing all of them — confirm single-client
# use, or move to per-client queues.
log_queue: "asyncio.Queue[Dict[str, Any]]" = asyncio.Queue(maxsize=1000)
|
| 19 |
+
|
| 20 |
+
def write_event(event: Dict[str, Any]) -> None:
    """Persist *event* to the JSONL log file and offer it to the live stream.

    A ``ts`` timestamp is added when the caller did not supply one. The
    event is appended to ``EVENTS_FILE`` as a single JSON line, then pushed
    onto the module-level ``log_queue`` for any connected /stream/logs
    client.
    """
    if "ts" not in event:
        event["ts"] = time.time()
    # The logs directory could have been removed since startup; recreate it.
    os.makedirs(LOGS_DIR, exist_ok=True)
    serialized = json.dumps(event, ensure_ascii=False)
    with open(EVENTS_FILE, "a", encoding="utf-8") as sink:
        sink.write(serialized + "\n")
    # Non-blocking enqueue — this runs in a sync context, so we cannot await.
    try:
        log_queue.put_nowait(event)
    except asyncio.QueueFull:
        # Stream consumers are behind (or absent); drop rather than block.
        pass
|
| 31 |
|
| 32 |
@app.get("/health")
|
| 33 |
def health():
|
|
|
|
| 39 |
"logs_dir": LOGS_DIR,
|
| 40 |
}
|
| 41 |
|
| 42 |
+
@app.post("/process")
async def process(req: Request):
    """Echo the JSON request body back to the caller and record it.

    The body is logged via write_event() under type ``process``. A body
    that is not valid JSON yields a 400 response with ``ok: false``.
    """
    try:
        payload = await req.json()
    except Exception:
        return JSONResponse(
            {"ok": False, "error": "Invalid JSON body"}, status_code=400
        )

    write_event({"type": "process", "data": payload})
    return {"ok": True, "received": payload}
|
| 53 |
+
|
| 54 |
+
@app.get("/stream/logs")
async def stream_logs() -> StreamingResponse:
    """Server-Sent Events stream of log events (one per line).

    On connect the client first receives up to the last 50 persisted events
    from ``EVENTS_FILE``, then live events as write_event() enqueues them.

    NOTE(review): events are pulled from the shared module-level queue, so
    with several concurrent clients each live event reaches only one of
    them — confirm single-client use or switch to per-client queues.
    """
    async def gen() -> AsyncGenerator[bytes, None]:
        # Replay a bounded tail of the persisted log so the client sees data
        # immediately. deque(f, 50) streams the file keeping only the last
        # 50 lines, instead of readlines() materializing the whole file.
        try:
            if os.path.exists(EVENTS_FILE):
                with open(EVENTS_FILE, "r", encoding="utf-8") as f:
                    for line in deque(f, 50):
                        yield b"data: " + line.encode("utf-8").rstrip(b"\n") + b"\n\n"
        except Exception:
            # Tail replay is best-effort; never kill the stream over it.
            pass

        # Live phase: block on the queue and forward each event as SSE.
        while True:
            event = await log_queue.get()
            line = json.dumps(event, ensure_ascii=False)
            yield b"data: " + line.encode("utf-8") + b"\n\n"

    headers = {"Cache-Control": "no-cache", "Connection": "keep-alive"}
    return StreamingResponse(gen(), media_type="text/event-stream", headers=headers)
|
| 75 |
+
|
| 76 |
+
@app.post("/log_error")
async def log_error(req: Request):
    """Record a client-reported error in the event log/stream.

    The JSON body is stored verbatim under ``data`` with type ``error``.
    Responds 400 when the body is not parseable JSON.
    """
    try:
        body = await req.json()
    except Exception:
        return JSONResponse(
            {"ok": False, "error": "Invalid JSON body"}, status_code=400
        )

    write_event({"type": "error", "data": body})
    return {"ok": True}
|