Update app.py
app.py CHANGED
@@ -30,7 +30,9 @@ class TTSRequest(BaseModel):
     text: str


-
+# ⚠️ HF requires persistent storage under /data
+UPLOAD_DIR = "/data/uploads"
+os.makedirs(UPLOAD_DIR, exist_ok=True)

 # =====================================================
 # HEALTH CHECK
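The new upload directory assumes the Space has persistent storage enabled, which is mounted at /data; without it, writes under /data fail. A hypothetical, more defensive variant (not part of this commit) that falls back to a temporary directory when /data is not writable:

import os
import tempfile

PERSISTENT_ROOT = "/data"  # mounted only when persistent storage is enabled

if os.path.isdir(PERSISTENT_ROOT) and os.access(PERSISTENT_ROOT, os.W_OK):
    UPLOAD_DIR = os.path.join(PERSISTENT_ROOT, "uploads")
else:
    # Fallback for Spaces without persistent storage: uploads survive only
    # until the container restarts.
    UPLOAD_DIR = os.path.join(tempfile.gettempdir(), "uploads")

os.makedirs(UPLOAD_DIR, exist_ok=True)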
@@ -47,17 +49,15 @@ def health():

 @app.post("/upload")
 async def upload_file(file: UploadFile = File(...)):
-    os.makedirs(UPLOAD_DIR, exist_ok=True)
-
     file_path = os.path.join(UPLOAD_DIR, file.filename)

     with open(file_path, "wb") as f:
         f.write(await file.read())

-    # Update
+    # Update vector store
     update_retriever(file_path)

-    # 🔥 Rebuild LangGraph so RAG
+    # 🔥 Rebuild LangGraph so RAG becomes active
     rebuild_graph()

     return {
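Since the route takes UploadFile = File(...), it expects a standard multipart form upload. A minimal client-side sketch (the Space URL and filename are placeholders):

import requests

SPACE_URL = "https://<your-space>.hf.space"  # placeholder

with open("document.pdf", "rb") as fh:
    resp = requests.post(
        f"{SPACE_URL}/upload",
        files={"file": ("document.pdf", fh, "application/pdf")},
    )

print(resp.json())  # whatever the endpoint's `return { ... }` sends back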
@@ -74,6 +74,8 @@ async def upload_file(file: UploadFile = File(...)):
 async def chat(message: str, session_id: str = "default"):

     async def event_generator():
+        buffer = ""
+
         async for chunk in app_graph.astream(
             {"messages": [HumanMessage(content=message)]},
             config={"configurable": {"thread_id": session_id}},
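The thread_id passed in the config above only has an effect if app_graph was compiled with a checkpointer. A rough, hypothetical sketch of what rebuild_graph() could look like under that assumption (the node is a stub; the real node and retriever wiring are not shown in this commit):

from langchain_core.messages import AIMessage
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, MessagesState, START, END

checkpointer = MemorySaver()  # keeps per-thread_id history in memory


def chatbot_node(state: MessagesState):
    # Stub node: the real app would call the LLM with retrieved context here.
    return {"messages": [AIMessage(content="...")]}


def rebuild_graph():
    # Recompile the graph so a freshly updated retriever is picked up,
    # reusing the same checkpointer so existing thread_ids keep their history.
    global app_graph
    builder = StateGraph(MessagesState)
    builder.add_node("chatbot", chatbot_node)
    builder.add_edge(START, "chatbot")
    builder.add_edge("chatbot", END)
    app_graph = builder.compile(checkpointer=checkpointer)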
@@ -85,10 +87,15 @@ async def chat(message: str, session_id: str = "default"):
             msg = chunk[0] if isinstance(chunk, tuple) else chunk

             if hasattr(msg, "content") and msg.content:
-
-
-
+                buffer += msg.content
+
+                # Flush every ~150 characters (prevents broken tokens)
+                if len(buffer) > 150:
+                    yield f"data: {buffer.strip()}\n\n"
+                    buffer = ""

+        if buffer:
+            yield f"data: {buffer.strip()}\n\n"

     return StreamingResponse(
         event_generator(),
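The generator yields data: ...\n\n frames, i.e. a server-sent-events style stream. A minimal consumer sketch; the Space URL is a placeholder, the POST /chat route is an assumption based on the signature shown above, and it assumes the StreamingResponse is served as text/event-stream:

import requests

SPACE_URL = "https://<your-space>.hf.space"  # placeholder

with requests.post(
    f"{SPACE_URL}/chat",  # assumed route and method
    params={"message": "What does the uploaded document say?", "session_id": "demo"},
    stream=True,
) as resp:
    for line in resp.iter_lines(decode_unicode=True):
        if line.startswith("data: "):
            print(line[len("data: "):], flush=True)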