junaid17 commited on
Commit
fdd58a7
·
verified ·
1 Parent(s): 367fd43

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -36
app.py CHANGED
@@ -1,14 +1,12 @@
1
- from fastapi import FastAPI, UploadFile, File, HTTPException
2
  from fastapi.middleware.cors import CORSMiddleware
3
  from fastapi.responses import StreamingResponse, FileResponse
4
- from pydantic import BaseModel
5
- import os
6
- import asyncio
7
-
8
- from chatbot import app as app_graph
9
  from langchain_core.messages import HumanMessage
 
10
  from tools import update_retriever
11
- from utils import TTS, STT
 
 
12
 
13
  app = FastAPI()
14
 
@@ -24,54 +22,37 @@ UPLOAD_DIR = "uploads"
24
  os.makedirs(UPLOAD_DIR, exist_ok=True)
25
 
26
 
27
- class TTSRequest(BaseModel):
28
- text: str
29
-
30
-
31
@app.get("/")
def health():
    """Health-check endpoint: confirms the API process is serving requests."""
    payload = {"status": "API is running"}
    return payload
34
 
35
 
36
@app.post("/upload")
async def upload_file(file: UploadFile = File(...)):
    """Persist an uploaded document and refresh the retriever index over it.

    Returns a JSON payload echoing the stored filename on success.

    Raises:
        HTTPException(400): if the upload carries no usable filename.
    """
    # file.filename is client-controlled: keep only the final path component so
    # a crafted name like "../../etc/passwd" cannot escape UPLOAD_DIR.
    safe_name = os.path.basename(file.filename or "")
    if not safe_name:
        raise HTTPException(status_code=400, detail="A non-empty filename is required.")

    file_path = os.path.join(UPLOAD_DIR, safe_name)

    with open(file_path, "wb") as f:
        f.write(await file.read())

    # Rebuild the retrieval index so the new document is immediately queryable.
    update_retriever(file_path)

    return {
        "status": "success",
        "filename": file.filename
    }
49
 
50
 
51
@app.post("/chat")
async def chat(message: str, session_id: str = "default"):
    """Stream the chatbot's reply to *message* as Server-Sent Events.

    *session_id* selects the conversation thread inside the LangGraph
    checkpointer, so repeated calls with the same id share history.
    """

    async def sse_events():
        # Relay token chunks from the graph to the client as they arrive.
        graph_input = {"messages": [HumanMessage(content=message)]}
        graph_config = {"configurable": {"thread_id": session_id}}
        async for chunk in app_graph.astream(
            graph_input,
            config=graph_config,
            stream_mode="messages",
        ):
            if not chunk:
                continue
            # In "messages" mode the graph may yield (message, metadata) tuples.
            msg = chunk[0] if isinstance(chunk, tuple) else chunk
            if not (hasattr(msg, "content") and msg.content):
                continue
            yield f"data: {msg.content}\n\n"
            # Brief pause gives the event loop a chance to flush each event.
            await asyncio.sleep(0.01)

    return StreamingResponse(
        sse_events(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",
        },
    )
75
 
76
 
77
  @app.post("/stt")
 
1
import asyncio
import os

from fastapi import FastAPI, File, HTTPException, UploadFile
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, StreamingResponse
from langchain_core.messages import HumanMessage

from chatbot import app as app_graph
from tools import update_retriever
from utils import STT, TTS
10
 
11
  app = FastAPI()
12
 
 
22
  os.makedirs(UPLOAD_DIR, exist_ok=True)
23
 
24
 
 
 
 
 
25
@app.get("/")
def health():
    """Liveness probe for monitoring: reports that the service is up."""
    return dict(status="running")
28
 
29
 
30
@app.post("/upload")
async def upload_file(file: UploadFile = File(...)):
    """Persist an uploaded document and refresh the retriever index over it.

    Returns a JSON payload echoing the original client filename.

    Raises:
        HTTPException(400): if the upload carries no usable filename.
    """
    # The client controls file.filename: keep only the final path component so
    # a crafted name like "../../etc/passwd" cannot escape UPLOAD_DIR.
    safe_name = os.path.basename(file.filename or "")
    if not safe_name:
        raise HTTPException(status_code=400, detail="A non-empty filename is required.")

    path = os.path.join(UPLOAD_DIR, safe_name)

    with open(path, "wb") as f:
        f.write(await file.read())

    # Refresh the retrieval index so the new document is queryable immediately.
    update_retriever(path)

    return {"status": "uploaded", "file": file.filename}
 
 
 
40
 
41
 
42
@app.post("/chat")
async def chat(message: str, session_id: str = "default"):
    """Stream the chatbot reply to *message* as newline-delimited plain text.

    *session_id* picks the LangGraph thread, so calls sharing an id share
    conversation history.
    """

    async def token_lines():
        payload = {"messages": [HumanMessage(content=message)]}
        run_config = {"configurable": {"thread_id": session_id}}
        async for chunk in app_graph.astream(
            payload,
            config=run_config,
            stream_mode="messages",
        ):
            # In "messages" mode the graph may yield (message, metadata) tuples.
            msg = chunk[0] if isinstance(chunk, tuple) else chunk
            if hasattr(msg, "content") and msg.content:
                yield msg.content + "\n"

    return StreamingResponse(token_lines(), media_type="text/plain")
 
 
 
 
 
 
 
 
 
 
56
 
57
 
58
  @app.post("/stt")