viboognesh committed on
Commit
a749b20
·
verified ·
1 Parent(s): fefc214

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +65 -7
main.py CHANGED
@@ -1,5 +1,8 @@
1
- from fastapi import FastAPI, File, UploadFile, Depends, HTTPException
2
  from fastapi.middleware.cors import CORSMiddleware
 
 
 
3
 
4
  from typing import List, Dict, Any
5
  from io import BytesIO, StringIO
@@ -182,6 +185,29 @@ class Conversational_Chain:
182
  return ChatPromptTemplate.from_messages(messages)
183
 
184
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
185
  app = FastAPI()
186
 
187
  origins = ["https://viboognesh-react-chat.static.hf.space"]
@@ -195,12 +221,30 @@ app.add_middleware(
195
  allow_headers=["*"],
196
  )
197
 
 
 
 
 
 
 
 
 
 
 
198
 
199
- app.state.conversation_chain = None
 
 
 
 
 
 
 
 
200
 
201
 
202
  @app.post("/upload_files/")
203
- async def upload_files(files: List[UploadFile] = File(...)):
204
  file_details = []
205
  try:
206
  for file in files:
@@ -211,10 +255,18 @@ async def upload_files(files: List[UploadFile] = File(...)):
211
  except Exception as e:
212
  raise HTTPException(status_code=400, detail=str(e))
213
 
 
 
 
 
 
214
  try:
215
- app.state.conversational_chain = Conversational_Chain(
216
  file_details
217
  ).create_conversational_chain()
 
 
 
218
  print("conversational_chain_manager created")
219
  except Exception as e:
220
  raise HTTPException(status_code=500, detail=str(e))
@@ -224,17 +276,23 @@ async def upload_files(files: List[UploadFile] = File(...)):
224
 
225
  @app.get("/predict/")
226
  async def predict(query: str):
 
 
 
 
 
227
  try:
228
- if app.state.conversation_chain is None:
 
229
  system_prompt = "Answer the question and also ask the user to upload files to ask questions from the files.\n"
230
  llm_model = ChatOpenAI()
231
  response = llm_model.invoke(system_prompt + query)
232
  answer = response.content
233
  else:
234
- response = app.state.conversation_chain.invoke(query)
235
  answer = response["answer"]
236
  except Exception as e:
237
  raise HTTPException(status_code=500, detail=str(e))
238
 
239
  print("predict called")
240
- return {"answer": answer}
 
1
+ from fastapi import FastAPI, File, UploadFile, HTTPException, Response, Request
2
  from fastapi.middleware.cors import CORSMiddleware
3
+ import uuid
4
+ from datetime import datetime, timedelta
5
+ import asyncio
6
 
7
  from typing import List, Dict, Any
8
  from io import BytesIO, StringIO
 
185
  return ChatPromptTemplate.from_messages(messages)
186
 
187
 
188
class UserSessionManager:
    """Track one conversational chain per user, keyed by the user_id cookie.

    ``sessions`` maps user_id -> conversational chain (``None`` until the user
    uploads files); ``last_request_time`` maps user_id -> datetime of the
    user's most recent request, used to expire idle sessions.
    """

    def __init__(self):
        self.sessions = {}
        self.last_request_time = {}

    def get_session(self, user_id: str):
        """Return the stored chain for user_id, creating an empty slot on first sight."""
        if user_id not in self.sessions:
            self.sessions[user_id] = None
            # BUG FIX: the original assigned datetime.now() to the whole
            # last_request_time dict here (missing the [user_id] index),
            # destroying every other user's timestamp.
            self.last_request_time[user_id] = datetime.now()
        return self.sessions[user_id]

    def set_session(self, user_id: str, conversational_chain):
        """Store a freshly built chain for user_id and mark the user active now."""
        self.sessions[user_id] = conversational_chain
        self.last_request_time[user_id] = datetime.now()

    def delete_inactive_sessions(self, inactive_period: timedelta):
        """Drop every session whose last request is older than inactive_period.

        Uses ``pop`` on ``sessions`` because the HTTP middleware may record a
        timestamp for a user_id that never uploaded files, so the id can exist
        in ``last_request_time`` without a matching ``sessions`` entry (the
        original ``del`` raised KeyError in that case).
        """
        current_time = datetime.now()
        for user_id, last_seen in list(self.last_request_time.items()):
            if current_time - last_seen > inactive_period:
                self.sessions.pop(user_id, None)
                del self.last_request_time[user_id]
209
+
210
+
211
  app = FastAPI()
212
 
213
  origins = ["https://viboognesh-react-chat.static.hf.space"]
 
221
  allow_headers=["*"],
222
  )
223
 
224
+ user_session_manager = UserSessionManager()
225
+
226
+
227
@app.middleware("http")
async def update_last_request_time(request: Request, call_next):
    """Refresh the requesting user's activity timestamp, then pass the request on.

    Users without a user_id cookie are left untouched; their timestamp is first
    recorded when a session is created for them.
    """
    cookie_user = request.cookies.get("user_id")
    if cookie_user:
        user_session_manager.last_request_time[cookie_user] = datetime.now()
    return await call_next(request)
234
 
235
+
236
async def check_inactivity():
    """Background loop: every 10 minutes, drop sessions idle for over 2 hours."""
    inactive_period = timedelta(hours=2)
    while True:
        await asyncio.sleep(600)  # re-check every 10 minutes
        user_session_manager.delete_inactive_sessions(inactive_period)


# BUG FIX: FastAPI has no `add_task` method, so the original
# `app.add_task(check_inactivity())` raised AttributeError at import time
# (and a bare coroutine call would never run anyway). Schedule the loop on
# the running event loop once the application starts.
@app.on_event("startup")
async def start_inactivity_checker():
    asyncio.create_task(check_inactivity())
244
 
245
 
246
  @app.post("/upload_files/")
247
+ async def upload_files(response: Response, files: List[UploadFile] = File(...)):
248
  file_details = []
249
  try:
250
  for file in files:
 
255
  except Exception as e:
256
  raise HTTPException(status_code=400, detail=str(e))
257
 
258
+ user_id = response.cookies.get("user_id")
259
+ if not user_id:
260
+ user_id = str(uuid.uuid4())
261
+ response.set_cookie(key="user_id", value=user_id)
262
+
263
  try:
264
+ conversational_chain = Conversational_Chain(
265
  file_details
266
  ).create_conversational_chain()
267
+ user_session_manager.set_session(
268
+ user_id=user_id, conversational_chain=conversational_chain
269
+ )
270
  print("conversational_chain_manager created")
271
  except Exception as e:
272
  raise HTTPException(status_code=500, detail=str(e))
 
276
 
277
@app.get("/predict/")
async def predict(query: str, request: Request, response: Response):
    """Answer ``query`` with the user's conversational chain, if one exists.

    BUG FIX: the original body referenced a ``response`` variable that was
    never defined (NameError) and read cookies from it — but cookies arrive
    on the *request*; the Response object only *sets* them. FastAPI injects
    both objects via the added parameters, so the HTTP interface (a single
    ``query`` query-parameter) is unchanged for callers.

    Falls back to a plain LLM call (which also prompts the user to upload
    files) when no session chain exists for this user yet.
    """
    user_id = request.cookies.get("user_id")
    if not user_id:
        user_id = str(uuid.uuid4())
        response.set_cookie(key="user_id", value=user_id)

    try:
        conversational_chain = user_session_manager.get_session(user_id=user_id)
        if conversational_chain is None:
            system_prompt = "Answer the question and also ask the user to upload files to ask questions from the files.\n"
            llm_model = ChatOpenAI()
            # Named `llm_response` (not `response`) so the injected Response
            # object carrying the user_id cookie is not shadowed.
            llm_response = llm_model.invoke(system_prompt + query)
            answer = llm_response.content
        else:
            chain_response = conversational_chain.invoke(query)
            answer = chain_response["answer"]
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

    print("predict called")
    return {"answer": answer}