kiranmadhusud committed on
Commit
5933ec9
·
1 Parent(s): 1dcc426

Fix: update RAG pipeline with TinyLlama text-generation

Browse files
Files changed (1) hide show
  1. app.py +13 -3
app.py CHANGED
@@ -2,15 +2,25 @@ import gradio as gr
2
  from pypdf import PdfReader
3
  from rag_pipeline import RAGPipeline
4
  import warnings
 
5
  import logging
6
  import os
7
 
8
- # Suppress FutureWarnings from torch/spaces
9
  warnings.filterwarnings("ignore", category=FutureWarning)
10
-
11
- # Suppress transformers verbose logs
12
  logging.getLogger("transformers").setLevel(logging.ERROR)
13
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
 
 
 
 
 
 
 
 
 
 
 
14
  rag = RAGPipeline()
15
 
16
  def process_file(file):
 
2
  from pypdf import PdfReader
3
  from rag_pipeline import RAGPipeline
4
  import warnings
5
+ import asyncio
6
  import logging
7
  import os
8
 
 
9
  warnings.filterwarnings("ignore", category=FutureWarning)
 
 
10
  logging.getLogger("transformers").setLevel(logging.ERROR)
11
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
12
+
13
+ # Fix Python 3.13 asyncio cleanup bug
14
+ import sys
15
+ if sys.version_info >= (3, 13):
16
+ import asyncio.base_events
17
+ _original_del = asyncio.base_events.BaseEventLoop.__del__
18
+ def _safe_del(self):
19
+ try:
20
+ _original_del(self)
21
+ except Exception:
22
+ pass
23
+ asyncio.base_events.BaseEventLoop.__del__ = _safe_del
24
  rag = RAGPipeline()
25
 
26
  def process_file(file):