tejovanth commited on
Commit
cd5b981
·
verified Β·
1 Parent(s): 217c34a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -17
app.py CHANGED
@@ -2,26 +2,18 @@ import gradio as gr
2
  import fitz
3
  import torch
4
  from transformers import pipeline
5
- import time, io
6
 
7
  device = 0 if torch.cuda.is_available() else -1
8
- if device == -1: print("⚠️ No GPU detected. Expect ~10–20s for 300,000 chars on CPU.")
9
 
10
- summarizer = pipeline("summarization", model="google/pegasus-xsum", device=device, torch_dtype=torch.float32)
11
 
12
- def extract_text(file_bytes):
13
- if file_bytes[:4].startswith(b'%PDF'):
14
- doc = fitz.open(stream=file_bytes, filetype="pdf")
15
- text = "".join(page.get_text("text", flags=16) for page in doc)
16
- doc.close()
17
- return text
18
- try: return file_bytes.decode("utf-8")
19
- except: return "❌ Unsupported format (PDF/TXT only)"
20
-
21
- async def summarize_file(file_bytes):
22
  start = time.time()
23
- text = extract_text(file_bytes)[:300000] or "❌ No text found"
24
- if len(text.strip()) == 0: return text
 
25
  chunks = [text[i:i+15000] for i in range(0, len(text), 15000)]
26
  if not chunks: return "❌ No chunks to summarize"
27
  summaries = []
@@ -30,8 +22,8 @@ async def summarize_file(file_bytes):
30
  if time.time() - start > 9:
31
  summaries.append("⚠️ Stopped early")
32
  break
33
- batch = chunks[i:i+batch_size]
34
  try:
 
35
  batch_summaries = summarizer(batch, max_length=40, min_length=10, do_sample=False, batch_size=batch_size)
36
  summaries.extend(f"**Chunk {i+j+1}**:\n{s['summary_text']}" for j, s in enumerate(batch_summaries))
37
  except: summaries.append(f"**Chunk {i+1}**: ❌ Error")
@@ -40,7 +32,7 @@ async def summarize_file(file_bytes):
40
  demo = gr.Interface(
41
  fn=summarize_file, inputs=gr.File(label="πŸ“„ PDF/TXT Notes"),
42
  outputs=gr.Textbox(label="πŸ“ Summary"),
43
- title="Fast Summarizer", description="300,000+ chars in ~5–10s (GPU) or ~10–20s (CPU)"
44
  )
45
 
46
  if __name__ == "__main__":
 
2
  import fitz
3
  import torch
4
  from transformers import pipeline
5
+ import time
6
 
7
  device = 0 if torch.cuda.is_available() else -1
8
+ if device == -1: print("⚠️ No GPU. Expect ~12–22s for 300,000 chars on CPU.")
9
 
10
+ summarizer = pipeline("summarization", model="t5-small", device=device, torch_dtype=torch.float32)
11
 
12
+ def summarize_file(file_bytes):
 
 
 
 
 
 
 
 
 
13
  start = time.time()
14
+ text = "".join(page.get_text("text", flags=16) for page in fitz.open(stream=file_bytes, filetype="pdf")) if file_bytes[:4].startswith(b'%PDF') else file_bytes.decode("utf-8", errors="ignore")
15
+ if not text.strip(): return "❌ No text found"
16
+ text = text[:300000]
17
  chunks = [text[i:i+15000] for i in range(0, len(text), 15000)]
18
  if not chunks: return "❌ No chunks to summarize"
19
  summaries = []
 
22
  if time.time() - start > 9:
23
  summaries.append("⚠️ Stopped early")
24
  break
 
25
  try:
26
+ batch = chunks[i:i+batch_size]
27
  batch_summaries = summarizer(batch, max_length=40, min_length=10, do_sample=False, batch_size=batch_size)
28
  summaries.extend(f"**Chunk {i+j+1}**:\n{s['summary_text']}" for j, s in enumerate(batch_summaries))
29
  except: summaries.append(f"**Chunk {i+1}**: ❌ Error")
 
32
  demo = gr.Interface(
33
  fn=summarize_file, inputs=gr.File(label="πŸ“„ PDF/TXT Notes"),
34
  outputs=gr.Textbox(label="πŸ“ Summary"),
35
+ title="Fast Summarizer", description="300,000+ chars in ~5–8s (GPU) or ~12–22s (CPU)"
36
  )
37
 
38
  if __name__ == "__main__":