tejovanth committed on
Commit
a9fe87e
·
verified ·
1 Parent(s): e2b641c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -7,7 +7,7 @@ import time, io
7
  device = 0 if torch.cuda.is_available() else -1
8
  if device == -1: print("⚠️ No GPU detected. Expect ~10–20s for 300,000 chars on CPU.")
9
 
10
- summarizer = pipeline("summarization", model="google/pegasus-xsum", device=device, torch_dtype=torch.int8)
11
 
12
  def extract_text(file_bytes):
13
  if file_bytes[:4].startswith(b'%PDF'):
@@ -25,7 +25,7 @@ async def summarize_file(file_bytes):
25
  chunks = [text[i:i+15000] for i in range(0, len(text), 15000)]
26
  if not chunks: return "❌ No chunks to summarize"
27
  summaries = []
28
- batch_size = 2 if device == -1 else 10 # Smaller batch for CPU
29
  for i in range(0, len(chunks), batch_size):
30
  if time.time() - start > 9:
31
  summaries.append("⚠️ Stopped early")
 
7
  device = 0 if torch.cuda.is_available() else -1
8
  if device == -1: print("⚠️ No GPU detected. Expect ~10–20s for 300,000 chars on CPU.")
9
 
10
+ summarizer = pipeline("summarization", model="google/pegasus-xsum", device=device, torch_dtype=torch.float32)
11
 
12
  def extract_text(file_bytes):
13
  if file_bytes[:4].startswith(b'%PDF'):
 
25
  chunks = [text[i:i+15000] for i in range(0, len(text), 15000)]
26
  if not chunks: return "❌ No chunks to summarize"
27
  summaries = []
28
+ batch_size = 2 if device == -1 else 10
29
  for i in range(0, len(chunks), batch_size):
30
  if time.time() - start > 9:
31
  summaries.append("⚠️ Stopped early")