tejovanth committed on
Commit
62801eb
·
verified ·
1 Parent(s): 50df8c7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +81 -24
app.py CHANGED
@@ -1,54 +1,111 @@
1
  import gradio as gr
2
- import fitz
3
  import torch
4
  from transformers import pipeline
5
  import time, logging
 
 
 
6
 
 
7
  logging.basicConfig(level=logging.ERROR)
8
- device = -1 # CPU-only
9
- print("⚠️ CPU-only. Expect ~20–30s for 300,000 chars.")
10
 
 
11
  try:
12
  summarizer = pipeline("summarization", model="t5-small", device=device, torch_dtype=torch.float32)
13
  except Exception as e:
14
- print(f"❌ Model loading failed: {str(e)}")
15
  exit(1)
16
 
17
- def summarize_file(file_bytes):
18
- start = time.time()
19
- print(f"File type: {type(file_bytes)}")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
20
  try:
21
- text = "".join(page.get_text("text", flags=16) for page in fitz.open(stream=file_bytes, filetype="pdf")) if file_bytes[:4].startswith(b'%PDF') else file_bytes.decode("utf-8", errors="ignore")
 
 
 
22
  except Exception as e:
23
- return f"❌ Text extraction failed: {str(e)}"
24
- if not text.strip(): return "❌ No text found"
 
 
 
 
25
  text = text[:300000]
26
- chunks = [text[i:i+2000] for i in range(0, len(text), 2000)]
27
- print(f"Chunks created: {len(chunks)}")
28
- if not chunks: return "❌ No chunks to summarize"
29
- summaries = []
30
  for i, chunk in enumerate(chunks):
31
- if time.time() - start > 15:
32
- summaries.append("⚠️ Stopped early")
33
  break
34
  try:
35
- summary = summarizer(chunk, max_length=60, min_length=10, do_sample=False)[0]['summary_text']
36
- summaries.append(f"**Chunk {i+1}**:\n{summary}")
 
 
 
37
  except Exception as e:
38
- summaries.append(f"**Chunk {i+1}**: ❌ Error: {str(e)}")
39
- return f"**Chars**: {len(text)}\n**Time**: {time.time()-start:.2f}s\n\n" + "\n\n".join(summaries)
 
 
 
40
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
41
  demo = gr.Interface(
42
- fn=summarize_file, inputs=gr.File(label="πŸ“„ PDF/TXT Notes", type="binary"),
43
- outputs=gr.Textbox(label="πŸ“ Summary"),
44
- title="Fast Summarizer", description="300,000+ chars in ~20–30s (CPU)"
 
 
 
 
 
45
  )
46
 
47
  if __name__ == "__main__":
48
  try:
49
  demo.launch(share=False, server_port=7860)
50
  except Exception as e:
51
- print(f"❌ Gradio launch failed: {str(e)}")
 
52
 
53
 
54
 
 
1
  import gradio as gr
2
+ import fitz # PyMuPDF
3
  import torch
4
  from transformers import pipeline
5
  import time, logging
6
+ import re
7
+ import tempfile
8
+ import os
9
 
10
# === Setup ===
# Only ERROR-level records are emitted; quiets INFO/WARNING noise from libraries.
logging.basicConfig(level=logging.ERROR)
device = -1  # CPU (transformers pipeline convention: -1 = CPU, >=0 = CUDA device index)
print("⚠️ CPU-only mode. Expect ~20–30s for large documents.")
14
 
15
# === Load summarization model ===
try:
    # t5-small summarization pipeline on CPU; dtype pinned to float32 explicitly.
    summarizer = pipeline("summarization", model="t5-small", device=device, torch_dtype=torch.float32)
except Exception as e:
    print(f"❌ Model loading failed: {e}")
    # Bug fix: bare `exit` is injected by the `site` module and is not guaranteed
    # to exist (e.g. under `python -S` or frozen apps). SystemExit is the
    # reliable way to abort with a nonzero status code.
    raise SystemExit(1)
21
 
22
# === Text Preprocessing ===
def smart_chunk(text, max_chunk_len=2000):
    """Split *text* into chunks of roughly at most *max_chunk_len* characters.

    Splitting happens on sentence boundaries (after ., ! or ? followed by
    spaces) so sentences are never cut in half.  A single sentence longer
    than *max_chunk_len* becomes its own (oversized) chunk.

    Args:
        text: The input text to split.
        max_chunk_len: Soft upper bound on chunk length in characters.

    Returns:
        List of non-empty, stripped chunk strings.  Empty/whitespace-only
        input yields an empty list (bug fix: the previous version returned
        a spurious [""] chunk, and an oversized first sentence appended an
        empty leading chunk).
    """
    sentences = re.split(r'(?<=[.!?]) +', text)
    chunks, current = [], ""
    for sentence in sentences:
        if len(current) + len(sentence) < max_chunk_len:
            current += sentence + " "
        else:
            # Flush the accumulated chunk, skipping empty accumulations
            # (happens when the very first sentence already exceeds the limit).
            if current.strip():
                chunks.append(current.strip())
            current = sentence + " "
    if current.strip():
        chunks.append(current.strip())
    return chunks
35
+
36
# === Summarization per file ===
def summarize_file_bytes(file_bytes, filename):
    """Extract text from raw PDF/TXT bytes and summarize it chunk by chunk.

    Args:
        file_bytes: Raw file contents.  Treated as PDF if it starts with
            the %PDF magic header, otherwise decoded as UTF-8 text.
        filename: Display name used in the summary header and error
            messages (bug fix: this parameter was previously ignored and
            every message hard-coded "(unknown)").

    Returns:
        Tuple (summary_markdown, raw_text_for_download).  On extraction
        failure the second element is "".
    """
    start_time = time.time()
    try:
        if file_bytes[:4].startswith(b'%PDF'):
            # Close the PyMuPDF document explicitly so we don't leak
            # native resources across repeated uploads (bug fix).
            doc = fitz.open(stream=file_bytes, filetype="pdf")
            try:
                text = "".join(page.get_text("text", flags=16) for page in doc)
            finally:
                doc.close()
        else:
            text = file_bytes.decode("utf-8", errors="ignore")
    except Exception as e:
        return f"{filename}: ❌ Text extraction failed: {e}", ""

    text = text.strip()
    if not text:
        return f"{filename}: ❌ No text found.", ""

    # Hard cap to keep CPU summarization time bounded.
    text = text[:300000]
    chunks = smart_chunk(text)
    summaries, line_count = [], 0

    for i, chunk in enumerate(chunks):
        # Wall-clock budget of 20s per file; report partial results.
        if time.time() - start_time > 20:
            summaries.append("⚠️ Stopped early due to time limit.")
            break
        try:
            summary = summarizer(chunk, max_length=100, min_length=30, do_sample=False)[0]["summary_text"]
            summaries.append(f"**Chunk {i+1}**:\n{summary.strip()}")
            # Stop once ~15 output lines are collected to keep the UI readable.
            line_count += summary.count('\n') + 1
            if line_count >= 15:
                break
        except Exception as e:
            summaries.append(f"**Chunk {i+1}**: ❌ Error summarizing: {e}")

    total_time = time.time() - start_time
    summary_text = f"πŸ“„ **{filename}**\n**Characters**: {len(text)} | **Time**: {total_time:.2f}s\n\n" + "\n\n".join(summaries)
    return summary_text, summary_text
71
 
72
# === Gradio Wrapper ===
def summarize_multiple_files(files):
    """Summarize each uploaded file and bundle all summaries into a .txt.

    Args:
        files: List of uploaded file objects exposing ``.read()`` and
            ``.name`` (as provided by gr.File with file_count="multiple").

    Returns:
        Tuple (combined_summary_markdown, path_to_downloadable_txt).
        Returns a friendly message and None when nothing was uploaded
        (bug fix: previously crashed with TypeError on None/empty input).
    """
    if not files:
        return "⚠️ No files uploaded.", None

    all_summaries = []
    combined_text = ""

    for file_obj in files:
        file_bytes = file_obj.read()
        # os.path.basename is portable (bug fix: split("/") breaks on Windows paths).
        filename = os.path.basename(file_obj.name)
        summary, raw = summarize_file_bytes(file_bytes, filename)
        all_summaries.append(summary)
        combined_text += f"\n\n{raw}\n" + "=" * 60 + "\n"

    # Persist the combined summary so Gradio can offer it as a download.
    with tempfile.NamedTemporaryFile(delete=False, suffix=".txt", mode="w", encoding="utf-8") as f:
        f.write(combined_text)
        summary_file_path = f.name

    return "\n\n".join(all_summaries), summary_file_path
90
+
91
# === Gradio Interface ===
# Two outputs: the on-screen markdown summary and a downloadable .txt file.
# NOTE(review): type="file" is deprecated and removed in Gradio 4.x (which
# expects type="filepath" or "binary") — confirm the pinned Gradio version
# still supports it, and that summarize_multiple_files receives objects
# exposing .read()/.name accordingly.
demo = gr.Interface(
    fn=summarize_multiple_files,
    inputs=gr.File(label="πŸ“„ Upload PDF or TXT files", file_types=[".pdf", ".txt"], type="file", file_count="multiple"),
    outputs=[
        gr.Textbox(label="πŸ“ Summary", lines=30, max_lines=100),
        gr.File(label="πŸ“₯ Download Summary as .txt")
    ],
    title="πŸ“š Multi-File Summarizer",
    description="Summarizes multiple PDFs or TXTs into at least 15 lines each. Download final output as .txt. CPU-optimized."
)
102
 
103
if __name__ == "__main__":
    try:
        # Local-only serving on port 7860 (the default Gradio/HF Spaces port).
        demo.launch(share=False, server_port=7860)
    except Exception as e:
        # Launch failures (e.g. port already in use) are reported, not raised.
        print(f"❌ Gradio launch failed: {e}")
 
110
 
111