Update app/core/summarizer.py
Browse files- app/core/summarizer.py +22 -27
app/core/summarizer.py
CHANGED
|
@@ -1,19 +1,8 @@
|
|
| 1 |
import logging
|
| 2 |
-
from transformers import pipeline
|
| 3 |
|
| 4 |
-
#
|
| 5 |
-
logging.basicConfig(level=logging.INFO)
|
| 6 |
-
logger = logging.getLogger("AutoTLDR")
|
| 7 |
|
| 8 |
-
|
| 9 |
-
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
|
| 10 |
-
|
| 11 |
-
# Configuration
|
| 12 |
-
MAX_TEXT_LENGTH = 6000 # max total input length
|
| 13 |
-
MAX_CHUNKS = 3 # max number of chunks
|
| 14 |
-
CHUNK_SIZE = 700 # approx number of characters per chunk
|
| 15 |
-
|
| 16 |
-
def chunk_text(text, max_len=CHUNK_SIZE):
|
| 17 |
words = text.split()
|
| 18 |
chunks = []
|
| 19 |
current = []
|
|
@@ -27,29 +16,35 @@ def chunk_text(text, max_len=CHUNK_SIZE):
|
|
| 27 |
if current:
|
| 28 |
chunks.append(" ".join(current))
|
| 29 |
|
| 30 |
-
return chunks[:
|
| 31 |
|
| 32 |
-
def get_summary(text
|
| 33 |
-
|
|
|
|
|
|
|
| 34 |
|
| 35 |
-
if
|
| 36 |
-
|
| 37 |
-
|
| 38 |
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
return f"Input too long to summarize (max {MAX_TEXT_LENGTH} characters allowed)."
|
| 42 |
|
| 43 |
summaries = []
|
| 44 |
chunks = chunk_text(text)
|
| 45 |
|
| 46 |
-
for
|
|
|
|
| 47 |
try:
|
| 48 |
-
logger.info(f"Summarizing chunk {idx + 1}/{len(chunks)} (length: {len(chunk)})")
|
| 49 |
result = summarizer(chunk, max_length=150, min_length=30, do_sample=False)
|
| 50 |
summaries.append(result[0]['summary_text'])
|
|
|
|
| 51 |
except Exception as e:
|
| 52 |
-
|
| 53 |
-
summaries.append("
|
|
|
|
|
|
|
|
|
|
|
|
|
| 54 |
|
| 55 |
-
return
|
|
|
|
| 1 |
import logging
|
|
|
|
| 2 |
|
| 3 |
+
MAX_INPUT_CHARS = 6000  # Maximum accepted input length in characters; get_summary() rejects longer text with ValueError
|
|
|
|
|
|
|
| 4 |
|
| 5 |
+
def chunk_text(text, max_len=700):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
words = text.split()
|
| 7 |
chunks = []
|
| 8 |
current = []
|
|
|
|
| 16 |
if current:
|
| 17 |
chunks.append(" ".join(current))
|
| 18 |
|
| 19 |
+
return chunks[:3] # Limit to first 3 chunks
|
| 20 |
|
| 21 |
+
# Cache for the summarization pipeline. Building it loads the full
# "facebook/bart-large-cnn" model, which is very expensive, so it must
# happen at most once per process — not once per request (previous bug).
_SUMMARIZER = None


def _get_summarizer():
    """Return the shared summarization pipeline, creating it on first use."""
    global _SUMMARIZER
    if _SUMMARIZER is None:
        # Imported lazily so importing this module stays cheap and does not
        # require transformers/torch at import time.
        from transformers import pipeline
        _SUMMARIZER = pipeline("summarization", model="facebook/bart-large-cnn")
    return _SUMMARIZER


def get_summary(text):
    """Summarize *text* chunk by chunk with the BART summarization pipeline.

    Args:
        text: Raw page text to summarize.

    Returns:
        str: Per-chunk summaries joined with newlines; a chunk whose
        summarization failed is represented by "...".

    Raises:
        ValueError: If *text* is longer than MAX_INPUT_CHARS characters.
    """
    logging.info("=== AutoTLDR Incoming Request ===")
    logging.info(f"Raw input (first 500 chars):\n{text[:500]}")
    logging.info(f"Total input length: {len(text)} characters")

    # Guard clause: refuse oversized input before touching the model.
    if len(text) > MAX_INPUT_CHARS:
        logging.warning("Input too long! Aborting.")
        raise ValueError("Text too long to summarize. Please try a shorter page.")

    # Fix: previously `pipeline(...)` was called here on every request,
    # reloading the entire model each time; now the pipeline is cached.
    summarizer = _get_summarizer()

    summaries = []
    chunks = chunk_text(text)

    for i, chunk in enumerate(chunks):
        logging.info(f"Summarizing chunk {i+1}/{len(chunks)} (length: {len(chunk)})")
        try:
            result = summarizer(chunk, max_length=150, min_length=30, do_sample=False)
            summaries.append(result[0]['summary_text'])
            logging.info(f"Chunk {i+1} summary: {result[0]['summary_text']}")
        except Exception:
            # Best-effort: keep summarizing remaining chunks; "..." marks the gap.
            logging.exception(f"Error summarizing chunk {i+1}")
            summaries.append("...")

    final_summary = "\n".join(summaries)
    logging.info("=== Final Summary Output ===")
    logging.info(final_summary)

    return final_summary
|