from utils.file_loader import extract_text
from prompts.summarize_prompt import SUMMARY_PROMPT
from llm.hf_llm import get_chat_model

chat_model = get_chat_model()

# Prompt composed with the model via the runnable "|" pipeline.
summarize_chain = SUMMARY_PROMPT | chat_model

# Hard cap on input size before invoking the model.
MAX_CHARS = 18_000


def summarize_document(file):
    """Summarize an uploaded document and return the summary text.

    Args:
        file: Uploaded file object (falsy when nothing was uploaded).

    Returns:
        The summary string, optionally prefixed with a truncation warning,
        or a user-facing error message starting with "❌".
    """
    if not file:
        return "Please upload a document."

    text = extract_text(file)
    # extract_text signals failure with a string starting with "❌" —
    # pass that message straight through to the user.
    if text.startswith("❌"):
        return text
    if len(text.strip()) < 80:
        return "Not enough meaningful text extracted."

    warning = ""
    if len(text) > MAX_CHARS:
        # Truncate oversized input and tell the user we did so.
        text = text[:MAX_CHARS]
        warning = "⚠️ Document truncated for processing.\n\n"

    # Keep the try body minimal: only the model call can raise provider errors.
    try:
        response = summarize_chain.invoke({"text": text})
    except Exception as e:
        # Boundary handler: map known provider failures to friendly messages.
        msg = str(e).lower()
        if "token" in msg:
            return "❌ Invalid or missing Hugging Face token."
        if "rate" in msg:
            return "❌ Rate limit exceeded. Try later."
        return f"❌ Error: {e}"
    return warning + response.content.strip()