# newsintel-agent / llm.py
import json
import logging
import os

import requests
from dotenv import load_dotenv
# Load environment variables from a local .env file (if present).
load_dotenv()
# OpenRouter credentials; when unset, summarize() uses the local fallback.
API_KEY = os.getenv("OPENROUTER_API_KEY")
# Optional but recommended for OpenRouter rate limits
HTTP_REFERER = os.getenv("HTTP_REFERER", "http://localhost")
APP_TITLE = os.getenv("APP_TITLE", "NewsIntel Agent")
# Chat-completions endpoint and the model requested for summaries.
OPENROUTER_URL = "https://openrouter.ai/api/v1/chat/completions"
MODEL = "mistralai/mistral-7b-instruct"
def summarize(text: str) -> str:
    """Summarize *text* into three short analyst bullets.

    Tries the OpenRouter chat-completions API first when an API key is
    configured; on a request failure or an unexpected response shape it
    logs a warning and falls back to the local transformers pipeline.

    Args:
        text: Raw article text to summarize.

    Returns:
        The summary from OpenRouter, or from ``local_summarize`` on fallback.
    """
    if API_KEY:
        headers = {
            "Authorization": f"Bearer {API_KEY}",
            "Content-Type": "application/json",
            # Referer/title are optional but recommended by OpenRouter
            # for better rate limits.
            "HTTP-Referer": HTTP_REFERER,
            "X-Title": APP_TITLE,
        }
        prompt = (
            "You are a concise news analyst. Summarize into 3 bullets:"
            " 1) What happened 2) Business impact 3) Risk or opportunity."
            " Limit ~80 words. Text:\n" + text
        )
        payload = {
            "model": MODEL,
            "messages": [
                {"role": "system", "content": "Be factual, neutral, and brief."},
                {"role": "user", "content": prompt}
            ],
            "temperature": 0.2,  # low temperature keeps output factual/stable
            "max_tokens": 220
        }
        # Keep the try body minimal: only the network call and response
        # parsing can legitimately fail here.
        try:
            resp = requests.post(OPENROUTER_URL, json=payload, headers=headers, timeout=45)
            resp.raise_for_status()
            return resp.json()["choices"][0]["message"]["content"].strip()
        except requests.RequestException as exc:
            # Network/HTTP error — was a silent `except Exception: pass`;
            # log it so operators can see why the fallback engaged.
            logging.getLogger(__name__).warning("OpenRouter request failed: %s", exc)
        except (KeyError, IndexError, TypeError, ValueError) as exc:
            # Response was not the expected chat-completions JSON shape.
            logging.getLogger(__name__).warning("Unexpected OpenRouter response: %s", exc)
    return local_summarize(text)
# -------- local fallback (no API needed) --------
_local_pipe = None  # module-level cache for the lazily-built transformers pipeline
def local_summarize(text: str) -> str:
    """Summarize *text* with a locally cached DistilBART pipeline.

    The transformers pipeline is imported and constructed only on first
    use, then reused via the module-level ``_local_pipe`` cache.
    """
    global _local_pipe
    if _local_pipe is None:
        # Lazy import: transformers is only needed when the fallback runs.
        from transformers import pipeline
        _local_pipe = pipeline("summarization", model="sshleifer/distilbart-cnn-12-6")
    clipped = text[:1800]  # truncate long inputs for speed
    summary = _local_pipe(clipped, max_length=120, min_length=60, do_sample=False)[0]["summary_text"]
    return summary.strip()