# echo-demo / app.py
# Author: thksirhc — "Update app.py" (commit e556e6e, verified)
import os
# NOTE: must be set BEFORE transformers/tokenizers is imported to take effect.
os.environ["TOKENIZERS_PARALLELISM"] = "false" # Stops warning crashes
import gradio as gr
from transformers import pipeline
# Lazy load models only when needed (saves memory on free tier)
def get_diversity_pipeline():
    """Build the diversity text-classification pipeline on CPU (device=-1)."""
    model_id = "thksirhc/autotrain-media-analysis"
    return pipeline("text-classification", model=model_id, device=-1)
def get_objective_pipeline():
    """Build the objectiveness text-classification pipeline on CPU (device=-1)."""
    model_id = "thksirhc/autotrain-HKm-objective"
    return pipeline("text-classification", model=model_id, device=-1)
def get_genuine_pipeline():
    """Build the genuineness text-classification pipeline on CPU (device=-1)."""
    model_id = "thksirhc/autotrain-genuine"
    return pipeline("text-classification", model=model_id, device=-1)
def analyze(text):
    """Run the three EchoScore classifiers over *text* and format the result.

    Args:
        text: Article text pasted by the user; blank/whitespace-only input
            is rejected before any model is loaded.

    Returns:
        A Markdown string with the three scores, a prompt message for empty
        input, or an error message if a model fails to load or run.
    """
    if not text.strip():
        return "Please paste an article!"
    try:
        # Load and run one model at a time (safer on low RAM): each getter
        # builds a fresh pipeline, so nothing stays resident between models.
        diversity_score = _extract_score(get_diversity_pipeline()(text)[0])
        objective_score = _extract_score(get_objective_pipeline()(text)[0])
        genuine_score = _extract_score(get_genuine_pipeline()(text)[0])
        return f"""
### EchoScore Results
**Diversity:** {diversity_score:.1f}/10
**Objectiveness:** {objective_score:.1f}/10
**Genuineness:** {genuine_score:.1f}/10
"""
    except Exception as e:
        # Broad catch is deliberate: surface any model/load failure to the
        # UI instead of crashing the Space (first load is slow on free tier).
        return f"Error (check logs for details): {str(e)}\n\nFirst load can be slow (30-60s on free tier)—try again or short text."


def _extract_score(result):
    """Pull a numeric score from one text-classification result dict.

    Prefers the 'score' field (model confidence) when present; otherwise
    falls back to parsing the numeric suffix of an autotrain-style
    'LABEL_<n>' label. (The original inline expression's `.get("score", 0)`
    default was dead code — `.get` only ran when the key existed.)
    """
    if "score" in result:
        return float(result["score"])
    return float(result["label"].replace("LABEL_", ""))
# Gradio UI wiring. HF Spaces auto-discovers the module-level `demo` object;
# do not rename it.
demo = gr.Interface(
    fn=analyze,
    inputs=gr.Textbox(lines=12, placeholder="Paste HK news article here (short for test)...", label="Article"),
    outputs=gr.Markdown(),  # analyze() returns Markdown-formatted text
    title="EchoScore HK Media Checker",
    description="Instant bias scores—be patient on first try (free server slow)."
)
demo.launch()