# Source: Hugging Face Space by Aadityaramrame — commit 59db687 ("Update app.py")
import gradio as gr
from transformers import BartTokenizer, BartForConditionalGeneration
import torch
# -------------------------------
# MODEL LOADING
# -------------------------------
# Load the checkpoint once at import time so every request reuses the same
# in-memory model; the first run downloads and caches the weights from the Hub.
model_name = "facebook/bart-large-cnn" # Hugging Face model id
tokenizer = BartTokenizer.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)
# -------------------------------
# SUMMARIZATION FUNCTION
# -------------------------------
def summarize_text(input_text):
    """Summarize *input_text* with the module-level BART model.

    Parameters
    ----------
    input_text : str
        Arbitrary long text; anything beyond the model's 1024-token
        context window is truncated.

    Returns
    -------
    str
        The generated summary, or a warning message for blank input.
    """
    # Guard clause: nothing to summarize.
    if not input_text.strip():
        return "⚠️ Please enter some text to summarize."

    # Tokenize; truncation keeps us within BART's 1024-token limit.
    inputs = tokenizer([input_text], max_length=1024, return_tensors="pt", truncation=True)

    # Pure inference: no_grad() skips autograd bookkeeping (less memory, faster).
    with torch.no_grad():
        summary_ids = model.generate(
            inputs["input_ids"],
            # Pass the mask explicitly — omitting it triggers an HF warning and
            # can produce wrong results if padding is ever present.
            attention_mask=inputs["attention_mask"],
            num_beams=4,        # beam search for higher-quality summaries
            min_length=50,
            max_length=200,
            early_stopping=True,
        )

    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
# -------------------------------
# GRADIO INTERFACE
# -------------------------------
# Build the web UI: one large input textbox, one output textbox, two
# canned example inputs, wired to the summarizer above.
_input_box = gr.Textbox(lines=12, placeholder="Paste medical or long text here...", label="Input Text")
_output_box = gr.Textbox(lines=10, label="Generated Summary")
_examples = [
    ["COVID-19 is a respiratory disease caused by the SARS-CoV-2 virus. It spread rapidly across the globe..."],
    ["Hypertension is a chronic medical condition characterized by persistently high blood pressure levels..."],
]

demo = gr.Interface(
    fn=summarize_text,
    inputs=_input_box,
    outputs=_output_box,
    title="🩺 Jan Arogya Summarizer",
    description="Summarize long medical or research text using the **facebook/bart-large-cnn** model.",
    theme="soft",
    examples=_examples,
)
# -------------------------------
# LAUNCH
# -------------------------------
# Start the Gradio server only when executed as a script (not on import).
if __name__ == "__main__":
    demo.launch()