import gradio as gr
from transformers import pipeline, logging

# Silence non-error chatter from the transformers library.
logging.set_verbosity_error()

# Load the summarization model once at import time so every request reuses it.
# On failure (network, disk, incompatible weights) fall back to None and let
# summarize_text() report the problem per-request instead of crashing the app.
try:
    summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
    print("Model loaded successfully!")
except Exception as e:
    print(f"Error loading model: {e}")
    summarizer = None


def summarize_text(input_text):
    """Summarize ``input_text`` with the pre-loaded BART pipeline.

    Args:
        input_text: The text to condense.

    Returns:
        The generated summary string, or a human-readable error message when
        the model failed to load, the input is empty, or inference raises.
    """
    # Explicit None check: the sentinel set on load failure is None, and
    # truthiness of an arbitrary pipeline object is not a reliable signal.
    if summarizer is None:
        return "Error: Model not loaded."
    # Guard: the summarization pipeline raises on empty/whitespace-only input.
    if not input_text or not input_text.strip():
        return "Error: Please enter some text to summarize."
    try:
        summary = summarizer(input_text, max_length=150, min_length=30, do_sample=False)
        return summary[0]["summary_text"]
    except Exception as e:
        # Surface inference failures (e.g. over-long input) in the UI
        # as a message rather than an unhandled stack trace.
        return f"Error during summarization: {e}"


def create_interface():
    """Build and return the Gradio Interface wrapping summarize_text."""
    return gr.Interface(
        fn=summarize_text,
        inputs=gr.Textbox(
            label="Enter Text for Summarization",
            placeholder="Paste or type your text here...",
        ),
        outputs=gr.Textbox(label="Summary", placeholder="Summary will appear here..."),
        title="Text Summarizer",
        description="This app takes a long text as input and generates a concise summary using a pre-trained BART model.",
        examples=[["Hugging Face is an open-source platform that allows developers and researchers to share and access machine learning models."]],
    )


if __name__ == "__main__":
    interface = create_interface()
    # share=True additionally exposes the app via a temporary public Gradio URL.
    interface.launch(share=True)