# app.py — Gradio sentiment-analysis demo (author: Maram-almasary, commit 3d2863d)
import gradio as gr
from transformers import pipeline
import torch

# transformers pipelines use an integer device index: 0 = first GPU, -1 = CPU.
_cuda = torch.cuda.is_available()
device = 0 if _cuda else -1
print(f"Using device: {'GPU' if _cuda else 'CPU'}")

# Multilingual BERT fine-tuned to predict product-review star ratings (1-5).
model = pipeline(
    "sentiment-analysis",
    model="nlptown/bert-base-multilingual-uncased-sentiment",
    device=device,
)
# Function to get sentiment prediction
def analyze_sentiment(text, classifier=None):
    """Return the predicted star rating for *text* as a display string.

    The nlptown model emits labels of the form ``"1 star"`` .. ``"5 stars"``,
    so the numeric rating is the FIRST whitespace-separated token of the
    label.  The original code took the LAST token (``"star"``/``"stars"``),
    which made ``int(...)`` raise ``ValueError`` on every call.

    Args:
        text: Input sentence to score.
        classifier: Optional callable with the pipeline interface
            (``clf(text) -> [{'label': ..., 'score': ...}]``).  Defaults to
            the module-level ``model``; injectable for testing.

    Returns:
        A string like ``"Sentiment: 4 Stars"``.

    Raises:
        ValueError: If the label's first token is not an integer.
    """
    clf = classifier if classifier is not None else model
    result = clf(text)
    label = result[0]['label']          # e.g. "5 stars"
    sentiment_score = int(label.split()[0])  # first token is the rating digit
    return f"Sentiment: {sentiment_score} Stars"
# Predefined examples for testing
examples = [
["I love this product! It's amazing!"],
["This was the worst experience I've ever had."],
["The movie was okay, not great but not bad either."],
["Absolutely fantastic! I would recommend it to everyone."]
]
# Create Gradio interface
interface = gr.Interface(
fn=analyze_sentiment,
inputs=gr.Textbox(label="Enter Text",
placeholder="Type a sentence here...",
lines=2),
outputs=gr.Textbox(label="Sentiment",
placeholder="Predicted sentiment will be displayed here..."),
examples=examples,
title="Sentiment Analysis with BERT",
description="This app performs sentiment analysis on the text you provide, displaying a sentiment score ranging from 1 to 5 stars."
)
# Launch the app
interface.launch()