import gradio as gr
import pandas as pd
from fastai.text.all import *
from huggingface_hub import from_pretrained_fastai
# Load the fine-tuned fastai text classifier from the Hugging Face Hub.
# Downloads the exported learner on first run; requires network access.
repo_id = "jojimene/entregable3"
learn = from_pretrained_fastai(repo_id)
# Define the prediction function
def predict_sentiment(text):
    """Classify *text* with the loaded learner and report class probabilities.

    Returns a dict with:
      - "predicted_sentiment": the predicted class label as a plain str
      - "probabilities": {label: probability} for every class in the vocab
    """
    # Make prediction using the loaded model; fastai returns
    # (decoded prediction, class index tensor, probability tensor)
    pred, _, probs = learn.predict(text)
    # learn.dls.vocab[1] holds the target-class labels; cast each to str so
    # they are valid JSON keys (fastai may yield Category objects)
    labels = [str(label) for label in learn.dls.vocab[1]]
    result = {label: float(prob) for label, prob in zip(labels, probs)}
    # Coerce pred to str as well: a fastai Category is not JSON-serializable
    # by gr.JSON, which would make the output component fail at runtime
    return {"predicted_sentiment": str(pred), "probabilities": result}
# Assemble the Gradio UI around the prediction function.
# Sample inputs shown beneath the textbox for one-click testing.
example_texts = [
    "Renewable energy is the future of our planet!",
    "Climate change is a serious threat to humanity.",
    "The weather is nice today, but I'm worried about global warming."
]
# Free-text input; the model's label/probability dict is rendered as JSON.
text_input = gr.Textbox(lines=5, placeholder="Enter text for sentiment analysis...")
iface = gr.Interface(
    fn=predict_sentiment,
    inputs=text_input,
    outputs=gr.JSON(),
    title="Climate Sentiment Classifier",
    description="Enter a text related to climate sentiment, and the model will predict whether it's positive, negative, or neutral.",
    examples=example_texts,
    cache_examples=False  # Disable caching to avoid startup error
)
# Launch the interface only when run as a script (not on import).
if __name__ == "__main__":
    # 0.0.0.0:7860 is the standard bind address/port for Hugging Face Spaces
    iface.launch(server_name="0.0.0.0", server_port=7860)