# Hugging Face Space app: climate sentiment classifier (repo: jojimene/entregable3).
# (The "Spaces / Sleeping" text was web-page chrome captured with the source, not code.)
import gradio as gr
import pandas as pd
from fastai.text.all import *
from huggingface_hub import from_pretrained_fastai

# Load the fine-tuned fastai text classifier from the Hugging Face Hub.
# NOTE(review): this downloads the model at import time — fine for a Spaces app,
# but it means importing this module requires network access.
REPO_ID = "jojimene/entregable3"
learn = from_pretrained_fastai(REPO_ID)
def predict_sentiment(text):
    """Classify *text* and return the predicted label plus per-class probabilities.

    Parameters
    ----------
    text : str
        Free-form input text to classify.

    Returns
    -------
    dict
        ``{"predicted_sentiment": <label str>,
           "probabilities": {<label str>: <float prob>, ...}}`` —
        all values are plain Python types so the result is JSON-serializable
        for the ``gr.JSON`` output component.
    """
    pred, _, probs = learn.predict(text)
    # learn.dls.vocab[1] is the target-label vocabulary; cast each label to str
    # so the dict keys are JSON-serializable.
    labels = [str(label) for label in learn.dls.vocab[1]]
    result = {label: float(prob) for label, prob in zip(labels, probs)}
    # Bug fix: `pred` is a fastai Category object, not a str — returning it raw
    # breaks JSON serialization in the gr.JSON output. Convert explicitly.
    return {"predicted_sentiment": str(pred), "probabilities": result}
# Build the Gradio UI: free-text input -> JSON output (label + probabilities).
iface = gr.Interface(
    fn=predict_sentiment,
    inputs=gr.Textbox(lines=5, placeholder="Enter text for sentiment analysis..."),
    outputs=gr.JSON(),
    title="Climate Sentiment Classifier",
    description="Enter a text related to climate sentiment, and the model will predict whether it's positive, negative, or neutral.",
    examples=[
        "Renewable energy is the future of our planet!",
        "Climate change is a serious threat to humanity.",
        "The weather is nice today, but I'm worried about global warming.",
    ],
    # Don't pre-run the model on the examples at startup; caching would invoke
    # the (slow, network-loaded) model during launch and can fail the Space.
    cache_examples=False,
)
# Launch the web server only when executed as a script (not on import).
if __name__ == "__main__":
    # Bind to 0.0.0.0:7860 — the address/port Hugging Face Spaces expects
    # so the app is reachable from outside the container.
    iface.launch(server_name="0.0.0.0", server_port=7860)