# Hugging Face Spaces demo: text classification with the ADRv2024 checkpoint.
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# --- Model setup -------------------------------------------------------
# Pull the fine-tuned sequence-classification checkpoint and its matching
# tokenizer from the Hugging Face Hub (downloads on first run).
_MODEL_ID = "MarkAdamsMSBA24/ADRv2024"
tokenizer = AutoTokenizer.from_pretrained(_MODEL_ID)
model = AutoModelForSequenceClassification.from_pretrained(_MODEL_ID)
| # Define the prediction function | |
def get_prediction(text):
    """Classify *text* with the loaded model.

    Returns a pair for the two Gradio outputs: a human-readable label
    string and the raw logits as a nested Python list.
    """
    # Tokenize to PyTorch tensors, truncating/padding to the 512-token cap.
    encoded = tokenizer(
        text,
        return_tensors="pt",
        max_length=512,
        truncation=True,
        padding=True,
    )
    # Inference only — disable autograd to skip gradient bookkeeping.
    with torch.no_grad():
        logits = model(**encoded).logits
    winner = torch.argmax(logits, dim=-1).item()
    return f"Predicted Class: {winner}", logits.tolist()
# --- Gradio UI ---------------------------------------------------------
# One text box in; a label string plus a table of raw logit scores out.
iface = gr.Interface(
    fn=get_prediction,
    inputs=gr.Textbox(lines=4, placeholder="Type your text..."),
    outputs=[
        gr.Textbox(label="Prediction"),
        gr.Dataframe(label="Scores"),
    ],
    title="BERT Sequence Classification Demo",
    description="This demo uses a BERT model hosted on Hugging Face to classify text sequences.",
)

if __name__ == "__main__":
    # Start the web server only when run as a script, not on import.
    iface.launch()