# Arch-Flava_API / app.py
# Gradio Space by MaryahGreene (revision b3a69c7) — serves the
# "MaryahGreene/arch_flava_mod" sequence-classification model.
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
# Load the fine-tuned classifier and its tokenizer from the Hub.
# NOTE(review): trust_remote_code executes arbitrary code shipped with the
# repo — acceptable only because this is the author's own model.
model = AutoModelForSequenceClassification.from_pretrained("MaryahGreene/arch_flava_mod", trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained("MaryahGreene/arch_flava_mod", trust_remote_code=True)
# Class-index -> human-readable label mapping; PretrainedConfig normally
# normalizes these keys to int. Must be set during training/export.
id2label = model.config.id2label  # make sure this is set during training!
def predict(text):
    """Classify *text* with the ArchFlava model.

    Args:
        text: Raw input string typed into the Gradio textbox.

    Returns:
        A human-readable prediction string with label and softmax
        confidence, or a "❌ Error: ..." message if inference fails.
    """
    try:
        inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
        # Inference only — skip autograd graph construction.
        with torch.no_grad():
            outputs = model(**inputs)
        probs = torch.nn.functional.softmax(outputs.logits, dim=-1)
        top_label = torch.argmax(probs, dim=-1).item()
        # BUG FIX: PretrainedConfig normalizes id2label keys to int, so the
        # original id2label[str(top_label)] raised KeyError on every call.
        # Try the int key first, then a str key for custom configs, then
        # fall back to the raw index so we never crash on the lookup.
        label_name = id2label.get(top_label, id2label.get(str(top_label), str(top_label)))
        confidence = probs[0][top_label].item()
        return f"Prediction: {label_name} ({confidence:.2%} confidence)"
    except Exception as e:
        # Surface the failure in the UI instead of crashing the Space.
        return f"❌ Error: {str(e)}"
# Build the web UI around predict() and start the server.
demo = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    title="ArchFlava Predictor",
)
demo.launch()