# NOTE(review): "Spaces: / Sleeping / Sleeping" below was Hugging Face
# page-status residue from the scrape, not part of the program.
import time

import torch
import torch.nn.functional as F
from transformers import AlbertTokenizerFast, AlbertForSequenceClassification

# Hub repository holding the distilled (PKD) ALBERT student checkpoint.
MODEL_REPO = "hadangvu/pkd-finbert-student"

# From label_mapping.json — order matches FinBERT teacher training
LABELS = dict(enumerate(("positive", "negative", "neutral")))

print("Loading model from HF Hub...")
tokenizer = AlbertTokenizerFast.from_pretrained(MODEL_REPO)
model = AlbertForSequenceClassification.from_pretrained(MODEL_REPO)
model.eval()  # disable dropout etc. for inference
print("Model ready.")
def predict(text: str) -> dict:
    """Run sentiment classification on *text* and report inference latency.

    Returns a dict with:
      - "label": one of the strings in LABELS
      - "confidence": softmax probability of the top class, rounded to 4 dp
      - "latency_ms": wall-clock tokenize + forward time, rounded to 2 dp
    """
    t0 = time.perf_counter()

    # Tokenize to PyTorch tensors, truncated to 128 sub-word tokens.
    encoded = tokenizer(
        text,
        return_tensors="pt",
        truncation=True,
        max_length=128,
        padding=True,
    )

    # Forward pass with autograd bookkeeping disabled.
    with torch.no_grad():
        logits = model(**encoded).logits

    # Softmax over the class dimension, then pick the top class and its
    # probability (equivalent to probs.max(dim=-1) value/index unpack).
    probs = F.softmax(logits, dim=-1)
    top_class = probs.argmax(dim=-1)
    top_prob = probs.gather(-1, top_class.unsqueeze(-1)).squeeze(-1)

    elapsed_ms = (time.perf_counter() - t0) * 1000
    return {
        "label": LABELS[top_class.item()],
        "confidence": round(top_prob.item(), 4),
        "latency_ms": round(elapsed_ms, 2),
    }