# Source: hadangvu/pkd-finbert-student — exported from the Hugging Face Hub
# file viewer (Space status / file-size / commit-hash scrape artifacts removed
# so the file parses as valid Python).
import time
import torch
import torch.nn.functional as F
from transformers import AlbertTokenizerFast, AlbertForSequenceClassification
# Hugging Face Hub repo of the distilled (pruned-knowledge-distillation) ALBERT
# student fine-tuned from a FinBERT teacher for financial sentiment.
MODEL_REPO = "hadangvu/pkd-finbert-student"
# From label_mapping.json — order matches FinBERT teacher training
# (same id->label order as ProsusAI/finbert: 0=positive, 1=negative, 2=neutral).
LABELS = {0: "positive", 1: "negative", 2: "neutral"}
print("Loading model from HF Hub...")
# NOTE: downloads weights over the network on first run (cached afterwards).
tokenizer = AlbertTokenizerFast.from_pretrained(MODEL_REPO)
model = AlbertForSequenceClassification.from_pretrained(MODEL_REPO)
# Inference-only: disable dropout / set eval-mode behavior for all modules.
model.eval()
print("Model ready.")
def predict(text: str) -> dict:
    """Classify the sentiment of *text* with the distilled FinBERT student.

    Returns a dict with:
        label       -- "positive" | "negative" | "neutral"
        confidence  -- softmax probability of the predicted class, rounded to 4 dp
        latency_ms  -- wall-clock inference time in milliseconds, rounded to 2 dp
    """
    t0 = time.perf_counter()

    # Tokenize; inputs longer than 128 tokens are truncated.
    encoded = tokenizer(
        text,
        return_tensors="pt",
        truncation=True,
        max_length=128,
        padding=True,
    )

    # Forward pass without building the autograd graph.
    with torch.no_grad():
        outputs = model(**encoded)

    probabilities = F.softmax(outputs.logits, dim=-1)
    top_prob, top_idx = torch.max(probabilities, dim=-1)

    elapsed_ms = (time.perf_counter() - t0) * 1000
    return {
        "label": LABELS[top_idx.item()],
        "confidence": round(top_prob.item(), 4),
        "latency_ms": round(elapsed_ms, 2),
    }