# services/inference.py
import torch
def predict_sentiment(text, tokenizer, model, device):
    """Classify a single text's sentiment.

    Args:
        text: input string to classify.
        tokenizer: callable producing a dict with "input_ids" and
            "attention_mask" PyTorch tensors (HF-style interface).
        model: callable mapping (input_ids, attention_mask) to a
            (1, 2) logits tensor — assumes raw logits, not a
            ModelOutput wrapper; confirm against the model used.
        device: torch device the model's weights live on.

    Returns:
        Tuple of (label, confidence): label is "Negative" or
        "Positive", confidence is the softmax probability of the
        predicted class.
    """
    encoded = tokenizer(
        text,
        padding="max_length",
        truncation=True,
        max_length=256,
        return_tensors="pt",
    )
    ids = encoded["input_ids"].to(device)
    mask = encoded["attention_mask"].to(device)

    # Inference only — no gradient bookkeeping needed.
    with torch.no_grad():
        logits = model(ids, mask)

    scores = torch.softmax(logits, dim=1)
    idx = int(torch.argmax(scores, dim=1))
    labels = {0: "Negative", 1: "Positive"}
    return labels[idx], scores[0, idx].item()
def batch_predict(texts, tokenizer, model, device):
    """Classify the sentiment of a batch of texts.

    Args:
        texts: list of input strings; may be empty.
        tokenizer: callable producing a dict with "input_ids" and
            "attention_mask" PyTorch tensors (HF-style interface).
        model: callable mapping (input_ids, attention_mask) to a
            (batch, 2) logits tensor — assumes raw logits, not a
            ModelOutput wrapper; confirm against the model used.
        device: torch device the model's weights live on.

    Returns:
        One dict per input text with keys "text", "label"
        ("Negative"/"Positive"), and "confidence" (softmax
        probability of the predicted class).
    """
    # Guard: tokenizers typically raise on an empty batch, and there
    # is nothing to score anyway.
    if not texts:
        return []

    inputs = tokenizer(
        texts,
        padding=True,
        truncation=True,
        max_length=256,
        return_tensors="pt",
    )
    input_ids = inputs["input_ids"].to(device)
    attention_mask = inputs["attention_mask"].to(device)

    # Inference only — no gradient bookkeeping needed.
    with torch.no_grad():
        outputs = model(input_ids, attention_mask)

    probs = torch.softmax(outputs, dim=1)
    preds = torch.argmax(probs, dim=1)
    label_map = {0: "Negative", 1: "Positive"}
    return [
        {
            "text": text,
            "label": label_map[pred.item()],
            "confidence": probs[i, pred].item(),
        }
        for i, (text, pred) in enumerate(zip(texts, preds))
    ]