NT_Testing / app.py
hedy-tang's picture
Update app.py
7da1d49 verified
raw
history blame contribute delete
601 Bytes
python -m venv .env
# Load model directly
from transformers import AutoTokenizer, AutoModelForTokenClassification
import torch

# PII token-classification model: predicts one label per input token
# (e.g. name/email/phone spans), not a single label for the whole text.
tokenizer = AutoTokenizer.from_pretrained("yonigo/distilbert-base-cased-pii-en")
model = AutoModelForTokenClassification.from_pretrained("yonigo/distilbert-base-cased-pii-en")
model.eval()  # disable dropout for deterministic inference

text = "Hello"
inputs = tokenizer(text, return_tensors="pt")

with torch.no_grad():  # no gradients needed for inference
    outputs = model(**inputs)

# Get predicted logits; shape is (batch=1, seq_len, num_labels).
logits = outputs.logits

# BUG FIX: argmax over the label dimension produces one prediction PER
# TOKEN (shape (1, seq_len)). The original `.item()` call raises
# RuntimeError for any input longer than one token — even "Hello"
# tokenizes to [CLS] Hello [SEP] (3 tokens). Decode per-token ids and
# map them to human-readable label names instead.
predicted_ids = torch.argmax(logits, dim=-1)[0].tolist()
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, label_id in zip(tokens, predicted_ids):
    print(f"{token}: {model.config.id2label[label_id]}")