Related dataset: uonlp/CulturaX (dataset viewer available • 7.18B rows • 38.8k downloads • 621 likes).
How to use dschulmeist/TiME-it-xs with Transformers:
# TiME-it-xs: monolingual BERT-style encoder that outputs embeddings for Italian.
# Distilled from FacebookAI/xlm-roberta-large.

# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("feature-extraction", model="dschulmeist/TiME-it-xs")

# Load model directly
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("dschulmeist/TiME-it-xs")
model = AutoModel.from_pretrained("dschulmeist/TiME-it-xs")
from transformers import AutoTokenizer, AutoModel
import torch
# Hugging Face Hub repository id for the TiME-it-xs encoder.
repo = "dschulmeist/TiME-it-xs"
# Download (or load from local cache) the tokenizer and encoder weights.
tok = AutoTokenizer.from_pretrained(repo)
mdl = AutoModel.from_pretrained(repo)
def mean_pool(last_hidden_state: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    """Mean-pool token embeddings over the sequence axis, ignoring padding.

    Args:
        last_hidden_state: ``(batch, seq_len, hidden)`` token embeddings.
        attention_mask: ``(batch, seq_len)`` mask — 1 for real tokens, 0 for padding.

    Returns:
        ``(batch, hidden)`` sentence embeddings: the mean of the non-padding
        token vectors for each sample.
    """
    # Expand the mask to (batch, seq_len, 1) and cast it to the embedding dtype
    # so it broadcasts against the hidden states.
    mask = attention_mask.unsqueeze(-1).type_as(last_hidden_state)
    # Sum only the real-token vectors, then divide by each sample's token count;
    # the clamp guards against division by zero for an all-padding row.
    return (last_hidden_state * mask).sum(1) / mask.sum(1).clamp(min=1e-9)
# Tokenize a batch, run it through the encoder, and mean-pool the token
# embeddings into one sentence vector per input.
batch = tok(["example sentence"], padding=True, truncation=True, return_tensors="pt")
encoded = mdl(**batch)
sentence_emb = mean_pool(encoded.last_hidden_state, batch["attention_mask"])
print(sentence_emb.shape)