Paper: RoBERTa: A Robustly Optimized BERT Pretraining Approach (arXiv:1907.11692)
```python
# Load the tokenizer and masked-LM head directly
from transformers import AutoTokenizer, AutoModelForMaskedLM

tokenizer = AutoTokenizer.from_pretrained("acul3/roberta-base-indo")
model = AutoModelForMaskedLM.from_pretrained("acul3/roberta-base-indo")
```
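With the tokenizer and model loaded, a masked token can be predicted manually. A minimal sketch, assuming PyTorch weights and a RoBERTa-style `<mask>` token; the Indonesian example sentence is illustrative, not from the model card:

```python
import torch

# Illustrative Indonesian sentence with one masked token
text = "Ibu kota Indonesia adalah <mask>."
inputs = tokenizer(text, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# Find the mask position and take the highest-scoring vocabulary token
mask_index = (inputs.input_ids == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
predicted_id = logits[0, mask_index].argmax(dim=-1)
print(tokenizer.decode(predicted_id))
```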
```python
# Use a pipeline as a high-level helper
from transformers import pipeline

pipe = pipeline("fill-mask", model="acul3/roberta-base-indo")
```
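The pipeline handles tokenization and decoding internally and returns the top candidates for the masked position. A short usage sketch, again with an illustrative sentence:

```python
# Each result dict contains the filled token, its score, and the full sequence
results = pipe("Ibu kota Indonesia adalah <mask>.")
for r in results:
    print(r["token_str"], r["score"])
```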