Transformers

How to use MU-NLPC/XLM-R-large-reflective-conf4 with Transformers:
# Use a pipeline as a high-level helper
from transformers import pipeline
pipe = pipeline("text-classification", model="MU-NLPC/XLM-R-large-reflective-conf4")
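Once the pipeline is created, calling it on a string returns the standard text-classification output: a list of dicts with "label" and "score" keys. The sentence below is only an illustration, and the actual label names come from the checkpoint's own configuration, not from this example:

result = pipe("I realised during the placement that I should have asked my mentor for feedback earlier.")
print(result)  # e.g. [{'label': '<model-specific label>', 'score': 0.97}]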
# Load model directly
from transformers import AutoTokenizer, AutoModelForSequenceClassification
tokenizer = AutoTokenizer.from_pretrained("MU-NLPC/XLM-R-large-reflective-conf4")
model = AutoModelForSequenceClassification.from_pretrained("MU-NLPC/XLM-R-large-reflective-conf4")
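A minimal inference sketch with the directly loaded tokenizer and model, assuming a PyTorch backend; the input sentence is illustrative, and the predicted label is looked up in the checkpoint's id2label mapping rather than hard-coded:

import torch

text = "I realised during the placement that I should have asked my mentor for feedback earlier."
inputs = tokenizer(text, return_tensors="pt", truncation=True)

# Forward pass without gradient tracking, since we only need predictions
with torch.no_grad():
    logits = model(**inputs).logits

# Convert logits to probabilities and pick the most likely class
probs = logits.softmax(dim=-1)
pred = probs.argmax(dim=-1).item()
print(model.config.id2label[pred], round(probs[0, pred].item(), 3))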