# test_dynamic_tokenizer_legacy / tokenization_fast.py
# Author: sgugger — "Initial commit" (2ebb1d0)
from transformers import BertTokenizerFast
from .tokenization import NewTokenizer
class NewTokenizerFast(BertTokenizerFast):
    """Fast tokenizer fixture used to exercise dynamic-module loading.

    Subclasses ``BertTokenizerFast`` without changing behavior; the two
    class attributes below are what the dynamic-tokenizer tests inspect.
    """

    # Slow counterpart paired with this fast tokenizer (from the sibling module).
    slow_tokenizer_class = NewTokenizer
    # Marker attribute the test suite checks to confirm this custom class
    # (not the stock BertTokenizerFast) was actually loaded.
    special_attribute_present = True