task2_current_best-20260306/tokenizer/tokenizer_config.json
{
  "add_prefix_space": true,
  "backend": "tokenizers",
  "bos_token": "[CLS]",
  "clean_up_tokenization_spaces": false,
  "cls_token": "[CLS]",
  "do_lower_case": false,
  "eos_token": "[SEP]",
  "extra_special_tokens": [
    "[TITLE_EMPTY]"
  ],
  "is_local": true,
  "keep_accents": true,
  "mask_token": "[MASK]",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
  "sp_model_kwargs": {},
  "split_by_punct": false,
  "tokenizer_class": "DebertaV2Tokenizer",
  "unk_id": 1,
  "unk_token": "[UNK]"
}
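
A minimal sketch of loading a tokenizer from this config with the Hugging Face transformers library. The directory path is an assumption taken from the folder breadcrumb above, and it presumes the matching SentencePiece model file sits next to tokenizer_config.json; adjust both to your local layout.

```python
from transformers import DebertaV2Tokenizer

# Path is an assumption based on the folder layout above; point it at the
# directory holding tokenizer_config.json and the SentencePiece model file.
tokenizer = DebertaV2Tokenizer.from_pretrained(
    "task2_current_best-20260306/tokenizer"
)

# The config maps BOS/CLS to [CLS] and EOS/SEP to [SEP], DeBERTa-v2 style.
print(tokenizer.cls_token, tokenizer.sep_token)  # [CLS] [SEP]

# [TITLE_EMPTY] is listed under extra_special_tokens; inspect how an input
# containing it is tokenized. With do_lower_case=false and keep_accents=true,
# the surrounding text is otherwise passed through unchanged.
enc = tokenizer("[TITLE_EMPTY] some body text")
print(tokenizer.convert_ids_to_tokens(enc["input_ids"]))
```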