tiny-aya-global-lora-qa / tokenizer_config.json
Bateesa's picture
Upload folder using huggingface_hub
f1c5f3f verified
raw
history blame contribute delete
560 Bytes
{
  "add_prefix_space": false,
  "backend": "tokenizers",
  "bos_token": "<BOS_TOKEN>",
  "clean_up_tokenization_spaces": false,
  "cls_token": "<CLS>",
  "eos_token": "<|END_OF_TURN_TOKEN|>",
  "errors": "replace",
  "is_local": false,
  "legacy": true,
  "mask_token": "<MASK_TOKEN>",
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<PAD>",
  "sep_token": "<SEP>",
  "sp_model_kwargs": {},
  "spaces_between_special_tokens": false,
  "tokenizer_class": "CohereTokenizer",
  "unk_token": "<UNK>",
  "use_default_system_prompt": false
}