{
"added_tokens": [],
"bos_token": "",
"eos_token": "",
"unk_token": "",
"pad_token": "",
"mask_token": "",
"cls_token": "",
"sep_token": "",
"additional_special_tokens": [],
"clean_up_tokenization_spaces": true,
"model_max_length": 2048,
"tokenizer_class": "LlamaTokenizer",
"vocab_size": 50000,
"special_tokens_map": {
"": "",
"": "",
"": "",
"": ""
},
"auto_map": {
"AutoTokenizer": [
"tokenization_llama.LlamaTokenizer"
]
}
}
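
A minimal usage sketch, assuming this file is saved as `tokenizer_config.json` in a local model directory alongside the tokenizer's vocabulary files; the directory path below is a placeholder, and `trust_remote_code=True` is only relevant if the `auto_map` entry resolves to a custom `tokenization_llama` module shipped with the model rather than the built-in `transformers` class.

```python
# Minimal sketch: load a tokenizer driven by the configuration above.
# "path/to/tokenizer_dir" is a placeholder, not a path from this repository.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(
    "path/to/tokenizer_dir",
    trust_remote_code=True,  # only needed if auto_map points at custom tokenizer code
)

# Special tokens and the length limit come from tokenizer_config.json.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)
print(tokenizer.model_max_length)  # 2048

# Round-trip a short string to confirm the tokenizer is wired up correctly.
ids = tokenizer("Hello, world!")["input_ids"]
print(tokenizer.decode(ids))
```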