{
"tokenizer_class": "LlamaTokenizer",
"vocab_size": 50000,
"model_max_length": 2048,
"padding_side": "right",
"truncation_side": "right",
"use_fast": false,
"clean_up_tokenization_spaces": true,
"bos_token": "",
"eos_token": "",
"unk_token": "",
"pad_token": "",
"mask_token": "",
"cls_token": "",
"sep_token": "",
"additional_special_tokens": [
"",
"",
"",
""
],
"special_tokens_map": {
"": "",
"": "",
"": "",
"": ""
}
}
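
A minimal usage sketch for this configuration, assuming the file is saved as tokenizer_config.json in a local directory alongside the matching SentencePiece tokenizer.model; the directory path "./my-llama-tokenizer" is a hypothetical placeholder and is not part of the config above.

from transformers import AutoTokenizer

# Load the slow (SentencePiece-backed) tokenizer, matching "use_fast": false.
tokenizer = AutoTokenizer.from_pretrained(
    "./my-llama-tokenizer",  # hypothetical local path, not from the config
    use_fast=False,
)

# Sanity-check the special tokens and length limit defined in the config.
print(tokenizer.bos_token, tokenizer.eos_token, tokenizer.unk_token)
print(tokenizer.model_max_length)  # expected: 2048

# Padding uses the right side, per "padding_side"; this requires the
# pad_token defined above.
batch = tokenizer(["Hello, world!"], padding="max_length", max_length=16)
print(batch["input_ids"])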