{
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "model_max_length": 1000000000000000019884624838656,
  "special_tokens_map_file": "/apdcephfs_cq2/share_47076/lemonzeng/search/mGPT/models/llama-13b/special_tokens_map.json",
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>"
}