{
  "bos_token": "<EOS>",
  "eos_token": "<EOS>",
  "model_max_length": 1000000000000000019884624838656,
  "name_or_path": "frames_text_cleaned_dataset_tokenizer",
  "special_tokens_map_file": "frames_text_cleaned_dataset_tokenizer/special_tokens_map.json",
  "tokenizer_class": "PreTrainedTokenizerFast"
}