{
  "bos_token": "<EOS>",
  "eos_token": "<EOS>",
  "model_max_length": 1000000000000000019884624838656,
  "name_or_path": "frames_text_cleaned_dataset_tokenizer",
  "special_tokens_map_file": "frames_text_cleaned_dataset_tokenizer/special_tokens_map.json",
  "tokenizer_class": "PreTrainedTokenizerFast"
}
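
A minimal loading sketch, assuming this tokenizer_config.json sits in a local directory named frames_text_cleaned_dataset_tokenizer (the "name_or_path" above) alongside its tokenizer.json and the referenced special_tokens_map.json. Note that the huge "model_max_length" value is the transformers int(1e30) sentinel, meaning no maximum sequence length was configured for this tokenizer.

from transformers import AutoTokenizer

# Loads the fast tokenizer described by tokenizer_config.json; the
# "tokenizer_class" field resolves to PreTrainedTokenizerFast.
tokenizer = AutoTokenizer.from_pretrained("frames_text_cleaned_dataset_tokenizer")

# Per the config, BOS and EOS map to the same "<EOS>" special token.
print(tokenizer.bos_token, tokenizer.eos_token)  # -> <EOS> <EOS>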