{ "tokenizer_class": "ChunkTokenizer", "model_max_length": 256, "vocab_size": 4466, "chunk_strategy": "variable_2_3", "special_tokens": {}, "clean_up_tokenization_spaces": false }