stldec_random / tokenizer_files.json
saracandu's picture
Upload folder using huggingface_hub
8966cc8 verified
raw
history blame contribute delete
265 Bytes
{
"name_or_path": "temporal_logic_tokenizer",
"special_tokens_map_file": "special_tokens_map.json",
"do_lower_case": false,
"model_max_length": 500,
"bos_token": "</s>",
"eos_token": "<s>",
"unk_token": "<unk>",
"pad_token": "<pad>",
"added_tokens": []
}