{
  "added_tokens_decoder": {
    "0": {
      "content": "unk",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "pad",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "/s",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "s",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "auto_map": {
    "AutoTokenizer": [
      "tokenizer.STLTokenizer",
      null
    ]
  },
  "bos_token": "/s",
  "clean_up_tokenization_spaces": false,
  "eos_token": "s",
  "extra_special_tokens": {},
  "model_max_length": 512,
  "pad_token": "pad",
  "tokenizer_class": "STLTokenizer",
  "unk_token": "unk"
}