Fasih-2B / tokenizer_config.json
Update tokenizer_config.json - SFT step 187
{
  "model_type": "gpt2",
  "tokenizer_class": "PreTrainedTokenizerFast",
  "bos_token": "<|bos|>",
  "eos_token": "<|assistant_end|>",
  "pad_token": null,
  "vocab_size": 65536,
  "pattern": "'(?i:[sdmt]|ll|ve|re)|[^\\r\\n\\p{L}\\p{N}]?+\\p{L}+|\\p{N}{1,2}| ?[^\\s\\p{L}\\p{N}]++[\\r\\n]*|\\s*[\\r\\n]|\\s+(?!\\S)|\\s+",
  "model_max_length": 2048,
  "added_tokens_decoder": {
    "65527": {
      "content": "<|bos|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65528": {
      "content": "<|user_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65529": {
      "content": "<|user_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65530": {
      "content": "<|assistant_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65531": {
      "content": "<|assistant_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65532": {
      "content": "<|python_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65533": {
      "content": "<|python_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65534": {
      "content": "<|output_start|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "65535": {
      "content": "<|output_end|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  }
}
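
Usage note (not part of the JSON file): a minimal Python sketch of how this tokenizer configuration is typically loaded with the Hugging Face transformers library. The repository id "HeshamHaroon/Fasih-2B" is inferred from the page header and is an assumption; only the special-token names, their ids, and the <|assistant_end|> EOS token come from the config itself.

from transformers import AutoTokenizer

# Assumed repo id; replace with the actual model path if it differs.
tokenizer = AutoTokenizer.from_pretrained("HeshamHaroon/Fasih-2B")

# The added_tokens_decoder entries register the chat-turn delimiters as
# special tokens, so each one maps to a single id (65527-65535).
prompt = (
    "<|bos|>"
    "<|user_start|>What is the capital of Oman?<|user_end|>"
    "<|assistant_start|>"
)
input_ids = tokenizer(prompt).input_ids

# eos_token is <|assistant_end|> (id 65531 per the config), so generation
# is expected to stop once the assistant turn is closed.
print(tokenizer.eos_token)
print(tokenizer.convert_tokens_to_ids("<|assistant_end|>"))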