{
"backend": "tokenizers",
"bos_token": "<|bos|>",
"eos_token": "<|eos|>",
"extra_special_tokens": [
"<|eot|>",
"<|system|>",
"<|user|>",
"<|assistant|>",
"<|begin_of_think|>",
"<|end_of_think|>",
"<|placeholder_0|>",
"<|placeholder_1|>",
"<|placeholder_2|>",
"<|placeholder_3|>",
"<|placeholder_4|>",
"<|placeholder_5|>",
"<|placeholder_6|>",
"<|placeholder_7|>",
"<|placeholder_8|>",
"<|placeholder_9|>"
],
"mask_token": "<|mask|>",
"model_max_length": 1000000000000000019884624838656,
"pad_token": "<|pad|>",
"tokenizer_class": "QiTianTokenizerFast"
}