polyglot_ko_summary_0424 / tokenizer_config.json
{
"bos_token": "<|startoftext|>",
"clean_up_tokenization_spaces": true,
"eos_token": "<|endoftext|>",
"model_max_length": 1000000000000000019884624838656,
"pad_token": "<|pad|>",
"special_tokens_map_file": "./polyglot/polyglot-ko-1.3b\\special_tokens_map.json",
"tokenizer_class": "PreTrainedTokenizerFast"
}
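For context, a minimal sketch of how this config is consumed at load time, assuming the repo id `eunyounglee/polyglot_ko_summary_0424` (inferred from the page header; adjust if the actual Hub path differs) and a standard `transformers` install:

```python
# A minimal sketch, not the author's documented usage.
# Repo id is an assumption inferred from the page header.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("eunyounglee/polyglot_ko_summary_0424")

# The JSON fields above map directly onto tokenizer attributes:
print(tokenizer.bos_token)         # "<|startoftext|>"
print(tokenizer.eos_token)         # "<|endoftext|>"
print(tokenizer.pad_token)         # "<|pad|>"
print(tokenizer.model_max_length)  # 1000000000000000019884624838656

# Round-trip a short Korean sentence through the fast tokenizer.
ids = tokenizer("요약할 문장입니다.")["input_ids"]
print(tokenizer.decode(ids))
```

Two notes on the config values: `model_max_length` is `int(1e30)`, the sentinel `transformers` writes when no explicit context limit was set when the tokenizer was saved, and the Windows-style `\\` in `special_tokens_map_file` is a local-path artifact from the machine that saved the tokenizer; the special tokens map is resolved from `special_tokens_map.json` in the repo at load time, so the recorded path is informational only.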