polyglot_ko_newssample_03 / tokenizer_config.json
Upload tokenizer · b0c4f2f
{
"bos_token": "<|startoftext|>",
"clean_up_tokenization_spaces": true,
"eos_token": "<|endoftext|>",
"model_max_length": 1000000000000000019884624838656,
"pad_token": "<|pad|>",
"special_tokens_map_file": "C:\\Users\\hj.lee/.cache\\huggingface\\hub\\models--EleutherAI--polyglot-ko-1.3b\\snapshots\\711da2957fdae31202de86c51edbc0c7f433b9e5\\special_tokens_map.json",
"tokenizer_class": "PreTrainedTokenizerFast"
}
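
For reference, the sketch below shows one common way this configuration file is consumed: loading the fast tokenizer through the 🤗 Transformers `AutoTokenizer` API and reading back the special tokens declared above. The repo id `thisisHJLee/polyglot_ko_newssample_03` is an assumption inferred from the page header, not something confirmed by this file; substitute the actual Hub path or a local directory containing this `tokenizer_config.json`.

```python
from transformers import AutoTokenizer

# Assumption: the tokenizer lives at this Hub repo id (inferred from the page
# header); a local directory path containing tokenizer_config.json also works.
tokenizer = AutoTokenizer.from_pretrained("thisisHJLee/polyglot_ko_newssample_03")

# The special tokens defined in tokenizer_config.json are exposed as attributes.
print(tokenizer.bos_token)         # <|startoftext|>
print(tokenizer.eos_token)         # <|endoftext|>
print(tokenizer.pad_token)         # <|pad|>
print(tokenizer.model_max_length)  # sentinel meaning "no practical length limit"
```

Note that `model_max_length` here is the Transformers "very large integer" sentinel, so no truncation limit is implied by the config itself; pass `max_length` explicitly when tokenizing if you need one.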