dialogptmodel / tokenizer_config.json
{"unk_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "bos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "eos_token": {"content": "<|endoftext|>", "single_word": false, "lstrip": false, "rstrip": false, "normalized": true, "__type": "AddedToken"}, "add_prefix_space": false, "add_bos_token": false, "added_tokens_decoder": {"50256": {"content": "<|endoftext|>", "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": true}}, "chat_template": "{% for message in messages %}{{ message.content }}{{ eos_token }}{% endfor %}", "clean_up_tokenization_spaces": true, "errors": "replace", "model_max_length": 1024, "pad_token": null, "special_tokens_map_file": null, "name_or_path": "microsoft/DialoGPT-medium", "tokenizer_class": "GPT2Tokenizer"}