{
  "add_prefix_space": null,
  "backend": "tokenizers",
  "bos_token": "<s>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|eot_id|>",
  "extra_special_tokens": [],
  "from_slow": true,
  "is_local": false,
  "legacy": false,
  "model_max_length": 4096,
  "pad_token": "<unk>",
  "padding_side": "left",
  "split_special_tokens": false,
  "tokenizer_class": "TokenizersBackend",
  "unk_token": "<unk>",
  "use_default_system_prompt": false,
  "vocab_size": 51200,
  "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}"
}