{
  "bos_token": "<s>",
  "chat_template": "{% if messages[0]['role'] == 'system' %}{{ '<s>' }}{% endif %}{% for message in messages %}{% if message['role'] == 'user' %}{% set role = 'human' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<|BOT|> ' + role + '\\n' }}{{ message['content'] }}{% if not loop.last or message['role'] != 'assistant' %}{{ '<|EOT|>' }}{% endif %}{% endfor %}{% if add_generation_prompt %}{{ '<|BOT|> assistant\\n' }}{% endif %}",
  "clean_up_tokenization_spaces": false,
  "eos_token": "</s>",
  "legacy": false,
  "model_max_length": 65536,
  "pad_token": "<unk>",
  "padding_side": "left",
  "sp_model_kwargs": {},
  "tokenizer_class": "LlamaTokenizer",
  "unk_token": "<unk>",
  "use_default_system_prompt": false
}