Anthony committed
Commit 61d85e1 · 1 parent: 5496c56

Update tokenizer_config.json

Files changed (1): tokenizer_config.json (+13 −14)
tokenizer_config.json CHANGED
```diff
@@ -3,15 +3,14 @@
   "add_prefix_space": true,
   "added_tokens_decoder": {
     "50256": {
-      "content": "<|user|>",
-      "lstrip": false,
+      "content": "<|endoftext|>",
+      "lstrip": true,
       "normalized": false,
-      "rstrip": false,
-      "single_word": false,
+      "rstrip": true,
       "special": true
     },
     "50257": {
-      "content": "<|assistant|>",
+      "content": "<|im_end|>",
       "lstrip": false,
       "normalized": false,
       "rstrip": false,
@@ -19,21 +18,21 @@
       "special": true
     },
     "50258": {
-      "content": "<|system|>",
-      "lstrip": false,
+      "content": "<|im_start|>",
+      "lstrip": true,
       "normalized": false,
-      "rstrip": false,
+      "rstrip": true,
       "single_word": false,
       "special": true
     }
   },
-  "bos_token": "<|startoftext|>",
-  "chat_template": "{% for message in messages %}{{ message['role'] + '\\n' + message['content'] + '\\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|assistant|>\\n' }}{% endif %}",
+  "bos_token": "<|endoftext|>",
+  "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|endoftext|>",
+  "eos_token": "<|im_end|>",
   "errors": "replace",
-  "model_max_length": 2048,
-  "pad_token": "<|pad|>",
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<|endoftext|>",
   "tokenizer_class": "GPT2Tokenizer",
-  "unk_token": "<|unknown|>"
+  "unk_token": "<|endoftext|>"
 }
```
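For context, the new chat_template switches the tokenizer to the ChatML convention, wrapping each turn in `<|im_start|>`/`<|im_end|>`, and the new model_max_length value 1000000000000000019884624838656 is transformers' int(1e30) sentinel meaning "no fixed limit". Below is a minimal sketch of how the updated template renders, using jinja2 directly so no specific model repo has to be assumed; the example conversation is illustrative and not part of the commit:

```python
from jinja2 import Template

# chat_template copied from the new tokenizer_config.json (ChatML-style).
# "\\n" in this Python literal reaches Jinja as the escape "\n", matching
# the "\n" stored inside the JSON string.
CHAT_TEMPLATE = (
    "{% if not add_generation_prompt is defined %}"
    "{% set add_generation_prompt = false %}{% endif %}"
    "{% for message in messages %}"
    "{{'<|im_start|>' + message['role'] + '\\n' + message['content']"
    " + '<|im_end|>' + '\\n'}}"
    "{% endfor %}"
    "{% if add_generation_prompt %}{{ '<|im_start|>assistant\\n' }}{% endif %}"
)

# Illustrative conversation, just to show the rendered prompt shape.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

prompt = Template(CHAT_TEMPLATE).render(
    messages=messages, add_generation_prompt=True
)
print(prompt)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
```

Once this config is loaded with a tokenizer, `tokenizer.apply_chat_template(messages, add_generation_prompt=True, tokenize=False)` should produce the same string.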