denisman committed · verified
Commit d90f839 · Parent: 598196f

Upload tokenizer
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+{
+  "<|im_end|>": 32000,
+  "<|im_start|>": 32001
+}
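These two entries extend the base Llama vocabulary (IDs 0-31999) with the ChatML markers at IDs 32000 and 32001. A minimal sketch to verify the mapping after loading the tokenizer; the repo id below is a placeholder, not taken from this commit:

from transformers import AutoTokenizer

# Placeholder repo id; substitute the actual repository.
tokenizer = AutoTokenizer.from_pretrained("denisman/tokenizer-repo")

# The ChatML markers should resolve to the IDs declared in added_tokens.json.
assert tokenizer.convert_tokens_to_ids("<|im_end|>") == 32000
assert tokenizer.convert_tokens_to_ids("<|im_start|>") == 32001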
special_tokens_map.json CHANGED
@@ -7,6 +7,13 @@
     "single_word": false
   },
   "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
     "content": "</s>",
     "lstrip": false,
     "normalized": false,
tokenizer.json CHANGED
@@ -34,6 +34,24 @@
       "rstrip": false,
       "normalized": false,
       "special": true
+    },
+    {
+      "id": 32000,
+      "content": "<|im_end|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
+    },
+    {
+      "id": 32001,
+      "content": "<|im_start|>",
+      "single_word": false,
+      "lstrip": false,
+      "rstrip": false,
+      "normalized": false,
+      "special": true
     }
   ],
   "normalizer": {
tokenizer_config.json CHANGED
@@ -25,22 +25,41 @@
       "rstrip": false,
       "single_word": false,
       "special": true
+    },
+    "32000": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32001": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
     }
   },
   "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
   "clean_up_tokenization_spaces": false,
-  "eos_token": "</s>",
+  "eos_token": "<|im_end|>",
   "legacy": true,
   "max_length": 2048,
-  "model_max_length": 1000000000000000019884624838656,
-  "pad_token": null,
+  "model_max_length": 2048,
+  "pad_token": "</s>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,
   "stride": 0,
   "tokenizer_class": "LlamaTokenizer",
   "truncation_side": "right",
   "truncation_strategy": "longest_first",
+  "trust_remote_code": false,
   "unk_token": "<unk>",
-  "use_default_system_prompt": false
+  "use_default_system_prompt": true,
+  "use_fast": true
 }
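The new chat_template renders standard ChatML: each message becomes <|im_start|>{role}\n{content}<|im_end|>\n, and add_generation_prompt=True opens an assistant turn. A sketch of rendering it through apply_chat_template, with the same placeholder repo id:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("denisman/tokenizer-repo")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# tokenize=False returns the rendered prompt string instead of token ids.
text = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(text)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant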