testaccount-ai committed on
Commit
17c19b0
·
verified ·
1 Parent(s): 0cd61d0

Create tokenizer_config.json

Browse files
Files changed (1) hide show
  1. tokenizer_config.json +52 -0
tokenizer_config.json ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": false,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": true,
5
+ "added_tokens_decoder": {
6
+ "151643": {
7
+ "content": "<|endoftext|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false,
12
+ "special": true
13
+ },
14
+ "151644": {
15
+ "content": "<|user|>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false,
20
+ "special": true
21
+ },
22
+ "151645": {
23
+ "content": "<|assistant|>",
24
+ "lstrip": false,
25
+ "normalized": false,
26
+ "rstrip": false,
27
+ "single_word": false,
28
+ "special": true
29
+ },
30
+ "151646": {
31
+ "content": "<|system|>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": true
37
+ }
38
+ },
39
+ "bos_token": "<|endoftext|>",
40
+ "chat_template": "{%- set ns = namespace(messages=[], system_prompt='') -%}{%- for message in messages -%}{%- if message['role'] == 'system' -%}{%- set ns.system_prompt = message['content'] -%}{%- else -%}{%- set ns.messages = ns.messages + [message] -%}{%- endif -%}{%- endfor -%}{%- if ns.system_prompt != '' -%}{%- set system_message = '<|system|>\\n' + ns.system_prompt + '\\n' -%}{%- else -%}{%- set system_message = '' -%}{%- endif -%}{%- for message in ns.messages -%}{%- if message['role'] == 'user' -%}{{ '<|user|>\\n' + message['content'] + '\\n' }}{%- elif message['role'] == 'assistant' -%}{{ '<|assistant|>\\n' + message['content'] + '\\n' }}{%- endif -%}{%- endfor -%}{%- if add_generation_prompt -%}{{ '<|assistant|>\\n' }}{%- endif -%}",
41
+ "clean_up_tokenization_spaces": false,
42
+ "eos_token": "<|endoftext|>",
43
+ "legacy": false,
44
+ "model_max_length": 8192,
45
+ "pad_token": "<|endoftext|>",
46
+ "padding_side": "right",
47
+ "sp_model_kwargs": {},
48
+ "tokenizer_class": "PreTrainedTokenizerFast",
49
+ "trust_remote_code": true,
50
+ "unk_token": null,
51
+ "use_fast": true
52
+ }