deepnet committed · verified
Commit 23eafa1 · 1 Parent(s): 5a32b85

Upload tokenizer
special_tokens_map.json CHANGED
@@ -7,13 +7,6 @@
     "single_word": false
   },
   "eos_token": {
-    "content": "<|eot_id|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
     "content": "<|end_of_text|>",
     "lstrip": false,
     "normalized": false,
tokenizer.json CHANGED
@@ -2334,10 +2334,69 @@
     ]
   },
   "post_processor": {
-    "type": "ByteLevel",
-    "add_prefix_space": true,
-    "trim_offsets": false,
-    "use_regex": true
+    "type": "Sequence",
+    "processors": [
+      {
+        "type": "ByteLevel",
+        "add_prefix_space": true,
+        "trim_offsets": false,
+        "use_regex": true
+      },
+      {
+        "type": "TemplateProcessing",
+        "single": [
+          {
+            "SpecialToken": {
+              "id": "<|begin_of_text|>",
+              "type_id": 0
+            }
+          },
+          {
+            "Sequence": {
+              "id": "A",
+              "type_id": 0
+            }
+          }
+        ],
+        "pair": [
+          {
+            "SpecialToken": {
+              "id": "<|begin_of_text|>",
+              "type_id": 0
+            }
+          },
+          {
+            "Sequence": {
+              "id": "A",
+              "type_id": 0
+            }
+          },
+          {
+            "SpecialToken": {
+              "id": "<|begin_of_text|>",
+              "type_id": 1
+            }
+          },
+          {
+            "Sequence": {
+              "id": "B",
+              "type_id": 1
+            }
+          }
+        ],
+        "special_tokens": {
+          "<|begin_of_text|>": {
+            "id": "<|begin_of_text|>",
+            "ids": [
+              128000
+            ],
+            "tokens": [
+              "<|begin_of_text|>"
+            ]
+          }
+        }
+      }
+    ]
   },
   "decoder": {
     "type": "ByteLevel",
@@ -2353,7 +2412,7 @@
   "end_of_word_suffix": null,
   "fuse_unk": false,
   "byte_fallback": false,
-  "ignore_merges": false,
+  "ignore_merges": true,
   "vocab": {
     "!": 0,
     "\"": 1,
tokenizer_config.json CHANGED
@@ -2050,18 +2050,15 @@
     }
   },
   "bos_token": "<|begin_of_text|>",
-  "chat_template": "{% set system_message = 'You are a helpful assistant.' %}{% if messages[0]['role'] == 'system' %}{% set system_message = messages[0]['content'] %}{% endif %}{% if system_message is defined %}{{ '<|begin_of_text|>' + '<|start_header_id|>system<|end_header_id|>\\n\\n' + system_message + '<|eot_id|>' }}{% endif %}{% for message in messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|start_header_id|>user<|end_header_id|>\\n\\n' + content + '<|eot_id|><|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{% elif message['role'] == 'assistant' %}{{ content + '<|eot_id|>' }}{% endif %}{% endfor %}",
+  "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}",
   "clean_up_tokenization_spaces": true,
-  "eos_token": "<|eot_id|>",
+  "eos_token": "<|end_of_text|>",
   "max_length": 2048,
   "model_input_names": [
     "input_ids",
     "attention_mask"
   ],
   "model_max_length": 1000000000000000019884624838656,
-  "pad_token": "<|end_of_text|>",
-  "padding_side": "right",
-  "split_special_tokens": false,
   "stride": 0,
   "tokenizer_class": "PreTrainedTokenizerFast",
   "truncation_side": "right",