PyTorch
English
llama
richardmfan committed
Commit b86bf69 · verified · 1 Parent(s): 90bc69c

upload mid-4

config.json CHANGED
@@ -11,7 +11,7 @@
   "hidden_size": 8192,
   "initializer_range": 0.02,
   "intermediate_size": 28672,
-  "max_position_embeddings": 32768,
+  "max_position_embeddings": 524288,
   "mlp_bias": false,
   "model_type": "llama",
   "num_attention_heads": 64,
@@ -21,9 +21,10 @@
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
-  "rope_theta": 500000,
+  "rope_theta": 10000000,
   "tie_word_embeddings": false,
-  "transformers_version": "4.49.0",
+  "torch_dtype": "float32",
+  "transformers_version": "4.53.2",
   "use_cache": true,
-  "vocab_size": 250880
+  "vocab_size": 250112
   }
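The changes above extend the context window from 32768 to 524288 positions and raise the RoPE base from 500000 to 10000000, alongside the dtype, transformers version, and vocabulary updates. A minimal check, assuming the standard transformers AutoConfig API and a hypothetical local checkout of this repository:

from transformers import AutoConfig

# Hypothetical path; point this at the actual repository checkout.
config = AutoConfig.from_pretrained("./")

print(config.max_position_embeddings)  # 524288 after this commit (was 32768)
print(config.rope_theta)               # 10000000 after this commit (was 500000)
print(config.torch_dtype)              # float32
print(config.vocab_size)               # 250112 after this commit (was 250880)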
done.txt CHANGED
@@ -1 +1 @@
-2025-07-08 21:39:09.403951-05:00
+saving done
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 0,
+  "eos_token_id": 1,
+  "transformers_version": "4.53.2"
+}
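The new generation_config.json pins the BOS and EOS ids that generate() falls back to when the caller does not override them. A quick way to confirm it loads, again assuming a hypothetical local checkout:

from transformers import GenerationConfig

# Hypothetical path; point this at the actual repository checkout.
gen_config = GenerationConfig.from_pretrained("./")

print(gen_config.bos_token_id)  # 0
print(gen_config.eos_token_id)  # 1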
pytorch_model-00001-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5c464ca3749daf9bc00e92e418fa9eebd6a3b182b6cfb0031b2cdff51765d4b6
-size 49292351711
+oid sha256:90f14f6cfbc5900db7d1c9fdeb3f9d18d434fc9d6c1320f8e6ca5e1bc26f5bbe
+size 49871101268
pytorch_model-00002-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0f7930d8e4821a47b2e5e3d419308956c3f93bdae9ab12b3a03963a43c3424cc
-size 49795739754
+oid sha256:4d495a1bddb3cc0d2220caa8748ebfb1e77710be62a15637111d72550c0b58b5
+size 49795740133
pytorch_model-00003-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9a69b9d074444bb6c1a64b4feff642fb00e472221896dba8f77a0a66f0ea22d4
-size 49460262641
+oid sha256:dd2649b9bf60a08f6b08ef3e6f882c18df83bdd3936f7e1e972849c3e8325590
+size 49460263104
pytorch_model-00004-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a7ca17baf4d17a90e131b4d251544f6011d120a499029a02756cb8fd4e45ae35
-size 49795739754
+oid sha256:378f94f937972e26dafe5e362968273340231bb111111d7a56d74be61f40a65d
+size 49795740133
pytorch_model-00005-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d1e019f6fd701a862064bf4f9218a20e9778168fdfbba07f7a58ba369898e2a1
-size 49460262641
+oid sha256:b6e6b529a6053ce3ce687410a0fe05d27ba849869ad355498c6166ebf78daf6b
+size 49460263104
pytorch_model-00006-of-00006.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2febfb2699b1ad301e5a30011ef28ecdc1b3e32a3056c7664b7591d0cfad30fb
-size 42447010822
+oid sha256:f8a087069d0da740c0b466a61db8fa5f1ad9f57fdeeba8fb1313f1cc5582fa08
+size 41817930473
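All six shard pointers changed, so any previously downloaded copies need to be re-fetched. A sketch for verifying downloaded shards against the new LFS oids, assuming the .bin files sit in a local checkout (only two shards listed here as examples):

import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk: int = 1 << 20) -> str:
    # Stream the file so ~50 GB shards never need to fit in memory.
    h = hashlib.sha256()
    with path.open("rb") as f:
        for block in iter(lambda: f.read(chunk), b""):
            h.update(block)
    return h.hexdigest()

# Expected digests copied from the updated pointers in this commit.
expected = {
    "pytorch_model-00001-of-00006.bin": "90f14f6cfbc5900db7d1c9fdeb3f9d18d434fc9d6c1320f8e6ca5e1bc26f5bbe",
    "pytorch_model-00006-of-00006.bin": "f8a087069d0da740c0b466a61db8fa5f1ad9f57fdeeba8fb1313f1cc5582fa08",
}
for name, oid in expected.items():
    actual = sha256_of(Path(name))
    print(name, "OK" if actual == oid else f"MISMATCH {actual}")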
pytorch_model.bin.index.json CHANGED
@@ -1,6 +1,7 @@
 {
   "metadata": {
-    "total_size": 290251112448
+    "total_parameters": 72550195200,
+    "total_size": 290200780800
   },
   "weight_map": {
     "lm_head.weight": "pytorch_model-00006-of-00006.bin",
@@ -41,15 +42,15 @@
   "model.layers.11.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
   "model.layers.11.self_attn.q_proj.weight": "pytorch_model-00001-of-00006.bin",
   "model.layers.11.self_attn.v_proj.weight": "pytorch_model-00001-of-00006.bin",
-  "model.layers.12.input_layernorm.weight": "pytorch_model-00001-of-00006.bin",
+  "model.layers.12.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.12.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.12.mlp.gate_proj.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.12.mlp.up_proj.weight": "pytorch_model-00002-of-00006.bin",
-  "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00001-of-00006.bin",
-  "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00002-of-00006.bin",
-  "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
-  "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00002-of-00006.bin",
-  "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00002-of-00006.bin",
+  "model.layers.12.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
+  "model.layers.12.self_attn.k_proj.weight": "pytorch_model-00001-of-00006.bin",
+  "model.layers.12.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
+  "model.layers.12.self_attn.q_proj.weight": "pytorch_model-00001-of-00006.bin",
+  "model.layers.12.self_attn.v_proj.weight": "pytorch_model-00001-of-00006.bin",
   "model.layers.13.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.13.mlp.down_proj.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.13.mlp.gate_proj.weight": "pytorch_model-00002-of-00006.bin",
@@ -176,15 +177,15 @@
   "model.layers.25.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.25.self_attn.q_proj.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.25.self_attn.v_proj.weight": "pytorch_model-00002-of-00006.bin",
-  "model.layers.26.input_layernorm.weight": "pytorch_model-00002-of-00006.bin",
+  "model.layers.26.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
   "model.layers.26.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
   "model.layers.26.mlp.gate_proj.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.26.mlp.up_proj.weight": "pytorch_model-00002-of-00006.bin",
-  "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00002-of-00006.bin",
-  "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00003-of-00006.bin",
-  "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
-  "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00003-of-00006.bin",
-  "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00003-of-00006.bin",
+  "model.layers.26.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
+  "model.layers.26.self_attn.k_proj.weight": "pytorch_model-00002-of-00006.bin",
+  "model.layers.26.self_attn.o_proj.weight": "pytorch_model-00002-of-00006.bin",
+  "model.layers.26.self_attn.q_proj.weight": "pytorch_model-00002-of-00006.bin",
+  "model.layers.26.self_attn.v_proj.weight": "pytorch_model-00002-of-00006.bin",
   "model.layers.27.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
   "model.layers.27.mlp.down_proj.weight": "pytorch_model-00003-of-00006.bin",
   "model.layers.27.mlp.gate_proj.weight": "pytorch_model-00003-of-00006.bin",
@@ -329,15 +330,15 @@
   "model.layers.40.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
   "model.layers.40.self_attn.q_proj.weight": "pytorch_model-00003-of-00006.bin",
   "model.layers.40.self_attn.v_proj.weight": "pytorch_model-00003-of-00006.bin",
-  "model.layers.41.input_layernorm.weight": "pytorch_model-00003-of-00006.bin",
+  "model.layers.41.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.41.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.41.mlp.gate_proj.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.41.mlp.up_proj.weight": "pytorch_model-00004-of-00006.bin",
-  "model.layers.41.post_attention_layernorm.weight": "pytorch_model-00003-of-00006.bin",
-  "model.layers.41.self_attn.k_proj.weight": "pytorch_model-00004-of-00006.bin",
-  "model.layers.41.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
-  "model.layers.41.self_attn.q_proj.weight": "pytorch_model-00004-of-00006.bin",
-  "model.layers.41.self_attn.v_proj.weight": "pytorch_model-00004-of-00006.bin",
+  "model.layers.41.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
+  "model.layers.41.self_attn.k_proj.weight": "pytorch_model-00003-of-00006.bin",
+  "model.layers.41.self_attn.o_proj.weight": "pytorch_model-00003-of-00006.bin",
+  "model.layers.41.self_attn.q_proj.weight": "pytorch_model-00003-of-00006.bin",
+  "model.layers.41.self_attn.v_proj.weight": "pytorch_model-00003-of-00006.bin",
   "model.layers.42.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.42.mlp.down_proj.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.42.mlp.gate_proj.weight": "pytorch_model-00004-of-00006.bin",
@@ -464,15 +465,15 @@
   "model.layers.54.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.54.self_attn.q_proj.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.54.self_attn.v_proj.weight": "pytorch_model-00004-of-00006.bin",
-  "model.layers.55.input_layernorm.weight": "pytorch_model-00004-of-00006.bin",
+  "model.layers.55.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
   "model.layers.55.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
   "model.layers.55.mlp.gate_proj.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.55.mlp.up_proj.weight": "pytorch_model-00004-of-00006.bin",
-  "model.layers.55.post_attention_layernorm.weight": "pytorch_model-00004-of-00006.bin",
-  "model.layers.55.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
-  "model.layers.55.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
-  "model.layers.55.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
-  "model.layers.55.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
+  "model.layers.55.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
+  "model.layers.55.self_attn.k_proj.weight": "pytorch_model-00004-of-00006.bin",
+  "model.layers.55.self_attn.o_proj.weight": "pytorch_model-00004-of-00006.bin",
+  "model.layers.55.self_attn.q_proj.weight": "pytorch_model-00004-of-00006.bin",
+  "model.layers.55.self_attn.v_proj.weight": "pytorch_model-00004-of-00006.bin",
   "model.layers.56.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
   "model.layers.56.mlp.down_proj.weight": "pytorch_model-00005-of-00006.bin",
   "model.layers.56.mlp.gate_proj.weight": "pytorch_model-00005-of-00006.bin",
@@ -617,15 +618,15 @@
   "model.layers.7.self_attn.o_proj.weight": "pytorch_model-00001-of-00006.bin",
   "model.layers.7.self_attn.q_proj.weight": "pytorch_model-00001-of-00006.bin",
   "model.layers.7.self_attn.v_proj.weight": "pytorch_model-00001-of-00006.bin",
-  "model.layers.70.input_layernorm.weight": "pytorch_model-00005-of-00006.bin",
+  "model.layers.70.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
   "model.layers.70.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
   "model.layers.70.mlp.gate_proj.weight": "pytorch_model-00006-of-00006.bin",
   "model.layers.70.mlp.up_proj.weight": "pytorch_model-00006-of-00006.bin",
-  "model.layers.70.post_attention_layernorm.weight": "pytorch_model-00005-of-00006.bin",
-  "model.layers.70.self_attn.k_proj.weight": "pytorch_model-00006-of-00006.bin",
-  "model.layers.70.self_attn.o_proj.weight": "pytorch_model-00006-of-00006.bin",
-  "model.layers.70.self_attn.q_proj.weight": "pytorch_model-00006-of-00006.bin",
-  "model.layers.70.self_attn.v_proj.weight": "pytorch_model-00006-of-00006.bin",
+  "model.layers.70.post_attention_layernorm.weight": "pytorch_model-00006-of-00006.bin",
+  "model.layers.70.self_attn.k_proj.weight": "pytorch_model-00005-of-00006.bin",
+  "model.layers.70.self_attn.o_proj.weight": "pytorch_model-00005-of-00006.bin",
+  "model.layers.70.self_attn.q_proj.weight": "pytorch_model-00005-of-00006.bin",
+  "model.layers.70.self_attn.v_proj.weight": "pytorch_model-00005-of-00006.bin",
   "model.layers.71.input_layernorm.weight": "pytorch_model-00006-of-00006.bin",
   "model.layers.71.mlp.down_proj.weight": "pytorch_model-00006-of-00006.bin",
   "model.layers.71.mlp.gate_proj.weight": "pytorch_model-00006-of-00006.bin",
special_tokens_map.json CHANGED
@@ -1,4 +1,16 @@
 {
-  "bos_token": "<|begin_of_text|>",
-  "eos_token": "<|end_of_text|>"
+  "bos_token": {
+    "content": "<|begin_of_text|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|end_of_text|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
 }
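The BOS/EOS strings are unchanged; the file now spells out the AddedToken flags (lstrip, normalized, rstrip, single_word) instead of bare strings. A quick check, assuming the usual AutoTokenizer API and a hypothetical local checkout:

from transformers import AutoTokenizer

# Hypothetical path; point this at the actual repository checkout.
tok = AutoTokenizer.from_pretrained("./")

print(tok.bos_token)  # <|begin_of_text|>
print(tok.eos_token)  # <|end_of_text|>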
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c8f4e9569156ab4de1c7e7ba90054ba1af10c5ee6d2ad7268ee26f7d6179f940
-size 13486839
+oid sha256:f4a4ca9d0a58168cb02f13950d12275d1cb3bccb470a907328f2d2e495116c73
+size 20680501
tokenizer_config.json CHANGED
@@ -911,891 +911,12 @@
   "rstrip": false,
   "single_word": false,
   "special": true
-  },
-  "250112": {
-    "content": "reserved_special_token_90",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false,
-    "special": true
-  },
-  [… 108 further added-token entries removed here, ids "250113" through "250220" ("reserved_special_token_91" … "reserved_special_token_198"), each with the same flags as the entries shown …]
-  "250221": {
-    "content": "reserved_special_token_199",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false,
-    "special": true
   }
   },
   "bos_token": "<|begin_of_text|>",
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|end_of_text|>",
+  "extra_special_tokens": {},
   "model_input_names": [
     "input_ids",
     "attention_mask"