Safetensors · internvl

Commit 915079f · verified · 1 parent: d11f003
KiyotakaWang committed

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,485 @@
+ {
+ " begin=\"": 152071,
+ " class=\"": 152036,
+ " clip-path=\"": 152057,
+ " cx=\"": 152043,
+ " cy=\"": 152044,
+ " d=\"": 152048,
+ " dur=\"": 152067,
+ " fill-rule=\"": 152056,
+ " fill=\"": 152050,
+ " font-family=\"": 152061,
+ " font-size=\"": 152060,
+ " from=\"": 152068,
+ " gradientTransform=\"": 152064,
+ " gradientUnits=\"": 152063,
+ " height=\"": 152033,
+ " id=\"": 152035,
+ " offset=\"": 152065,
+ " opacity=\"": 152058,
+ " path=\"": 152073,
+ " points=\"": 152049,
+ " r=\"": 152045,
+ " repeatCount=\"": 152070,
+ " rotate=\"": 152072,
+ " rx=\"": 152046,
+ " ry=\"": 152047,
+ " stop-color=\"": 152066,
+ " stroke-linecap=\"": 152053,
+ " stroke-linejoin=\"": 152054,
+ " stroke-miterlimit=\"": 152055,
+ " stroke-width=\"": 152052,
+ " stroke=\"": 152051,
+ " text-anchor=\"": 152062,
+ " to=\"": 152069,
+ " transform=\"": 152059,
+ " viewBox=\"": 152034,
+ " width=\"": 152032,
+ " x1=\"": 152039,
+ " x2=\"": 152041,
+ " x=\"": 152037,
+ " y1=\"": 152040,
+ " y2=\"": 152042,
+ " y=\"": 152038,
+ "-1": 151802,
+ "-10": 151793,
+ "-100": 151703,
+ "-101": 151702,
+ "-102": 151701,
+ "-103": 151700,
+ "-104": 151699,
+ "-105": 151698,
+ "-106": 151697,
+ "-107": 151696,
+ "-108": 151695,
+ "-109": 151694,
+ "-11": 151792,
+ "-110": 151693,
+ "-111": 151692,
+ "-112": 151691,
+ "-113": 151690,
+ "-114": 151689,
+ "-115": 151688,
+ "-116": 151687,
+ "-117": 151686,
+ "-118": 151685,
+ "-119": 151684,
+ "-12": 151791,
+ "-120": 151683,
+ "-121": 151682,
+ "-122": 151681,
+ "-123": 151680,
+ "-124": 151679,
+ "-125": 151678,
+ "-126": 151677,
+ "-127": 151676,
+ "-128": 151675,
+ "-13": 151790,
+ "-14": 151789,
+ "-15": 151788,
+ "-16": 151787,
+ "-17": 151786,
+ "-18": 151785,
+ "-19": 151784,
+ "-2": 151801,
+ "-20": 151783,
+ "-21": 151782,
+ "-22": 151781,
+ "-23": 151780,
+ "-24": 151779,
+ "-25": 151778,
+ "-26": 151777,
+ "-27": 151776,
+ "-28": 151775,
+ "-29": 151774,
+ "-3": 151800,
+ "-30": 151773,
+ "-31": 151772,
+ "-32": 151771,
+ "-33": 151770,
+ "-34": 151769,
+ "-35": 151768,
+ "-36": 151767,
+ "-37": 151766,
+ "-38": 151765,
+ "-39": 151764,
+ "-4": 151799,
+ "-40": 151763,
+ "-41": 151762,
+ "-42": 151761,
+ "-43": 151760,
+ "-44": 151759,
+ "-45": 151758,
+ "-46": 151757,
+ "-47": 151756,
+ "-48": 151755,
+ "-49": 151754,
+ "-5": 151798,
+ "-50": 151753,
+ "-51": 151752,
+ "-52": 151751,
+ "-53": 151750,
+ "-54": 151749,
+ "-55": 151748,
+ "-56": 151747,
+ "-57": 151746,
+ "-58": 151745,
+ "-59": 151744,
+ "-6": 151797,
+ "-60": 151743,
+ "-61": 151742,
+ "-62": 151741,
+ "-63": 151740,
+ "-64": 151739,
+ "-65": 151738,
+ "-66": 151737,
+ "-67": 151736,
+ "-68": 151735,
+ "-69": 151734,
+ "-7": 151796,
+ "-70": 151733,
+ "-71": 151732,
+ "-72": 151731,
+ "-73": 151730,
+ "-74": 151729,
+ "-75": 151728,
+ "-76": 151727,
+ "-77": 151726,
+ "-78": 151725,
+ "-79": 151724,
+ "-8": 151795,
+ "-80": 151723,
+ "-81": 151722,
+ "-82": 151721,
+ "-83": 151720,
+ "-84": 151719,
+ "-85": 151718,
+ "-86": 151717,
+ "-87": 151716,
+ "-88": 151715,
+ "-89": 151714,
+ "-9": 151794,
+ "-90": 151713,
+ "-91": 151712,
+ "-92": 151711,
+ "-93": 151710,
+ "-94": 151709,
+ "-95": 151708,
+ "-96": 151707,
+ "-97": 151706,
+ "-98": 151705,
+ "-99": 151704,
+ ".0": 152022,
+ ".00": 151922,
+ ".01": 151923,
+ ".02": 151924,
+ ".03": 151925,
+ ".04": 151926,
+ ".05": 151927,
+ ".06": 151928,
+ ".07": 151929,
+ ".08": 151930,
+ ".09": 151931,
+ ".1": 152023,
+ ".10": 151932,
+ ".11": 151933,
+ ".12": 151934,
+ ".13": 151935,
+ ".14": 151936,
+ ".15": 151937,
+ ".16": 151938,
+ ".17": 151939,
+ ".18": 151940,
+ ".19": 151941,
+ ".2": 152024,
+ ".20": 151942,
+ ".21": 151943,
+ ".22": 151944,
+ ".23": 151945,
+ ".24": 151946,
+ ".25": 151947,
+ ".26": 151948,
+ ".27": 151949,
+ ".28": 151950,
+ ".29": 151951,
+ ".3": 152025,
+ ".30": 151952,
+ ".31": 151953,
+ ".32": 151954,
+ ".33": 151955,
+ ".34": 151956,
+ ".35": 151957,
+ ".36": 151958,
+ ".37": 151959,
+ ".38": 151960,
+ ".39": 151961,
+ ".4": 152026,
+ ".40": 151962,
+ ".41": 151963,
+ ".42": 151964,
+ ".43": 151965,
+ ".44": 151966,
+ ".45": 151967,
+ ".46": 151968,
+ ".47": 151969,
+ ".48": 151970,
+ ".49": 151971,
+ ".5": 152027,
+ ".50": 151972,
+ ".51": 151973,
+ ".52": 151974,
+ ".53": 151975,
+ ".54": 151976,
+ ".55": 151977,
+ ".56": 151978,
+ ".57": 151979,
+ ".58": 151980,
+ ".59": 151981,
+ ".6": 152028,
+ ".60": 151982,
+ ".61": 151983,
+ ".62": 151984,
+ ".63": 151985,
+ ".64": 151986,
+ ".65": 151987,
+ ".66": 151988,
+ ".67": 151989,
+ ".68": 151990,
+ ".69": 151991,
+ ".7": 152029,
+ ".70": 151992,
+ ".71": 151993,
+ ".72": 151994,
+ ".73": 151995,
+ ".74": 151996,
+ ".75": 151997,
+ ".76": 151998,
+ ".77": 151999,
+ ".78": 152000,
+ ".79": 152001,
+ ".8": 152030,
+ ".80": 152002,
+ ".81": 152003,
+ ".82": 152004,
+ ".83": 152005,
+ ".84": 152006,
+ ".85": 152007,
+ ".86": 152008,
+ ".87": 152009,
+ ".88": 152010,
+ ".89": 152011,
+ ".9": 152031,
+ ".90": 152012,
+ ".91": 152013,
+ ".92": 152014,
+ ".93": 152015,
+ ".94": 152016,
+ ".95": 152017,
+ ".96": 152018,
+ ".97": 152019,
+ ".98": 152020,
+ ".99": 152021,
+ "10": 151803,
+ "100": 151893,
+ "101": 151894,
+ "102": 151895,
+ "103": 151896,
+ "104": 151897,
+ "105": 151898,
+ "106": 151899,
+ "107": 151900,
+ "108": 151901,
+ "109": 151902,
+ "11": 151804,
+ "110": 151903,
+ "111": 151904,
+ "112": 151905,
+ "113": 151906,
+ "114": 151907,
+ "115": 151908,
+ "116": 151909,
+ "117": 151910,
+ "118": 151911,
+ "119": 151912,
+ "12": 151805,
+ "120": 151913,
+ "121": 151914,
+ "122": 151915,
+ "123": 151916,
+ "124": 151917,
+ "125": 151918,
+ "126": 151919,
+ "127": 151920,
+ "128": 151921,
+ "13": 151806,
+ "14": 151807,
+ "15": 151808,
+ "16": 151809,
+ "17": 151810,
+ "18": 151811,
+ "19": 151812,
+ "20": 151813,
+ "21": 151814,
+ "22": 151815,
+ "23": 151816,
+ "24": 151817,
+ "25": 151818,
+ "26": 151819,
+ "27": 151820,
+ "28": 151821,
+ "29": 151822,
+ "30": 151823,
+ "31": 151824,
+ "32": 151825,
+ "33": 151826,
+ "34": 151827,
+ "35": 151828,
+ "36": 151829,
+ "37": 151830,
+ "38": 151831,
+ "39": 151832,
+ "40": 151833,
+ "41": 151834,
+ "42": 151835,
+ "43": 151836,
+ "44": 151837,
+ "45": 151838,
+ "46": 151839,
+ "47": 151840,
+ "48": 151841,
+ "49": 151842,
+ "50": 151843,
+ "51": 151844,
+ "52": 151845,
+ "53": 151846,
+ "54": 151847,
+ "55": 151848,
+ "56": 151849,
+ "57": 151850,
+ "58": 151851,
+ "59": 151852,
+ "60": 151853,
+ "61": 151854,
+ "62": 151855,
+ "63": 151856,
+ "64": 151857,
+ "65": 151858,
+ "66": 151859,
+ "67": 151860,
+ "68": 151861,
+ "69": 151862,
+ "70": 151863,
+ "71": 151864,
+ "72": 151865,
+ "73": 151866,
+ "74": 151867,
+ "75": 151868,
+ "76": 151869,
+ "77": 151870,
+ "78": 151871,
+ "79": 151872,
+ "80": 151873,
+ "81": 151874,
+ "82": 151875,
+ "83": 151876,
+ "84": 151877,
+ "85": 151878,
+ "86": 151879,
+ "87": 151880,
+ "88": 151881,
+ "89": 151882,
+ "90": 151883,
+ "91": 151884,
+ "92": 151885,
+ "93": 151886,
+ "94": 151887,
+ "95": 151888,
+ "96": 151889,
+ "97": 151890,
+ "98": 151891,
+ "99": 151892,
+ "</animate>": 152121,
+ "</animateMotion>": 152123,
+ "</animateTransform>": 152125,
+ "</box>": 151673,
+ "</circle>": 152085,
+ "</clipPath>": 152107,
+ "</defs>": 152078,
+ "</ellipse>": 152087,
+ "</feBlend>": 152119,
+ "</feColorMatrix>": 152115,
+ "</feComposite>": 152117,
+ "</feGaussianBlur>": 152113,
+ "</filter>": 152111,
+ "</g>": 152076,
+ "</img>": 151666,
+ "</line>": 152089,
+ "</linearGradient>": 152101,
+ "</mask>": 152109,
+ "</path>": 152081,
+ "</polygon>": 152093,
+ "</polyline>": 152091,
+ "</quad>": 151669,
+ "</radialGradient>": 152103,
+ "</rect>": 152083,
+ "</ref>": 151671,
+ "</stop>": 152105,
+ "</svg>": 152075,
+ "</text>": 152095,
+ "</textPath>": 152099,
+ "</tool_call>": 151658,
+ "</tspan>": 152097,
+ "</use>": 152080,
+ "<IMG_CONTEXT>": 151667,
+ "<animate": 152120,
+ "<animateMotion": 152122,
+ "<animateTransform": 152124,
+ "<box>": 151672,
+ "<circle": 152084,
+ "<clipPath": 152106,
+ "<defs": 152077,
+ "<ellipse": 152086,
+ "<feBlend": 152118,
+ "<feColorMatrix": 152114,
+ "<feComposite": 152116,
+ "<feGaussianBlur": 152112,
+ "<filter": 152110,
+ "<img>": 151665,
+ "<line": 152088,
+ "<linearGradient": 152100,
+ "<mask": 152108,
+ "<polygon": 152092,
+ "<polyline": 152090,
+ "<quad>": 151668,
+ "<radialGradient": 152102,
+ "<rect": 152082,
+ "<ref>": 151670,
+ "<stop": 152104,
+ "<svg": 152074,
+ "<text": 152094,
+ "<textPath": 152098,
+ "<tool_call>": 151657,
+ "<tspan": 152096,
+ "<use": 152079,
+ "<video>": 151674,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
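
These mappings extend the base Qwen2 vocabulary with SVG markup, numeric, and multimodal control tokens. As a quick sanity check, a minimal sketch of reading a few of them back through the tokenizer (the repo path is a placeholder for a local checkout of these files):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/repo")  # placeholder path

# The asserted IDs come straight from added_tokens.json above.
assert tok.convert_tokens_to_ids("<IMG_CONTEXT>") == 151667  # image_token_id in config.json below
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645     # chat-turn terminator
assert tok.convert_tokens_to_ids("<|endoftext|>") == 151643  # base EOS/BOS token
```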
chat_template.jinja ADDED
@@ -0,0 +1,6 @@
+ {% for message in messages %}{{'<|im_start|>' + message['role'] + '
+ '}}{% if message['content'] is string %}{{ message['content'] }}{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' %}{{ '<IMG_CONTEXT>
+ ' }}{% elif content['type'] == 'video' %}{{ '<video>
+ ' }}{% elif content['type'] == 'text' %}{{ content['text'] }}{% endif %}{% endfor %}{% endif %}{{'<|im_end|>
+ '}}{% endfor %}{% if add_generation_prompt %}{{'<|im_start|>assistant
+ ' }}{% endif %}
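
This template follows the ChatML convention: each turn is wrapped in `<|im_start|>role ... <|im_end|>`, image and video contents render as the `<IMG_CONTEXT>` and `<video>` placeholder tokens, and `add_generation_prompt` opens a trailing assistant turn. A minimal sketch of rendering it (repo path is a placeholder):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/repo")  # placeholder path

messages = [
    {"role": "user", "content": [
        {"type": "image"},
        {"type": "text", "text": "Describe this image."},
    ]},
]

prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>user
# <IMG_CONTEXT>
# Describe this image.<|im_end|>
# <|im_start|>assistant
```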
config.json ADDED
@@ -0,0 +1,81 @@
+ {
+ "architectures": [
+ "InternVLForConditionalGeneration"
+ ],
+ "downsample_ratio": 0.5,
+ "hidden_size": 3584,
+ "image_seq_length": 256,
+ "image_token_id": 151667,
+ "model_type": "internvl",
+ "projector_hidden_act": "gelu",
+ "text_config": {
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 3584,
+ "initializer_range": 0.02,
+ "intermediate_size": 18944,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 70,
+ "model_type": "qwen2",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "factor": 2.0,
+ "rope_type": "dynamic",
+ "type": "dynamic"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "torch_dtype": "bfloat16",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 152126
+ },
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.52.4",
+ "use_cache": false,
+ "vision_config": {
+ "architectures": [
+ "InternVisionModel"
+ ],
+ "attention_bias": true,
+ "attention_dropout": 0.0,
+ "dropout": 0.0,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.0,
+ "hidden_size": 1024,
+ "image_size": [
+ 448,
+ 448
+ ],
+ "initializer_factor": 0.1,
+ "initializer_range": 1e-10,
+ "intermediate_size": 4096,
+ "layer_norm_eps": 1e-06,
+ "layer_scale_init_value": 0.1,
+ "model_type": "internvl_vision",
+ "norm_type": "layer_norm",
+ "num_attention_heads": 16,
+ "num_channels": 3,
+ "num_hidden_layers": 24,
+ "patch_size": [
+ 14,
+ 14
+ ],
+ "projection_dropout": 0.0,
+ "torch_dtype": "bfloat16",
+ "use_absolute_position_embeddings": true,
+ "use_mask_token": false,
+ "use_mean_pooling": true,
+ "use_qk_norm": false
+ },
+ "vision_feature_layer": -1,
+ "vision_feature_select_strategy": "default"
+ }
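
The vision/token geometry here is internally consistent: a 448×448 input with 14×14 patches gives 32×32 patch embeddings, and the 0.5 `downsample_ratio` (pixel-unshuffle halving each spatial side) reduces that to 16×16 = 256 tokens, matching `image_seq_length`. A minimal sketch of checking this, assuming a local checkout:

```python
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("path/to/this/repo")  # placeholder path

patches = cfg.vision_config.image_size[0] // cfg.vision_config.patch_size[0]  # 448 // 14 = 32
tokens_per_side = int(patches * cfg.downsample_ratio)                         # 32 * 0.5 = 16
assert tokens_per_side ** 2 == cfg.image_seq_length                           # 16 * 16 = 256
assert cfg.image_token_id == 151667  # <IMG_CONTEXT> in added_tokens.json
```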
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "transformers_version": "4.52.4"
+ }
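
These defaults were derived from the model config (`_from_model_config`); note that generation stops at 151645 (`<|im_end|>`) rather than the base `<|endoftext|>`. A minimal check, assuming a local checkout:

```python
from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("path/to/this/repo")  # placeholder path
assert gen.bos_token_id == 151643  # <|endoftext|>
assert gen.eos_token_id == 151645  # <|im_end|>, the chat-turn terminator
```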
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e4a470265b24a454a2c1e7d93661c1c404ea92be2d7dea2215091c9b104d236
+ size 4880614512
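
Each shard is stored as a Git LFS pointer carrying the blob's SHA-256 and byte size, so a download can be verified locally. A minimal sketch for this first shard, using the values from the pointer above:

```python
import hashlib
import os

PATH = "model-00001-of-00004.safetensors"
EXPECTED_OID = "4e4a470265b24a454a2c1e7d93661c1c404ea92be2d7dea2215091c9b104d236"
EXPECTED_SIZE = 4880614512

h = hashlib.sha256()
with open(PATH, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(PATH) == EXPECTED_SIZE
assert h.hexdigest() == EXPECTED_OID
```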
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95e3d41151deb66bba5660be3da7163fb02e01410fbb7e988cfea6b26bc0a3aa
+ size 4991497800
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0388d5709c96064398e47b93c56c4c0c0dfd30706aa711e1617fd2b2d7a3179b
+ size 4932774744
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e33b63249a00bc95e9d7ed81a46704aa44856ce95752a5f4eb889387d0f0f022
+ size 1090439312
model.safetensors.index.json ADDED
@@ -0,0 +1,788 @@
+ {
+ "metadata": {
+ "total_size": 15895227392
+ },
+ "weight_map": {
+ "language_model.lm_head.weight": "model-00004-of-00004.safetensors",
+ "language_model.model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "language_model.model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "language_model.model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "language_model.model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "language_model.model.norm.weight": "model-00003-of-00004.safetensors",
+ "multi_modal_projector.layer_norm.bias": "model-00001-of-00004.safetensors",
+ "multi_modal_projector.layer_norm.weight": "model-00001-of-00004.safetensors",
+ "multi_modal_projector.linear_1.bias": "model-00001-of-00004.safetensors",
+ "multi_modal_projector.linear_1.weight": "model-00001-of-00004.safetensors",
+ "multi_modal_projector.linear_2.bias": "model-00001-of-00004.safetensors",
+ "multi_modal_projector.linear_2.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.embeddings.cls_token": "model-00001-of-00004.safetensors",
+ "vision_tower.embeddings.patch_embeddings.projection.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.embeddings.patch_embeddings.projection.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.embeddings.position_embeddings": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.attention.k_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.attention.k_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.attention.q_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.attention.q_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.attention.v_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.attention.v_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.lambda_1": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.lambda_2": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.layernorm_after.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.layernorm_after.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.layernorm_before.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.layernorm_before.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.mlp.fc1.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.mlp.fc1.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.mlp.fc2.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.0.mlp.fc2.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.attention.k_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.attention.k_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.attention.q_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.attention.q_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.attention.v_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.attention.v_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.lambda_1": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.lambda_2": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.layernorm_after.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.layernorm_after.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.layernorm_before.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.layernorm_before.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.mlp.fc1.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.mlp.fc1.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.mlp.fc2.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.1.mlp.fc2.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.attention.k_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.attention.k_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.attention.q_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.attention.q_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.attention.v_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.attention.v_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.lambda_1": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.lambda_2": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.layernorm_after.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.layernorm_after.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.layernorm_before.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.layernorm_before.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.mlp.fc1.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.mlp.fc1.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.mlp.fc2.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.10.mlp.fc2.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.attention.k_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.attention.k_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.attention.q_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.attention.q_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.attention.v_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.attention.v_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.lambda_1": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.lambda_2": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.layernorm_after.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.layernorm_after.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.layernorm_before.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.layernorm_before.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.mlp.fc1.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.mlp.fc1.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.mlp.fc2.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.11.mlp.fc2.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.attention.k_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.attention.k_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.attention.q_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.attention.q_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.attention.v_proj.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.attention.v_proj.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.lambda_1": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.lambda_2": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.layernorm_after.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.layernorm_after.weight": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.layernorm_before.bias": "model-00001-of-00004.safetensors",
+ "vision_tower.encoder.layer.12.layernorm_before.weight": "model-00001-of-00004.safetensors",
441
+ "vision_tower.encoder.layer.12.mlp.fc1.bias": "model-00001-of-00004.safetensors",
442
+ "vision_tower.encoder.layer.12.mlp.fc1.weight": "model-00001-of-00004.safetensors",
443
+ "vision_tower.encoder.layer.12.mlp.fc2.bias": "model-00001-of-00004.safetensors",
444
+ "vision_tower.encoder.layer.12.mlp.fc2.weight": "model-00001-of-00004.safetensors",
445
+ "vision_tower.encoder.layer.13.attention.k_proj.bias": "model-00001-of-00004.safetensors",
446
+ "vision_tower.encoder.layer.13.attention.k_proj.weight": "model-00001-of-00004.safetensors",
447
+ "vision_tower.encoder.layer.13.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
448
+ "vision_tower.encoder.layer.13.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
449
+ "vision_tower.encoder.layer.13.attention.q_proj.bias": "model-00001-of-00004.safetensors",
450
+ "vision_tower.encoder.layer.13.attention.q_proj.weight": "model-00001-of-00004.safetensors",
451
+ "vision_tower.encoder.layer.13.attention.v_proj.bias": "model-00001-of-00004.safetensors",
452
+ "vision_tower.encoder.layer.13.attention.v_proj.weight": "model-00001-of-00004.safetensors",
453
+ "vision_tower.encoder.layer.13.lambda_1": "model-00001-of-00004.safetensors",
454
+ "vision_tower.encoder.layer.13.lambda_2": "model-00001-of-00004.safetensors",
455
+ "vision_tower.encoder.layer.13.layernorm_after.bias": "model-00001-of-00004.safetensors",
456
+ "vision_tower.encoder.layer.13.layernorm_after.weight": "model-00001-of-00004.safetensors",
457
+ "vision_tower.encoder.layer.13.layernorm_before.bias": "model-00001-of-00004.safetensors",
458
+ "vision_tower.encoder.layer.13.layernorm_before.weight": "model-00001-of-00004.safetensors",
459
+ "vision_tower.encoder.layer.13.mlp.fc1.bias": "model-00001-of-00004.safetensors",
460
+ "vision_tower.encoder.layer.13.mlp.fc1.weight": "model-00001-of-00004.safetensors",
461
+ "vision_tower.encoder.layer.13.mlp.fc2.bias": "model-00001-of-00004.safetensors",
462
+ "vision_tower.encoder.layer.13.mlp.fc2.weight": "model-00001-of-00004.safetensors",
463
+ "vision_tower.encoder.layer.14.attention.k_proj.bias": "model-00001-of-00004.safetensors",
464
+ "vision_tower.encoder.layer.14.attention.k_proj.weight": "model-00001-of-00004.safetensors",
465
+ "vision_tower.encoder.layer.14.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
466
+ "vision_tower.encoder.layer.14.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
467
+ "vision_tower.encoder.layer.14.attention.q_proj.bias": "model-00001-of-00004.safetensors",
468
+ "vision_tower.encoder.layer.14.attention.q_proj.weight": "model-00001-of-00004.safetensors",
469
+ "vision_tower.encoder.layer.14.attention.v_proj.bias": "model-00001-of-00004.safetensors",
470
+ "vision_tower.encoder.layer.14.attention.v_proj.weight": "model-00001-of-00004.safetensors",
471
+ "vision_tower.encoder.layer.14.lambda_1": "model-00001-of-00004.safetensors",
472
+ "vision_tower.encoder.layer.14.lambda_2": "model-00001-of-00004.safetensors",
473
+ "vision_tower.encoder.layer.14.layernorm_after.bias": "model-00001-of-00004.safetensors",
474
+ "vision_tower.encoder.layer.14.layernorm_after.weight": "model-00001-of-00004.safetensors",
475
+ "vision_tower.encoder.layer.14.layernorm_before.bias": "model-00001-of-00004.safetensors",
476
+ "vision_tower.encoder.layer.14.layernorm_before.weight": "model-00001-of-00004.safetensors",
477
+ "vision_tower.encoder.layer.14.mlp.fc1.bias": "model-00001-of-00004.safetensors",
478
+ "vision_tower.encoder.layer.14.mlp.fc1.weight": "model-00001-of-00004.safetensors",
479
+ "vision_tower.encoder.layer.14.mlp.fc2.bias": "model-00001-of-00004.safetensors",
480
+ "vision_tower.encoder.layer.14.mlp.fc2.weight": "model-00001-of-00004.safetensors",
481
+ "vision_tower.encoder.layer.15.attention.k_proj.bias": "model-00001-of-00004.safetensors",
482
+ "vision_tower.encoder.layer.15.attention.k_proj.weight": "model-00001-of-00004.safetensors",
483
+ "vision_tower.encoder.layer.15.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
484
+ "vision_tower.encoder.layer.15.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
485
+ "vision_tower.encoder.layer.15.attention.q_proj.bias": "model-00001-of-00004.safetensors",
486
+ "vision_tower.encoder.layer.15.attention.q_proj.weight": "model-00001-of-00004.safetensors",
487
+ "vision_tower.encoder.layer.15.attention.v_proj.bias": "model-00001-of-00004.safetensors",
488
+ "vision_tower.encoder.layer.15.attention.v_proj.weight": "model-00001-of-00004.safetensors",
489
+ "vision_tower.encoder.layer.15.lambda_1": "model-00001-of-00004.safetensors",
490
+ "vision_tower.encoder.layer.15.lambda_2": "model-00001-of-00004.safetensors",
491
+ "vision_tower.encoder.layer.15.layernorm_after.bias": "model-00001-of-00004.safetensors",
492
+ "vision_tower.encoder.layer.15.layernorm_after.weight": "model-00001-of-00004.safetensors",
493
+ "vision_tower.encoder.layer.15.layernorm_before.bias": "model-00001-of-00004.safetensors",
494
+ "vision_tower.encoder.layer.15.layernorm_before.weight": "model-00001-of-00004.safetensors",
495
+ "vision_tower.encoder.layer.15.mlp.fc1.bias": "model-00001-of-00004.safetensors",
496
+ "vision_tower.encoder.layer.15.mlp.fc1.weight": "model-00001-of-00004.safetensors",
497
+ "vision_tower.encoder.layer.15.mlp.fc2.bias": "model-00001-of-00004.safetensors",
498
+ "vision_tower.encoder.layer.15.mlp.fc2.weight": "model-00001-of-00004.safetensors",
499
+ "vision_tower.encoder.layer.16.attention.k_proj.bias": "model-00001-of-00004.safetensors",
500
+ "vision_tower.encoder.layer.16.attention.k_proj.weight": "model-00001-of-00004.safetensors",
501
+ "vision_tower.encoder.layer.16.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
502
+ "vision_tower.encoder.layer.16.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
503
+ "vision_tower.encoder.layer.16.attention.q_proj.bias": "model-00001-of-00004.safetensors",
504
+ "vision_tower.encoder.layer.16.attention.q_proj.weight": "model-00001-of-00004.safetensors",
505
+ "vision_tower.encoder.layer.16.attention.v_proj.bias": "model-00001-of-00004.safetensors",
506
+ "vision_tower.encoder.layer.16.attention.v_proj.weight": "model-00001-of-00004.safetensors",
507
+ "vision_tower.encoder.layer.16.lambda_1": "model-00001-of-00004.safetensors",
508
+ "vision_tower.encoder.layer.16.lambda_2": "model-00001-of-00004.safetensors",
509
+ "vision_tower.encoder.layer.16.layernorm_after.bias": "model-00001-of-00004.safetensors",
510
+ "vision_tower.encoder.layer.16.layernorm_after.weight": "model-00001-of-00004.safetensors",
511
+ "vision_tower.encoder.layer.16.layernorm_before.bias": "model-00001-of-00004.safetensors",
512
+ "vision_tower.encoder.layer.16.layernorm_before.weight": "model-00001-of-00004.safetensors",
513
+ "vision_tower.encoder.layer.16.mlp.fc1.bias": "model-00001-of-00004.safetensors",
514
+ "vision_tower.encoder.layer.16.mlp.fc1.weight": "model-00001-of-00004.safetensors",
515
+ "vision_tower.encoder.layer.16.mlp.fc2.bias": "model-00001-of-00004.safetensors",
516
+ "vision_tower.encoder.layer.16.mlp.fc2.weight": "model-00001-of-00004.safetensors",
517
+ "vision_tower.encoder.layer.17.attention.k_proj.bias": "model-00001-of-00004.safetensors",
518
+ "vision_tower.encoder.layer.17.attention.k_proj.weight": "model-00001-of-00004.safetensors",
519
+ "vision_tower.encoder.layer.17.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
520
+ "vision_tower.encoder.layer.17.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
521
+ "vision_tower.encoder.layer.17.attention.q_proj.bias": "model-00001-of-00004.safetensors",
522
+ "vision_tower.encoder.layer.17.attention.q_proj.weight": "model-00001-of-00004.safetensors",
523
+ "vision_tower.encoder.layer.17.attention.v_proj.bias": "model-00001-of-00004.safetensors",
524
+ "vision_tower.encoder.layer.17.attention.v_proj.weight": "model-00001-of-00004.safetensors",
525
+ "vision_tower.encoder.layer.17.lambda_1": "model-00001-of-00004.safetensors",
526
+ "vision_tower.encoder.layer.17.lambda_2": "model-00001-of-00004.safetensors",
527
+ "vision_tower.encoder.layer.17.layernorm_after.bias": "model-00001-of-00004.safetensors",
528
+ "vision_tower.encoder.layer.17.layernorm_after.weight": "model-00001-of-00004.safetensors",
529
+ "vision_tower.encoder.layer.17.layernorm_before.bias": "model-00001-of-00004.safetensors",
530
+ "vision_tower.encoder.layer.17.layernorm_before.weight": "model-00001-of-00004.safetensors",
531
+ "vision_tower.encoder.layer.17.mlp.fc1.bias": "model-00001-of-00004.safetensors",
532
+ "vision_tower.encoder.layer.17.mlp.fc1.weight": "model-00001-of-00004.safetensors",
533
+ "vision_tower.encoder.layer.17.mlp.fc2.bias": "model-00001-of-00004.safetensors",
534
+ "vision_tower.encoder.layer.17.mlp.fc2.weight": "model-00001-of-00004.safetensors",
535
+ "vision_tower.encoder.layer.18.attention.k_proj.bias": "model-00001-of-00004.safetensors",
536
+ "vision_tower.encoder.layer.18.attention.k_proj.weight": "model-00001-of-00004.safetensors",
537
+ "vision_tower.encoder.layer.18.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
538
+ "vision_tower.encoder.layer.18.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
539
+ "vision_tower.encoder.layer.18.attention.q_proj.bias": "model-00001-of-00004.safetensors",
540
+ "vision_tower.encoder.layer.18.attention.q_proj.weight": "model-00001-of-00004.safetensors",
541
+ "vision_tower.encoder.layer.18.attention.v_proj.bias": "model-00001-of-00004.safetensors",
542
+ "vision_tower.encoder.layer.18.attention.v_proj.weight": "model-00001-of-00004.safetensors",
543
+ "vision_tower.encoder.layer.18.lambda_1": "model-00001-of-00004.safetensors",
544
+ "vision_tower.encoder.layer.18.lambda_2": "model-00001-of-00004.safetensors",
545
+ "vision_tower.encoder.layer.18.layernorm_after.bias": "model-00001-of-00004.safetensors",
546
+ "vision_tower.encoder.layer.18.layernorm_after.weight": "model-00001-of-00004.safetensors",
547
+ "vision_tower.encoder.layer.18.layernorm_before.bias": "model-00001-of-00004.safetensors",
548
+ "vision_tower.encoder.layer.18.layernorm_before.weight": "model-00001-of-00004.safetensors",
549
+ "vision_tower.encoder.layer.18.mlp.fc1.bias": "model-00001-of-00004.safetensors",
550
+ "vision_tower.encoder.layer.18.mlp.fc1.weight": "model-00001-of-00004.safetensors",
551
+ "vision_tower.encoder.layer.18.mlp.fc2.bias": "model-00001-of-00004.safetensors",
552
+ "vision_tower.encoder.layer.18.mlp.fc2.weight": "model-00001-of-00004.safetensors",
553
+ "vision_tower.encoder.layer.19.attention.k_proj.bias": "model-00001-of-00004.safetensors",
554
+ "vision_tower.encoder.layer.19.attention.k_proj.weight": "model-00001-of-00004.safetensors",
555
+ "vision_tower.encoder.layer.19.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
556
+ "vision_tower.encoder.layer.19.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
557
+ "vision_tower.encoder.layer.19.attention.q_proj.bias": "model-00001-of-00004.safetensors",
558
+ "vision_tower.encoder.layer.19.attention.q_proj.weight": "model-00001-of-00004.safetensors",
559
+ "vision_tower.encoder.layer.19.attention.v_proj.bias": "model-00001-of-00004.safetensors",
560
+ "vision_tower.encoder.layer.19.attention.v_proj.weight": "model-00001-of-00004.safetensors",
561
+ "vision_tower.encoder.layer.19.lambda_1": "model-00001-of-00004.safetensors",
562
+ "vision_tower.encoder.layer.19.lambda_2": "model-00001-of-00004.safetensors",
563
+ "vision_tower.encoder.layer.19.layernorm_after.bias": "model-00001-of-00004.safetensors",
564
+ "vision_tower.encoder.layer.19.layernorm_after.weight": "model-00001-of-00004.safetensors",
565
+ "vision_tower.encoder.layer.19.layernorm_before.bias": "model-00001-of-00004.safetensors",
566
+ "vision_tower.encoder.layer.19.layernorm_before.weight": "model-00001-of-00004.safetensors",
567
+ "vision_tower.encoder.layer.19.mlp.fc1.bias": "model-00001-of-00004.safetensors",
568
+ "vision_tower.encoder.layer.19.mlp.fc1.weight": "model-00001-of-00004.safetensors",
569
+ "vision_tower.encoder.layer.19.mlp.fc2.bias": "model-00001-of-00004.safetensors",
570
+ "vision_tower.encoder.layer.19.mlp.fc2.weight": "model-00001-of-00004.safetensors",
571
+ "vision_tower.encoder.layer.2.attention.k_proj.bias": "model-00001-of-00004.safetensors",
572
+ "vision_tower.encoder.layer.2.attention.k_proj.weight": "model-00001-of-00004.safetensors",
573
+ "vision_tower.encoder.layer.2.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
574
+ "vision_tower.encoder.layer.2.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
575
+ "vision_tower.encoder.layer.2.attention.q_proj.bias": "model-00001-of-00004.safetensors",
576
+ "vision_tower.encoder.layer.2.attention.q_proj.weight": "model-00001-of-00004.safetensors",
577
+ "vision_tower.encoder.layer.2.attention.v_proj.bias": "model-00001-of-00004.safetensors",
578
+ "vision_tower.encoder.layer.2.attention.v_proj.weight": "model-00001-of-00004.safetensors",
579
+ "vision_tower.encoder.layer.2.lambda_1": "model-00001-of-00004.safetensors",
580
+ "vision_tower.encoder.layer.2.lambda_2": "model-00001-of-00004.safetensors",
581
+ "vision_tower.encoder.layer.2.layernorm_after.bias": "model-00001-of-00004.safetensors",
582
+ "vision_tower.encoder.layer.2.layernorm_after.weight": "model-00001-of-00004.safetensors",
583
+ "vision_tower.encoder.layer.2.layernorm_before.bias": "model-00001-of-00004.safetensors",
584
+ "vision_tower.encoder.layer.2.layernorm_before.weight": "model-00001-of-00004.safetensors",
585
+ "vision_tower.encoder.layer.2.mlp.fc1.bias": "model-00001-of-00004.safetensors",
586
+ "vision_tower.encoder.layer.2.mlp.fc1.weight": "model-00001-of-00004.safetensors",
587
+ "vision_tower.encoder.layer.2.mlp.fc2.bias": "model-00001-of-00004.safetensors",
588
+ "vision_tower.encoder.layer.2.mlp.fc2.weight": "model-00001-of-00004.safetensors",
589
+ "vision_tower.encoder.layer.20.attention.k_proj.bias": "model-00001-of-00004.safetensors",
590
+ "vision_tower.encoder.layer.20.attention.k_proj.weight": "model-00001-of-00004.safetensors",
591
+ "vision_tower.encoder.layer.20.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
592
+ "vision_tower.encoder.layer.20.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
593
+ "vision_tower.encoder.layer.20.attention.q_proj.bias": "model-00001-of-00004.safetensors",
594
+ "vision_tower.encoder.layer.20.attention.q_proj.weight": "model-00001-of-00004.safetensors",
595
+ "vision_tower.encoder.layer.20.attention.v_proj.bias": "model-00001-of-00004.safetensors",
596
+ "vision_tower.encoder.layer.20.attention.v_proj.weight": "model-00001-of-00004.safetensors",
597
+ "vision_tower.encoder.layer.20.lambda_1": "model-00001-of-00004.safetensors",
598
+ "vision_tower.encoder.layer.20.lambda_2": "model-00001-of-00004.safetensors",
599
+ "vision_tower.encoder.layer.20.layernorm_after.bias": "model-00001-of-00004.safetensors",
600
+ "vision_tower.encoder.layer.20.layernorm_after.weight": "model-00001-of-00004.safetensors",
601
+ "vision_tower.encoder.layer.20.layernorm_before.bias": "model-00001-of-00004.safetensors",
602
+ "vision_tower.encoder.layer.20.layernorm_before.weight": "model-00001-of-00004.safetensors",
603
+ "vision_tower.encoder.layer.20.mlp.fc1.bias": "model-00001-of-00004.safetensors",
604
+ "vision_tower.encoder.layer.20.mlp.fc1.weight": "model-00001-of-00004.safetensors",
605
+ "vision_tower.encoder.layer.20.mlp.fc2.bias": "model-00001-of-00004.safetensors",
606
+ "vision_tower.encoder.layer.20.mlp.fc2.weight": "model-00001-of-00004.safetensors",
607
+ "vision_tower.encoder.layer.21.attention.k_proj.bias": "model-00001-of-00004.safetensors",
608
+ "vision_tower.encoder.layer.21.attention.k_proj.weight": "model-00001-of-00004.safetensors",
609
+ "vision_tower.encoder.layer.21.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
610
+ "vision_tower.encoder.layer.21.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
611
+ "vision_tower.encoder.layer.21.attention.q_proj.bias": "model-00001-of-00004.safetensors",
612
+ "vision_tower.encoder.layer.21.attention.q_proj.weight": "model-00001-of-00004.safetensors",
613
+ "vision_tower.encoder.layer.21.attention.v_proj.bias": "model-00001-of-00004.safetensors",
614
+ "vision_tower.encoder.layer.21.attention.v_proj.weight": "model-00001-of-00004.safetensors",
615
+ "vision_tower.encoder.layer.21.lambda_1": "model-00001-of-00004.safetensors",
616
+ "vision_tower.encoder.layer.21.lambda_2": "model-00001-of-00004.safetensors",
617
+ "vision_tower.encoder.layer.21.layernorm_after.bias": "model-00001-of-00004.safetensors",
618
+ "vision_tower.encoder.layer.21.layernorm_after.weight": "model-00001-of-00004.safetensors",
619
+ "vision_tower.encoder.layer.21.layernorm_before.bias": "model-00001-of-00004.safetensors",
620
+ "vision_tower.encoder.layer.21.layernorm_before.weight": "model-00001-of-00004.safetensors",
621
+ "vision_tower.encoder.layer.21.mlp.fc1.bias": "model-00001-of-00004.safetensors",
622
+ "vision_tower.encoder.layer.21.mlp.fc1.weight": "model-00001-of-00004.safetensors",
623
+ "vision_tower.encoder.layer.21.mlp.fc2.bias": "model-00001-of-00004.safetensors",
624
+ "vision_tower.encoder.layer.21.mlp.fc2.weight": "model-00001-of-00004.safetensors",
625
+ "vision_tower.encoder.layer.22.attention.k_proj.bias": "model-00001-of-00004.safetensors",
626
+ "vision_tower.encoder.layer.22.attention.k_proj.weight": "model-00001-of-00004.safetensors",
627
+ "vision_tower.encoder.layer.22.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
628
+ "vision_tower.encoder.layer.22.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
629
+ "vision_tower.encoder.layer.22.attention.q_proj.bias": "model-00001-of-00004.safetensors",
630
+ "vision_tower.encoder.layer.22.attention.q_proj.weight": "model-00001-of-00004.safetensors",
631
+ "vision_tower.encoder.layer.22.attention.v_proj.bias": "model-00001-of-00004.safetensors",
632
+ "vision_tower.encoder.layer.22.attention.v_proj.weight": "model-00001-of-00004.safetensors",
633
+ "vision_tower.encoder.layer.22.lambda_1": "model-00001-of-00004.safetensors",
634
+ "vision_tower.encoder.layer.22.lambda_2": "model-00001-of-00004.safetensors",
635
+ "vision_tower.encoder.layer.22.layernorm_after.bias": "model-00001-of-00004.safetensors",
636
+ "vision_tower.encoder.layer.22.layernorm_after.weight": "model-00001-of-00004.safetensors",
637
+ "vision_tower.encoder.layer.22.layernorm_before.bias": "model-00001-of-00004.safetensors",
638
+ "vision_tower.encoder.layer.22.layernorm_before.weight": "model-00001-of-00004.safetensors",
639
+ "vision_tower.encoder.layer.22.mlp.fc1.bias": "model-00001-of-00004.safetensors",
640
+ "vision_tower.encoder.layer.22.mlp.fc1.weight": "model-00001-of-00004.safetensors",
641
+ "vision_tower.encoder.layer.22.mlp.fc2.bias": "model-00001-of-00004.safetensors",
642
+ "vision_tower.encoder.layer.22.mlp.fc2.weight": "model-00001-of-00004.safetensors",
643
+ "vision_tower.encoder.layer.23.attention.k_proj.bias": "model-00001-of-00004.safetensors",
644
+ "vision_tower.encoder.layer.23.attention.k_proj.weight": "model-00001-of-00004.safetensors",
645
+ "vision_tower.encoder.layer.23.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
646
+ "vision_tower.encoder.layer.23.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
647
+ "vision_tower.encoder.layer.23.attention.q_proj.bias": "model-00001-of-00004.safetensors",
648
+ "vision_tower.encoder.layer.23.attention.q_proj.weight": "model-00001-of-00004.safetensors",
649
+ "vision_tower.encoder.layer.23.attention.v_proj.bias": "model-00001-of-00004.safetensors",
650
+ "vision_tower.encoder.layer.23.attention.v_proj.weight": "model-00001-of-00004.safetensors",
651
+ "vision_tower.encoder.layer.23.lambda_1": "model-00001-of-00004.safetensors",
652
+ "vision_tower.encoder.layer.23.lambda_2": "model-00001-of-00004.safetensors",
653
+ "vision_tower.encoder.layer.23.layernorm_after.bias": "model-00001-of-00004.safetensors",
654
+ "vision_tower.encoder.layer.23.layernorm_after.weight": "model-00001-of-00004.safetensors",
655
+ "vision_tower.encoder.layer.23.layernorm_before.bias": "model-00001-of-00004.safetensors",
656
+ "vision_tower.encoder.layer.23.layernorm_before.weight": "model-00001-of-00004.safetensors",
657
+ "vision_tower.encoder.layer.23.mlp.fc1.bias": "model-00001-of-00004.safetensors",
658
+ "vision_tower.encoder.layer.23.mlp.fc1.weight": "model-00001-of-00004.safetensors",
659
+ "vision_tower.encoder.layer.23.mlp.fc2.bias": "model-00001-of-00004.safetensors",
660
+ "vision_tower.encoder.layer.23.mlp.fc2.weight": "model-00001-of-00004.safetensors",
661
+ "vision_tower.encoder.layer.3.attention.k_proj.bias": "model-00001-of-00004.safetensors",
662
+ "vision_tower.encoder.layer.3.attention.k_proj.weight": "model-00001-of-00004.safetensors",
663
+ "vision_tower.encoder.layer.3.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
664
+ "vision_tower.encoder.layer.3.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
665
+ "vision_tower.encoder.layer.3.attention.q_proj.bias": "model-00001-of-00004.safetensors",
666
+ "vision_tower.encoder.layer.3.attention.q_proj.weight": "model-00001-of-00004.safetensors",
667
+ "vision_tower.encoder.layer.3.attention.v_proj.bias": "model-00001-of-00004.safetensors",
668
+ "vision_tower.encoder.layer.3.attention.v_proj.weight": "model-00001-of-00004.safetensors",
669
+ "vision_tower.encoder.layer.3.lambda_1": "model-00001-of-00004.safetensors",
670
+ "vision_tower.encoder.layer.3.lambda_2": "model-00001-of-00004.safetensors",
671
+ "vision_tower.encoder.layer.3.layernorm_after.bias": "model-00001-of-00004.safetensors",
672
+ "vision_tower.encoder.layer.3.layernorm_after.weight": "model-00001-of-00004.safetensors",
673
+ "vision_tower.encoder.layer.3.layernorm_before.bias": "model-00001-of-00004.safetensors",
674
+ "vision_tower.encoder.layer.3.layernorm_before.weight": "model-00001-of-00004.safetensors",
675
+ "vision_tower.encoder.layer.3.mlp.fc1.bias": "model-00001-of-00004.safetensors",
676
+ "vision_tower.encoder.layer.3.mlp.fc1.weight": "model-00001-of-00004.safetensors",
677
+ "vision_tower.encoder.layer.3.mlp.fc2.bias": "model-00001-of-00004.safetensors",
678
+ "vision_tower.encoder.layer.3.mlp.fc2.weight": "model-00001-of-00004.safetensors",
679
+ "vision_tower.encoder.layer.4.attention.k_proj.bias": "model-00001-of-00004.safetensors",
680
+ "vision_tower.encoder.layer.4.attention.k_proj.weight": "model-00001-of-00004.safetensors",
681
+ "vision_tower.encoder.layer.4.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
682
+ "vision_tower.encoder.layer.4.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
683
+ "vision_tower.encoder.layer.4.attention.q_proj.bias": "model-00001-of-00004.safetensors",
684
+ "vision_tower.encoder.layer.4.attention.q_proj.weight": "model-00001-of-00004.safetensors",
685
+ "vision_tower.encoder.layer.4.attention.v_proj.bias": "model-00001-of-00004.safetensors",
686
+ "vision_tower.encoder.layer.4.attention.v_proj.weight": "model-00001-of-00004.safetensors",
687
+ "vision_tower.encoder.layer.4.lambda_1": "model-00001-of-00004.safetensors",
688
+ "vision_tower.encoder.layer.4.lambda_2": "model-00001-of-00004.safetensors",
689
+ "vision_tower.encoder.layer.4.layernorm_after.bias": "model-00001-of-00004.safetensors",
690
+ "vision_tower.encoder.layer.4.layernorm_after.weight": "model-00001-of-00004.safetensors",
691
+ "vision_tower.encoder.layer.4.layernorm_before.bias": "model-00001-of-00004.safetensors",
692
+ "vision_tower.encoder.layer.4.layernorm_before.weight": "model-00001-of-00004.safetensors",
693
+ "vision_tower.encoder.layer.4.mlp.fc1.bias": "model-00001-of-00004.safetensors",
694
+ "vision_tower.encoder.layer.4.mlp.fc1.weight": "model-00001-of-00004.safetensors",
695
+ "vision_tower.encoder.layer.4.mlp.fc2.bias": "model-00001-of-00004.safetensors",
696
+ "vision_tower.encoder.layer.4.mlp.fc2.weight": "model-00001-of-00004.safetensors",
697
+ "vision_tower.encoder.layer.5.attention.k_proj.bias": "model-00001-of-00004.safetensors",
698
+ "vision_tower.encoder.layer.5.attention.k_proj.weight": "model-00001-of-00004.safetensors",
699
+ "vision_tower.encoder.layer.5.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
700
+ "vision_tower.encoder.layer.5.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
701
+ "vision_tower.encoder.layer.5.attention.q_proj.bias": "model-00001-of-00004.safetensors",
702
+ "vision_tower.encoder.layer.5.attention.q_proj.weight": "model-00001-of-00004.safetensors",
703
+ "vision_tower.encoder.layer.5.attention.v_proj.bias": "model-00001-of-00004.safetensors",
704
+ "vision_tower.encoder.layer.5.attention.v_proj.weight": "model-00001-of-00004.safetensors",
705
+ "vision_tower.encoder.layer.5.lambda_1": "model-00001-of-00004.safetensors",
706
+ "vision_tower.encoder.layer.5.lambda_2": "model-00001-of-00004.safetensors",
707
+ "vision_tower.encoder.layer.5.layernorm_after.bias": "model-00001-of-00004.safetensors",
708
+ "vision_tower.encoder.layer.5.layernorm_after.weight": "model-00001-of-00004.safetensors",
709
+ "vision_tower.encoder.layer.5.layernorm_before.bias": "model-00001-of-00004.safetensors",
710
+ "vision_tower.encoder.layer.5.layernorm_before.weight": "model-00001-of-00004.safetensors",
711
+ "vision_tower.encoder.layer.5.mlp.fc1.bias": "model-00001-of-00004.safetensors",
712
+ "vision_tower.encoder.layer.5.mlp.fc1.weight": "model-00001-of-00004.safetensors",
713
+ "vision_tower.encoder.layer.5.mlp.fc2.bias": "model-00001-of-00004.safetensors",
714
+ "vision_tower.encoder.layer.5.mlp.fc2.weight": "model-00001-of-00004.safetensors",
715
+ "vision_tower.encoder.layer.6.attention.k_proj.bias": "model-00001-of-00004.safetensors",
716
+ "vision_tower.encoder.layer.6.attention.k_proj.weight": "model-00001-of-00004.safetensors",
717
+ "vision_tower.encoder.layer.6.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
718
+ "vision_tower.encoder.layer.6.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
719
+ "vision_tower.encoder.layer.6.attention.q_proj.bias": "model-00001-of-00004.safetensors",
720
+ "vision_tower.encoder.layer.6.attention.q_proj.weight": "model-00001-of-00004.safetensors",
721
+ "vision_tower.encoder.layer.6.attention.v_proj.bias": "model-00001-of-00004.safetensors",
722
+ "vision_tower.encoder.layer.6.attention.v_proj.weight": "model-00001-of-00004.safetensors",
723
+ "vision_tower.encoder.layer.6.lambda_1": "model-00001-of-00004.safetensors",
724
+ "vision_tower.encoder.layer.6.lambda_2": "model-00001-of-00004.safetensors",
725
+ "vision_tower.encoder.layer.6.layernorm_after.bias": "model-00001-of-00004.safetensors",
726
+ "vision_tower.encoder.layer.6.layernorm_after.weight": "model-00001-of-00004.safetensors",
727
+ "vision_tower.encoder.layer.6.layernorm_before.bias": "model-00001-of-00004.safetensors",
728
+ "vision_tower.encoder.layer.6.layernorm_before.weight": "model-00001-of-00004.safetensors",
729
+ "vision_tower.encoder.layer.6.mlp.fc1.bias": "model-00001-of-00004.safetensors",
730
+ "vision_tower.encoder.layer.6.mlp.fc1.weight": "model-00001-of-00004.safetensors",
731
+ "vision_tower.encoder.layer.6.mlp.fc2.bias": "model-00001-of-00004.safetensors",
732
+ "vision_tower.encoder.layer.6.mlp.fc2.weight": "model-00001-of-00004.safetensors",
733
+ "vision_tower.encoder.layer.7.attention.k_proj.bias": "model-00001-of-00004.safetensors",
734
+ "vision_tower.encoder.layer.7.attention.k_proj.weight": "model-00001-of-00004.safetensors",
735
+ "vision_tower.encoder.layer.7.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
736
+ "vision_tower.encoder.layer.7.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
737
+ "vision_tower.encoder.layer.7.attention.q_proj.bias": "model-00001-of-00004.safetensors",
738
+ "vision_tower.encoder.layer.7.attention.q_proj.weight": "model-00001-of-00004.safetensors",
739
+ "vision_tower.encoder.layer.7.attention.v_proj.bias": "model-00001-of-00004.safetensors",
740
+ "vision_tower.encoder.layer.7.attention.v_proj.weight": "model-00001-of-00004.safetensors",
741
+ "vision_tower.encoder.layer.7.lambda_1": "model-00001-of-00004.safetensors",
742
+ "vision_tower.encoder.layer.7.lambda_2": "model-00001-of-00004.safetensors",
743
+ "vision_tower.encoder.layer.7.layernorm_after.bias": "model-00001-of-00004.safetensors",
744
+ "vision_tower.encoder.layer.7.layernorm_after.weight": "model-00001-of-00004.safetensors",
745
+ "vision_tower.encoder.layer.7.layernorm_before.bias": "model-00001-of-00004.safetensors",
746
+ "vision_tower.encoder.layer.7.layernorm_before.weight": "model-00001-of-00004.safetensors",
747
+ "vision_tower.encoder.layer.7.mlp.fc1.bias": "model-00001-of-00004.safetensors",
748
+ "vision_tower.encoder.layer.7.mlp.fc1.weight": "model-00001-of-00004.safetensors",
749
+ "vision_tower.encoder.layer.7.mlp.fc2.bias": "model-00001-of-00004.safetensors",
750
+ "vision_tower.encoder.layer.7.mlp.fc2.weight": "model-00001-of-00004.safetensors",
751
+ "vision_tower.encoder.layer.8.attention.k_proj.bias": "model-00001-of-00004.safetensors",
752
+ "vision_tower.encoder.layer.8.attention.k_proj.weight": "model-00001-of-00004.safetensors",
753
+ "vision_tower.encoder.layer.8.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
754
+ "vision_tower.encoder.layer.8.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
755
+ "vision_tower.encoder.layer.8.attention.q_proj.bias": "model-00001-of-00004.safetensors",
756
+ "vision_tower.encoder.layer.8.attention.q_proj.weight": "model-00001-of-00004.safetensors",
757
+ "vision_tower.encoder.layer.8.attention.v_proj.bias": "model-00001-of-00004.safetensors",
758
+ "vision_tower.encoder.layer.8.attention.v_proj.weight": "model-00001-of-00004.safetensors",
759
+ "vision_tower.encoder.layer.8.lambda_1": "model-00001-of-00004.safetensors",
760
+ "vision_tower.encoder.layer.8.lambda_2": "model-00001-of-00004.safetensors",
761
+ "vision_tower.encoder.layer.8.layernorm_after.bias": "model-00001-of-00004.safetensors",
762
+ "vision_tower.encoder.layer.8.layernorm_after.weight": "model-00001-of-00004.safetensors",
763
+ "vision_tower.encoder.layer.8.layernorm_before.bias": "model-00001-of-00004.safetensors",
764
+ "vision_tower.encoder.layer.8.layernorm_before.weight": "model-00001-of-00004.safetensors",
765
+ "vision_tower.encoder.layer.8.mlp.fc1.bias": "model-00001-of-00004.safetensors",
766
+ "vision_tower.encoder.layer.8.mlp.fc1.weight": "model-00001-of-00004.safetensors",
767
+ "vision_tower.encoder.layer.8.mlp.fc2.bias": "model-00001-of-00004.safetensors",
768
+ "vision_tower.encoder.layer.8.mlp.fc2.weight": "model-00001-of-00004.safetensors",
769
+ "vision_tower.encoder.layer.9.attention.k_proj.bias": "model-00001-of-00004.safetensors",
770
+ "vision_tower.encoder.layer.9.attention.k_proj.weight": "model-00001-of-00004.safetensors",
771
+ "vision_tower.encoder.layer.9.attention.projection_layer.bias": "model-00001-of-00004.safetensors",
772
+ "vision_tower.encoder.layer.9.attention.projection_layer.weight": "model-00001-of-00004.safetensors",
773
+ "vision_tower.encoder.layer.9.attention.q_proj.bias": "model-00001-of-00004.safetensors",
774
+ "vision_tower.encoder.layer.9.attention.q_proj.weight": "model-00001-of-00004.safetensors",
775
+ "vision_tower.encoder.layer.9.attention.v_proj.bias": "model-00001-of-00004.safetensors",
776
+ "vision_tower.encoder.layer.9.attention.v_proj.weight": "model-00001-of-00004.safetensors",
777
+ "vision_tower.encoder.layer.9.lambda_1": "model-00001-of-00004.safetensors",
778
+ "vision_tower.encoder.layer.9.lambda_2": "model-00001-of-00004.safetensors",
779
+ "vision_tower.encoder.layer.9.layernorm_after.bias": "model-00001-of-00004.safetensors",
780
+ "vision_tower.encoder.layer.9.layernorm_after.weight": "model-00001-of-00004.safetensors",
781
+ "vision_tower.encoder.layer.9.layernorm_before.bias": "model-00001-of-00004.safetensors",
782
+ "vision_tower.encoder.layer.9.layernorm_before.weight": "model-00001-of-00004.safetensors",
783
+ "vision_tower.encoder.layer.9.mlp.fc1.bias": "model-00001-of-00004.safetensors",
784
+ "vision_tower.encoder.layer.9.mlp.fc1.weight": "model-00001-of-00004.safetensors",
785
+ "vision_tower.encoder.layer.9.mlp.fc2.bias": "model-00001-of-00004.safetensors",
786
+ "vision_tower.encoder.layer.9.mlp.fc2.weight": "model-00001-of-00004.safetensors"
787
+ }
788
+ }
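Note: the weight_map entries above (and the hundreds before them) are what shard-aware loaders consume: each tensor name resolves to the .safetensors shard that stores it, and every vision_tower tensor in this checkpoint lands in shard 1 of 4. A minimal lookup sketch using the safetensors library; it assumes the index and shards sit in the current directory:

import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "vision_tower.encoder.layer.0.mlp.fc1.weight"
shard = index["weight_map"][name]  # -> "model-00001-of-00004.safetensors"
with safe_open(shard, framework="pt", device="cpu") as f:
    tensor = f.get_tensor(name)  # loads only this tensor from the shard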
preprocessor_config.json ADDED
@@ -0,0 +1,34 @@
1
+ {
2
+ "crop_size": null,
3
+ "crop_to_patches": false,
4
+ "data_format": "channels_first",
5
+ "default_to_square": true,
6
+ "device": null,
7
+ "do_center_crop": null,
8
+ "do_convert_rgb": true,
9
+ "do_normalize": true,
10
+ "do_rescale": true,
11
+ "do_resize": true,
12
+ "image_mean": [
13
+ 0.485,
14
+ 0.456,
15
+ 0.406
16
+ ],
17
+ "image_processor_type": "GotOcr2ImageProcessorFast",
18
+ "image_std": [
19
+ 0.229,
20
+ 0.224,
21
+ 0.225
22
+ ],
23
+ "input_data_format": null,
24
+ "max_patches": 12,
25
+ "min_patches": 1,
26
+ "processor_class": "InternVLProcessor",
27
+ "resample": 3,
28
+ "rescale_factor": 0.00392156862745098,
29
+ "return_tensors": null,
30
+ "size": {
31
+ "height": 448,
32
+ "width": 448
33
+ }
34
+ }
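Note: taken together, the preprocessor values above pin down the pixel pipeline: resize to 448x448 tiles, rescale by 1/255 (0.00392156862745098), normalize with the ImageNet mean/std, and emit channels-first arrays. A hedged NumPy sketch of that arithmetic; normalize_tile is an illustrative helper, not part of this repo:

import numpy as np

RESCALE_FACTOR = 0.00392156862745098  # 1/255, from preprocessor_config.json
IMAGE_MEAN = np.array([0.485, 0.456, 0.406], dtype=np.float32)
IMAGE_STD = np.array([0.229, 0.224, 0.225], dtype=np.float32)

def normalize_tile(tile_uint8):
    """do_rescale + do_normalize for one 448x448x3 uint8 tile."""
    x = tile_uint8.astype(np.float32) * RESCALE_FACTOR  # [0, 255] -> [0, 1]
    x = (x - IMAGE_MEAN) / IMAGE_STD                    # per-channel normalize
    return x.transpose(2, 0, 1)                         # channels_first layout

tile = np.zeros((448, 448, 3), dtype=np.uint8)
assert normalize_tile(tile).shape == (3, 448, 448)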
processor_config.json ADDED
@@ -0,0 +1,4 @@
1
+ {
2
+ "image_seq_length": 256,
3
+ "processor_class": "InternVLProcessor"
4
+ }
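Note: image_seq_length of 256 means each 448x448 tile contributes 256 placeholder tokens to the prompt, so with the min_patches=1 / max_patches=12 bounds from the preprocessor, an image occupies roughly 256 to 3,072 token positions (ignoring any extra thumbnail tile the processor may add). A one-line sanity check; the function name is illustrative:

def image_token_count(num_patches, image_seq_length=256):
    # one context token per visual embedding, per tile
    return num_patches * image_seq_length

assert image_token_count(1) == 256
assert image_token_count(12) == 3072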
special_tokens_map.json ADDED
@@ -0,0 +1,44 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|im_start|>",
4
+ "<|im_end|>",
5
+ "<|object_ref_start|>",
6
+ "<|object_ref_end|>",
7
+ "<|box_start|>",
8
+ "<|box_end|>",
9
+ "<|quad_start|>",
10
+ "<|quad_end|>",
11
+ "<|vision_start|>",
12
+ "<|vision_end|>",
13
+ "<|vision_pad|>",
14
+ "<|image_pad|>",
15
+ "<|video_pad|>",
16
+ "<img>",
17
+ "</img>",
18
+ "<IMG_CONTEXT>",
19
+ "<quad>",
20
+ "</quad>",
21
+ "<ref>",
22
+ "</ref>",
23
+ "<box>",
24
+ "</box>"
25
+ ],
26
+ "context_image_token": "<IMG_CONTEXT>",
27
+ "end_image_token": "</img>",
28
+ "eos_token": {
29
+ "content": "<|im_end|>",
30
+ "lstrip": false,
31
+ "normalized": false,
32
+ "rstrip": false,
33
+ "single_word": false
34
+ },
35
+ "pad_token": {
36
+ "content": "<|endoftext|>",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false
41
+ },
42
+ "start_image_token": "<img>",
43
+ "video_token": "<video>"
44
+ }
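Note: the start/context/end image tokens declared above are how image features get spliced into the token stream: each image slot expands to <img>, a run of <IMG_CONTEXT> placeholders (one per visual token), and </img>. A sketch of that expansion under those assumptions; the exact chat template is owned by InternVLProcessor, not this file:

def image_placeholder(num_image_tokens=256):
    # 256 matches image_seq_length in processor_config.json (one tile)
    return "<img>" + "<IMG_CONTEXT>" * num_image_tokens + "</img>"

prompt = (
    "<|im_start|>user\n"
    + image_placeholder()
    + "\nDescribe this image.<|im_end|>\n<|im_start|>assistant\n"
)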
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:233abb119362bbd5eaa4c37cbc132e7d97de88820296fad36079deb992c0f43b
3
+ size 11505933
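Note: tokenizer.json is uploaded as a Git LFS pointer rather than inline content: the three lines record the pointer spec version, the SHA-256 of the real payload, and its size (about 11.5 MB). A small sketch for checking a downloaded copy against the pointer; the local path is hypothetical:

import hashlib

def matches_lfs_pointer(path, expected_sha256, expected_size):
    h, size = hashlib.sha256(), 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest() == expected_sha256 and size == expected_size

ok = matches_lfs_pointer(
    "tokenizer.json",
    "233abb119362bbd5eaa4c37cbc132e7d97de88820296fad36079deb992c0f43b",
    11505933,
)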
tokenizer_config.json ADDED
@@ -0,0 +1,3916 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ },
181
+ "151665": {
182
+ "content": "<img>",
183
+ "lstrip": false,
184
+ "normalized": false,
185
+ "rstrip": false,
186
+ "single_word": false,
187
+ "special": true
188
+ },
189
+ "151666": {
190
+ "content": "</img>",
191
+ "lstrip": false,
192
+ "normalized": false,
193
+ "rstrip": false,
194
+ "single_word": false,
195
+ "special": true
196
+ },
197
+ "151667": {
198
+ "content": "<IMG_CONTEXT>",
199
+ "lstrip": false,
200
+ "normalized": false,
201
+ "rstrip": false,
202
+ "single_word": false,
203
+ "special": true
204
+ },
205
+ "151668": {
206
+ "content": "<quad>",
207
+ "lstrip": false,
208
+ "normalized": false,
209
+ "rstrip": false,
210
+ "single_word": false,
211
+ "special": true
212
+ },
213
+ "151669": {
214
+ "content": "</quad>",
215
+ "lstrip": false,
216
+ "normalized": false,
217
+ "rstrip": false,
218
+ "single_word": false,
219
+ "special": true
220
+ },
221
+ "151670": {
222
+ "content": "<ref>",
223
+ "lstrip": false,
224
+ "normalized": false,
225
+ "rstrip": false,
226
+ "single_word": false,
227
+ "special": true
228
+ },
229
+ "151671": {
230
+ "content": "</ref>",
231
+ "lstrip": false,
232
+ "normalized": false,
233
+ "rstrip": false,
234
+ "single_word": false,
235
+ "special": true
236
+ },
237
+ "151672": {
238
+ "content": "<box>",
239
+ "lstrip": false,
240
+ "normalized": false,
241
+ "rstrip": false,
242
+ "single_word": false,
243
+ "special": true
244
+ },
245
+ "151673": {
246
+ "content": "</box>",
247
+ "lstrip": false,
248
+ "normalized": false,
249
+ "rstrip": false,
250
+ "single_word": false,
251
+ "special": true
252
+ },
253
+ "151674": {
254
+ "content": "<video>",
255
+ "lstrip": false,
256
+ "normalized": false,
257
+ "rstrip": false,
258
+ "single_word": false,
259
+ "special": true
260
+ },
261
+ "151675": {
262
+ "content": "-128",
263
+ "lstrip": false,
264
+ "normalized": false,
265
+ "rstrip": false,
266
+ "single_word": false,
267
+ "special": false
268
+ },
269
+ "151676": {
270
+ "content": "-127",
271
+ "lstrip": false,
272
+ "normalized": false,
273
+ "rstrip": false,
274
+ "single_word": false,
275
+ "special": false
276
+ },
277
+ "151677": {
278
+ "content": "-126",
279
+ "lstrip": false,
280
+ "normalized": false,
281
+ "rstrip": false,
282
+ "single_word": false,
283
+ "special": false
284
+ },
285
+ "151678": {
286
+ "content": "-125",
287
+ "lstrip": false,
288
+ "normalized": false,
289
+ "rstrip": false,
290
+ "single_word": false,
291
+ "special": false
292
+ },
293
+ "151679": {
294
+ "content": "-124",
295
+ "lstrip": false,
296
+ "normalized": false,
297
+ "rstrip": false,
298
+ "single_word": false,
299
+ "special": false
300
+ },
+ "151680": {
+ "content": "-123",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151681": {
+ "content": "-122",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151682": {
+ "content": "-121",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151683": {
+ "content": "-120",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151684": {
+ "content": "-119",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151685": {
+ "content": "-118",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151686": {
+ "content": "-117",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151687": {
+ "content": "-116",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151688": {
+ "content": "-115",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151689": {
+ "content": "-114",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151690": {
+ "content": "-113",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151691": {
+ "content": "-112",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151692": {
+ "content": "-111",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151693": {
+ "content": "-110",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151694": {
+ "content": "-109",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151695": {
+ "content": "-108",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151696": {
+ "content": "-107",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151697": {
+ "content": "-106",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151698": {
+ "content": "-105",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151699": {
+ "content": "-104",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151700": {
+ "content": "-103",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151701": {
+ "content": "-102",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151702": {
+ "content": "-101",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151703": {
+ "content": "-100",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151704": {
+ "content": "-99",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151705": {
+ "content": "-98",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151706": {
+ "content": "-97",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151707": {
+ "content": "-96",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151708": {
+ "content": "-95",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151709": {
+ "content": "-94",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151710": {
+ "content": "-93",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151711": {
+ "content": "-92",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151712": {
+ "content": "-91",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151713": {
+ "content": "-90",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151714": {
+ "content": "-89",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151715": {
+ "content": "-88",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151716": {
+ "content": "-87",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151717": {
+ "content": "-86",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151718": {
+ "content": "-85",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151719": {
+ "content": "-84",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151720": {
+ "content": "-83",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151721": {
+ "content": "-82",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151722": {
+ "content": "-81",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151723": {
+ "content": "-80",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151724": {
+ "content": "-79",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151725": {
+ "content": "-78",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151726": {
+ "content": "-77",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151727": {
+ "content": "-76",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151728": {
+ "content": "-75",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151729": {
+ "content": "-74",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151730": {
+ "content": "-73",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151731": {
+ "content": "-72",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151732": {
+ "content": "-71",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151733": {
+ "content": "-70",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151734": {
+ "content": "-69",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151735": {
+ "content": "-68",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151736": {
+ "content": "-67",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151737": {
+ "content": "-66",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151738": {
+ "content": "-65",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151739": {
+ "content": "-64",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151740": {
+ "content": "-63",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151741": {
+ "content": "-62",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151742": {
+ "content": "-61",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151743": {
+ "content": "-60",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151744": {
+ "content": "-59",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151745": {
+ "content": "-58",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151746": {
+ "content": "-57",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151747": {
+ "content": "-56",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151748": {
+ "content": "-55",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151749": {
+ "content": "-54",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151750": {
+ "content": "-53",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151751": {
+ "content": "-52",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151752": {
+ "content": "-51",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151753": {
+ "content": "-50",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151754": {
+ "content": "-49",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151755": {
+ "content": "-48",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151756": {
+ "content": "-47",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151757": {
+ "content": "-46",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151758": {
+ "content": "-45",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151759": {
+ "content": "-44",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151760": {
+ "content": "-43",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151761": {
+ "content": "-42",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151762": {
+ "content": "-41",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151763": {
+ "content": "-40",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151764": {
+ "content": "-39",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151765": {
+ "content": "-38",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151766": {
+ "content": "-37",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151767": {
+ "content": "-36",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151768": {
+ "content": "-35",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151769": {
+ "content": "-34",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151770": {
+ "content": "-33",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151771": {
+ "content": "-32",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151772": {
+ "content": "-31",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151773": {
+ "content": "-30",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151774": {
+ "content": "-29",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151775": {
+ "content": "-28",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151776": {
+ "content": "-27",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151777": {
+ "content": "-26",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151778": {
+ "content": "-25",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151779": {
+ "content": "-24",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151780": {
+ "content": "-23",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151781": {
+ "content": "-22",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151782": {
+ "content": "-21",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151783": {
+ "content": "-20",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151784": {
+ "content": "-19",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151785": {
+ "content": "-18",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151786": {
+ "content": "-17",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151787": {
+ "content": "-16",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151788": {
+ "content": "-15",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151789": {
+ "content": "-14",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151790": {
+ "content": "-13",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151791": {
+ "content": "-12",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151792": {
+ "content": "-11",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151793": {
+ "content": "-10",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151794": {
+ "content": "-9",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151795": {
+ "content": "-8",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151796": {
+ "content": "-7",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151797": {
+ "content": "-6",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151798": {
+ "content": "-5",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151799": {
+ "content": "-4",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151800": {
+ "content": "-3",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151801": {
+ "content": "-2",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151802": {
+ "content": "-1",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151803": {
+ "content": "10",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151804": {
+ "content": "11",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151805": {
+ "content": "12",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151806": {
+ "content": "13",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151807": {
+ "content": "14",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151808": {
+ "content": "15",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151809": {
+ "content": "16",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151810": {
+ "content": "17",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151811": {
+ "content": "18",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151812": {
+ "content": "19",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151813": {
+ "content": "20",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151814": {
+ "content": "21",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151815": {
+ "content": "22",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151816": {
+ "content": "23",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151817": {
+ "content": "24",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151818": {
+ "content": "25",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151819": {
+ "content": "26",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151820": {
+ "content": "27",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151821": {
+ "content": "28",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151822": {
+ "content": "29",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151823": {
+ "content": "30",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151824": {
+ "content": "31",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151825": {
+ "content": "32",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151826": {
+ "content": "33",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151827": {
+ "content": "34",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151828": {
+ "content": "35",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151829": {
+ "content": "36",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151830": {
+ "content": "37",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151831": {
+ "content": "38",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151832": {
+ "content": "39",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151833": {
+ "content": "40",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151834": {
+ "content": "41",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151835": {
+ "content": "42",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151836": {
+ "content": "43",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151837": {
+ "content": "44",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151838": {
+ "content": "45",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151839": {
+ "content": "46",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151840": {
+ "content": "47",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151841": {
+ "content": "48",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151842": {
+ "content": "49",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151843": {
+ "content": "50",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151844": {
+ "content": "51",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151845": {
+ "content": "52",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151846": {
+ "content": "53",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151847": {
+ "content": "54",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151848": {
+ "content": "55",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151849": {
+ "content": "56",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151850": {
+ "content": "57",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151851": {
+ "content": "58",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151852": {
+ "content": "59",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151853": {
+ "content": "60",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151854": {
+ "content": "61",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151855": {
+ "content": "62",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151856": {
+ "content": "63",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151857": {
+ "content": "64",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151858": {
+ "content": "65",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151859": {
+ "content": "66",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151860": {
+ "content": "67",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151861": {
+ "content": "68",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151862": {
+ "content": "69",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151863": {
+ "content": "70",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151864": {
+ "content": "71",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151865": {
+ "content": "72",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151866": {
+ "content": "73",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151867": {
+ "content": "74",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151868": {
+ "content": "75",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151869": {
+ "content": "76",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151870": {
+ "content": "77",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151871": {
+ "content": "78",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151872": {
+ "content": "79",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151873": {
+ "content": "80",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151874": {
+ "content": "81",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151875": {
+ "content": "82",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151876": {
+ "content": "83",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151877": {
+ "content": "84",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151878": {
+ "content": "85",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151879": {
+ "content": "86",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151880": {
+ "content": "87",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151881": {
+ "content": "88",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151882": {
+ "content": "89",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151883": {
+ "content": "90",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151884": {
+ "content": "91",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151885": {
+ "content": "92",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151886": {
+ "content": "93",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151887": {
+ "content": "94",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151888": {
+ "content": "95",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151889": {
+ "content": "96",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151890": {
+ "content": "97",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151891": {
+ "content": "98",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151892": {
+ "content": "99",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151893": {
+ "content": "100",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151894": {
+ "content": "101",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151895": {
+ "content": "102",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151896": {
+ "content": "103",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151897": {
+ "content": "104",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151898": {
+ "content": "105",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151899": {
+ "content": "106",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151900": {
+ "content": "107",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151901": {
+ "content": "108",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151902": {
+ "content": "109",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151903": {
+ "content": "110",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151904": {
+ "content": "111",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151905": {
+ "content": "112",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151906": {
+ "content": "113",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151907": {
+ "content": "114",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151908": {
+ "content": "115",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151909": {
+ "content": "116",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151910": {
+ "content": "117",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151911": {
+ "content": "118",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151912": {
+ "content": "119",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151913": {
+ "content": "120",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151914": {
+ "content": "121",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151915": {
+ "content": "122",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151916": {
+ "content": "123",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151917": {
+ "content": "124",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151918": {
+ "content": "125",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151919": {
+ "content": "126",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151920": {
+ "content": "127",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151921": {
+ "content": "128",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151922": {
+ "content": ".00",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151923": {
+ "content": ".01",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151924": {
+ "content": ".02",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151925": {
+ "content": ".03",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151926": {
+ "content": ".04",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151927": {
+ "content": ".05",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151928": {
+ "content": ".06",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151929": {
+ "content": ".07",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151930": {
+ "content": ".08",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151931": {
+ "content": ".09",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151932": {
+ "content": ".10",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151933": {
+ "content": ".11",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151934": {
+ "content": ".12",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151935": {
+ "content": ".13",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151936": {
+ "content": ".14",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151937": {
+ "content": ".15",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151938": {
+ "content": ".16",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151939": {
+ "content": ".17",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151940": {
+ "content": ".18",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151941": {
+ "content": ".19",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151942": {
+ "content": ".20",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151943": {
+ "content": ".21",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151944": {
+ "content": ".22",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151945": {
+ "content": ".23",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151946": {
+ "content": ".24",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151947": {
+ "content": ".25",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151948": {
+ "content": ".26",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151949": {
+ "content": ".27",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151950": {
+ "content": ".28",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151951": {
+ "content": ".29",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151952": {
+ "content": ".30",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151953": {
+ "content": ".31",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151954": {
+ "content": ".32",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151955": {
+ "content": ".33",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151956": {
+ "content": ".34",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151957": {
+ "content": ".35",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151958": {
+ "content": ".36",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151959": {
+ "content": ".37",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151960": {
+ "content": ".38",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151961": {
+ "content": ".39",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151962": {
+ "content": ".40",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151963": {
+ "content": ".41",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151964": {
+ "content": ".42",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151965": {
+ "content": ".43",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151966": {
+ "content": ".44",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151967": {
+ "content": ".45",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151968": {
+ "content": ".46",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151969": {
+ "content": ".47",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151970": {
+ "content": ".48",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151971": {
+ "content": ".49",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151972": {
+ "content": ".50",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151973": {
+ "content": ".51",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151974": {
+ "content": ".52",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151975": {
+ "content": ".53",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151976": {
+ "content": ".54",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151977": {
+ "content": ".55",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151978": {
+ "content": ".56",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151979": {
+ "content": ".57",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151980": {
+ "content": ".58",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151981": {
+ "content": ".59",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151982": {
+ "content": ".60",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151983": {
+ "content": ".61",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151984": {
+ "content": ".62",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151985": {
+ "content": ".63",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151986": {
+ "content": ".64",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151987": {
+ "content": ".65",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151988": {
+ "content": ".66",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151989": {
+ "content": ".67",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151990": {
+ "content": ".68",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151991": {
+ "content": ".69",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151992": {
+ "content": ".70",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151993": {
+ "content": ".71",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151994": {
+ "content": ".72",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151995": {
+ "content": ".73",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151996": {
+ "content": ".74",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151997": {
+ "content": ".75",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151998": {
+ "content": ".76",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151999": {
+ "content": ".77",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152000": {
+ "content": ".78",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152001": {
+ "content": ".79",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152002": {
+ "content": ".80",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152003": {
+ "content": ".81",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152004": {
+ "content": ".82",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152005": {
+ "content": ".83",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152006": {
+ "content": ".84",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152007": {
+ "content": ".85",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152008": {
+ "content": ".86",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152009": {
+ "content": ".87",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152010": {
+ "content": ".88",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152011": {
+ "content": ".89",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152012": {
+ "content": ".90",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152013": {
+ "content": ".91",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152014": {
+ "content": ".92",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152015": {
+ "content": ".93",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152016": {
+ "content": ".94",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152017": {
+ "content": ".95",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152018": {
+ "content": ".96",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152019": {
+ "content": ".97",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152020": {
+ "content": ".98",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "152021": {
+ "content": ".99",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
3037
+ "152022": {
3038
+ "content": ".0",
3039
+ "lstrip": false,
3040
+ "normalized": false,
3041
+ "rstrip": false,
3042
+ "single_word": false,
3043
+ "special": false
3044
+ },
3045
+ "152023": {
3046
+ "content": ".1",
3047
+ "lstrip": false,
3048
+ "normalized": false,
3049
+ "rstrip": false,
3050
+ "single_word": false,
3051
+ "special": false
3052
+ },
3053
+ "152024": {
3054
+ "content": ".2",
3055
+ "lstrip": false,
3056
+ "normalized": false,
3057
+ "rstrip": false,
3058
+ "single_word": false,
3059
+ "special": false
3060
+ },
3061
+ "152025": {
3062
+ "content": ".3",
3063
+ "lstrip": false,
3064
+ "normalized": false,
3065
+ "rstrip": false,
3066
+ "single_word": false,
3067
+ "special": false
3068
+ },
3069
+ "152026": {
3070
+ "content": ".4",
3071
+ "lstrip": false,
3072
+ "normalized": false,
3073
+ "rstrip": false,
3074
+ "single_word": false,
3075
+ "special": false
3076
+ },
3077
+ "152027": {
3078
+ "content": ".5",
3079
+ "lstrip": false,
3080
+ "normalized": false,
3081
+ "rstrip": false,
3082
+ "single_word": false,
3083
+ "special": false
3084
+ },
3085
+ "152028": {
3086
+ "content": ".6",
3087
+ "lstrip": false,
3088
+ "normalized": false,
3089
+ "rstrip": false,
3090
+ "single_word": false,
3091
+ "special": false
3092
+ },
3093
+ "152029": {
3094
+ "content": ".7",
3095
+ "lstrip": false,
3096
+ "normalized": false,
3097
+ "rstrip": false,
3098
+ "single_word": false,
3099
+ "special": false
3100
+ },
3101
+ "152030": {
3102
+ "content": ".8",
3103
+ "lstrip": false,
3104
+ "normalized": false,
3105
+ "rstrip": false,
3106
+ "single_word": false,
3107
+ "special": false
3108
+ },
3109
+ "152031": {
3110
+ "content": ".9",
3111
+ "lstrip": false,
3112
+ "normalized": false,
3113
+ "rstrip": false,
3114
+ "single_word": false,
3115
+ "special": false
3116
+ },
3117
+ "152032": {
3118
+ "content": " width=\"",
3119
+ "lstrip": false,
3120
+ "normalized": false,
3121
+ "rstrip": false,
3122
+ "single_word": false,
3123
+ "special": false
3124
+ },
3125
+ "152033": {
3126
+ "content": " height=\"",
3127
+ "lstrip": false,
3128
+ "normalized": false,
3129
+ "rstrip": false,
3130
+ "single_word": false,
3131
+ "special": false
3132
+ },
3133
+ "152034": {
3134
+ "content": " viewBox=\"",
3135
+ "lstrip": false,
3136
+ "normalized": false,
3137
+ "rstrip": false,
3138
+ "single_word": false,
3139
+ "special": false
3140
+ },
3141
+ "152035": {
3142
+ "content": " id=\"",
3143
+ "lstrip": false,
3144
+ "normalized": false,
3145
+ "rstrip": false,
3146
+ "single_word": false,
3147
+ "special": false
3148
+ },
3149
+ "152036": {
3150
+ "content": " class=\"",
3151
+ "lstrip": false,
3152
+ "normalized": false,
3153
+ "rstrip": false,
3154
+ "single_word": false,
3155
+ "special": false
3156
+ },
3157
+ "152037": {
3158
+ "content": " x=\"",
3159
+ "lstrip": false,
3160
+ "normalized": false,
3161
+ "rstrip": false,
3162
+ "single_word": false,
3163
+ "special": false
3164
+ },
3165
+ "152038": {
3166
+ "content": " y=\"",
3167
+ "lstrip": false,
3168
+ "normalized": false,
3169
+ "rstrip": false,
3170
+ "single_word": false,
3171
+ "special": false
3172
+ },
3173
+ "152039": {
3174
+ "content": " x1=\"",
3175
+ "lstrip": false,
3176
+ "normalized": false,
3177
+ "rstrip": false,
3178
+ "single_word": false,
3179
+ "special": false
3180
+ },
3181
+ "152040": {
3182
+ "content": " y1=\"",
3183
+ "lstrip": false,
3184
+ "normalized": false,
3185
+ "rstrip": false,
3186
+ "single_word": false,
3187
+ "special": false
3188
+ },
3189
+ "152041": {
3190
+ "content": " x2=\"",
3191
+ "lstrip": false,
3192
+ "normalized": false,
3193
+ "rstrip": false,
3194
+ "single_word": false,
3195
+ "special": false
3196
+ },
3197
+ "152042": {
3198
+ "content": " y2=\"",
3199
+ "lstrip": false,
3200
+ "normalized": false,
3201
+ "rstrip": false,
3202
+ "single_word": false,
3203
+ "special": false
3204
+ },
3205
+ "152043": {
3206
+ "content": " cx=\"",
3207
+ "lstrip": false,
3208
+ "normalized": false,
3209
+ "rstrip": false,
3210
+ "single_word": false,
3211
+ "special": false
3212
+ },
3213
+ "152044": {
3214
+ "content": " cy=\"",
3215
+ "lstrip": false,
3216
+ "normalized": false,
3217
+ "rstrip": false,
3218
+ "single_word": false,
3219
+ "special": false
3220
+ },
3221
+ "152045": {
3222
+ "content": " r=\"",
3223
+ "lstrip": false,
3224
+ "normalized": false,
3225
+ "rstrip": false,
3226
+ "single_word": false,
3227
+ "special": false
3228
+ },
3229
+ "152046": {
3230
+ "content": " rx=\"",
3231
+ "lstrip": false,
3232
+ "normalized": false,
3233
+ "rstrip": false,
3234
+ "single_word": false,
3235
+ "special": false
3236
+ },
3237
+ "152047": {
3238
+ "content": " ry=\"",
3239
+ "lstrip": false,
3240
+ "normalized": false,
3241
+ "rstrip": false,
3242
+ "single_word": false,
3243
+ "special": false
3244
+ },
3245
+ "152048": {
3246
+ "content": " d=\"",
3247
+ "lstrip": false,
3248
+ "normalized": false,
3249
+ "rstrip": false,
3250
+ "single_word": false,
3251
+ "special": false
3252
+ },
3253
+ "152049": {
3254
+ "content": " points=\"",
3255
+ "lstrip": false,
3256
+ "normalized": false,
3257
+ "rstrip": false,
3258
+ "single_word": false,
3259
+ "special": false
3260
+ },
3261
+ "152050": {
3262
+ "content": " fill=\"",
3263
+ "lstrip": false,
3264
+ "normalized": false,
3265
+ "rstrip": false,
3266
+ "single_word": false,
3267
+ "special": false
3268
+ },
3269
+ "152051": {
3270
+ "content": " stroke=\"",
3271
+ "lstrip": false,
3272
+ "normalized": false,
3273
+ "rstrip": false,
3274
+ "single_word": false,
3275
+ "special": false
3276
+ },
3277
+ "152052": {
3278
+ "content": " stroke-width=\"",
3279
+ "lstrip": false,
3280
+ "normalized": false,
3281
+ "rstrip": false,
3282
+ "single_word": false,
3283
+ "special": false
3284
+ },
3285
+ "152053": {
3286
+ "content": " stroke-linecap=\"",
3287
+ "lstrip": false,
3288
+ "normalized": false,
3289
+ "rstrip": false,
3290
+ "single_word": false,
3291
+ "special": false
3292
+ },
3293
+ "152054": {
3294
+ "content": " stroke-linejoin=\"",
3295
+ "lstrip": false,
3296
+ "normalized": false,
3297
+ "rstrip": false,
3298
+ "single_word": false,
3299
+ "special": false
3300
+ },
3301
+ "152055": {
3302
+ "content": " stroke-miterlimit=\"",
3303
+ "lstrip": false,
3304
+ "normalized": false,
3305
+ "rstrip": false,
3306
+ "single_word": false,
3307
+ "special": false
3308
+ },
3309
+ "152056": {
3310
+ "content": " fill-rule=\"",
3311
+ "lstrip": false,
3312
+ "normalized": false,
3313
+ "rstrip": false,
3314
+ "single_word": false,
3315
+ "special": false
3316
+ },
3317
+ "152057": {
3318
+ "content": " clip-path=\"",
3319
+ "lstrip": false,
3320
+ "normalized": false,
3321
+ "rstrip": false,
3322
+ "single_word": false,
3323
+ "special": false
3324
+ },
3325
+ "152058": {
3326
+ "content": " opacity=\"",
3327
+ "lstrip": false,
3328
+ "normalized": false,
3329
+ "rstrip": false,
3330
+ "single_word": false,
3331
+ "special": false
3332
+ },
3333
+ "152059": {
3334
+ "content": " transform=\"",
3335
+ "lstrip": false,
3336
+ "normalized": false,
3337
+ "rstrip": false,
3338
+ "single_word": false,
3339
+ "special": false
3340
+ },
3341
+ "152060": {
3342
+ "content": " font-size=\"",
3343
+ "lstrip": false,
3344
+ "normalized": false,
3345
+ "rstrip": false,
3346
+ "single_word": false,
3347
+ "special": false
3348
+ },
3349
+ "152061": {
3350
+ "content": " font-family=\"",
3351
+ "lstrip": false,
3352
+ "normalized": false,
3353
+ "rstrip": false,
3354
+ "single_word": false,
3355
+ "special": false
3356
+ },
3357
+ "152062": {
3358
+ "content": " text-anchor=\"",
3359
+ "lstrip": false,
3360
+ "normalized": false,
3361
+ "rstrip": false,
3362
+ "single_word": false,
3363
+ "special": false
3364
+ },
3365
+ "152063": {
3366
+ "content": " gradientUnits=\"",
3367
+ "lstrip": false,
3368
+ "normalized": false,
3369
+ "rstrip": false,
3370
+ "single_word": false,
3371
+ "special": false
3372
+ },
3373
+ "152064": {
3374
+ "content": " gradientTransform=\"",
3375
+ "lstrip": false,
3376
+ "normalized": false,
3377
+ "rstrip": false,
3378
+ "single_word": false,
3379
+ "special": false
3380
+ },
3381
+ "152065": {
3382
+ "content": " offset=\"",
3383
+ "lstrip": false,
3384
+ "normalized": false,
3385
+ "rstrip": false,
3386
+ "single_word": false,
3387
+ "special": false
3388
+ },
3389
+ "152066": {
3390
+ "content": " stop-color=\"",
3391
+ "lstrip": false,
3392
+ "normalized": false,
3393
+ "rstrip": false,
3394
+ "single_word": false,
3395
+ "special": false
3396
+ },
3397
+ "152067": {
3398
+ "content": " dur=\"",
3399
+ "lstrip": false,
3400
+ "normalized": false,
3401
+ "rstrip": false,
3402
+ "single_word": false,
3403
+ "special": false
3404
+ },
3405
+ "152068": {
3406
+ "content": " from=\"",
3407
+ "lstrip": false,
3408
+ "normalized": false,
3409
+ "rstrip": false,
3410
+ "single_word": false,
3411
+ "special": false
3412
+ },
3413
+ "152069": {
3414
+ "content": " to=\"",
3415
+ "lstrip": false,
3416
+ "normalized": false,
3417
+ "rstrip": false,
3418
+ "single_word": false,
3419
+ "special": false
3420
+ },
3421
+ "152070": {
3422
+ "content": " repeatCount=\"",
3423
+ "lstrip": false,
3424
+ "normalized": false,
3425
+ "rstrip": false,
3426
+ "single_word": false,
3427
+ "special": false
3428
+ },
3429
+ "152071": {
3430
+ "content": " begin=\"",
3431
+ "lstrip": false,
3432
+ "normalized": false,
3433
+ "rstrip": false,
3434
+ "single_word": false,
3435
+ "special": false
3436
+ },
3437
+ "152072": {
3438
+ "content": " rotate=\"",
3439
+ "lstrip": false,
3440
+ "normalized": false,
3441
+ "rstrip": false,
3442
+ "single_word": false,
3443
+ "special": false
3444
+ },
3445
+ "152073": {
3446
+ "content": " path=\"",
3447
+ "lstrip": false,
3448
+ "normalized": false,
3449
+ "rstrip": false,
3450
+ "single_word": false,
3451
+ "special": false
3452
+ },
3453
+ "152074": {
3454
+ "content": "<svg",
3455
+ "lstrip": false,
3456
+ "normalized": false,
3457
+ "rstrip": false,
3458
+ "single_word": false,
3459
+ "special": false
3460
+ },
3461
+ "152075": {
3462
+ "content": "</svg>",
3463
+ "lstrip": false,
3464
+ "normalized": false,
3465
+ "rstrip": false,
3466
+ "single_word": false,
3467
+ "special": false
3468
+ },
3469
+ "152076": {
3470
+ "content": "</g>",
3471
+ "lstrip": false,
3472
+ "normalized": false,
3473
+ "rstrip": false,
3474
+ "single_word": false,
3475
+ "special": false
3476
+ },
3477
+ "152077": {
3478
+ "content": "<defs",
3479
+ "lstrip": false,
3480
+ "normalized": false,
3481
+ "rstrip": false,
3482
+ "single_word": false,
3483
+ "special": false
3484
+ },
3485
+ "152078": {
3486
+ "content": "</defs>",
3487
+ "lstrip": false,
3488
+ "normalized": false,
3489
+ "rstrip": false,
3490
+ "single_word": false,
3491
+ "special": false
3492
+ },
3493
+ "152079": {
3494
+ "content": "<use",
3495
+ "lstrip": false,
3496
+ "normalized": false,
3497
+ "rstrip": false,
3498
+ "single_word": false,
3499
+ "special": false
3500
+ },
3501
+ "152080": {
3502
+ "content": "</use>",
3503
+ "lstrip": false,
3504
+ "normalized": false,
3505
+ "rstrip": false,
3506
+ "single_word": false,
3507
+ "special": false
3508
+ },
3509
+ "152081": {
3510
+ "content": "</path>",
3511
+ "lstrip": false,
3512
+ "normalized": false,
3513
+ "rstrip": false,
3514
+ "single_word": false,
3515
+ "special": false
3516
+ },
3517
+ "152082": {
3518
+ "content": "<rect",
3519
+ "lstrip": false,
3520
+ "normalized": false,
3521
+ "rstrip": false,
3522
+ "single_word": false,
3523
+ "special": false
3524
+ },
3525
+ "152083": {
3526
+ "content": "</rect>",
3527
+ "lstrip": false,
3528
+ "normalized": false,
3529
+ "rstrip": false,
3530
+ "single_word": false,
3531
+ "special": false
3532
+ },
3533
+ "152084": {
3534
+ "content": "<circle",
3535
+ "lstrip": false,
3536
+ "normalized": false,
3537
+ "rstrip": false,
3538
+ "single_word": false,
3539
+ "special": false
3540
+ },
3541
+ "152085": {
3542
+ "content": "</circle>",
3543
+ "lstrip": false,
3544
+ "normalized": false,
3545
+ "rstrip": false,
3546
+ "single_word": false,
3547
+ "special": false
3548
+ },
3549
+ "152086": {
3550
+ "content": "<ellipse",
3551
+ "lstrip": false,
3552
+ "normalized": false,
3553
+ "rstrip": false,
3554
+ "single_word": false,
3555
+ "special": false
3556
+ },
3557
+ "152087": {
3558
+ "content": "</ellipse>",
3559
+ "lstrip": false,
3560
+ "normalized": false,
3561
+ "rstrip": false,
3562
+ "single_word": false,
3563
+ "special": false
3564
+ },
3565
+ "152088": {
3566
+ "content": "<line",
3567
+ "lstrip": false,
3568
+ "normalized": false,
3569
+ "rstrip": false,
3570
+ "single_word": false,
3571
+ "special": false
3572
+ },
3573
+ "152089": {
3574
+ "content": "</line>",
3575
+ "lstrip": false,
3576
+ "normalized": false,
3577
+ "rstrip": false,
3578
+ "single_word": false,
3579
+ "special": false
3580
+ },
3581
+ "152090": {
3582
+ "content": "<polyline",
3583
+ "lstrip": false,
3584
+ "normalized": false,
3585
+ "rstrip": false,
3586
+ "single_word": false,
3587
+ "special": false
3588
+ },
3589
+ "152091": {
3590
+ "content": "</polyline>",
3591
+ "lstrip": false,
3592
+ "normalized": false,
3593
+ "rstrip": false,
3594
+ "single_word": false,
3595
+ "special": false
3596
+ },
3597
+ "152092": {
3598
+ "content": "<polygon",
3599
+ "lstrip": false,
3600
+ "normalized": false,
3601
+ "rstrip": false,
3602
+ "single_word": false,
3603
+ "special": false
3604
+ },
3605
+ "152093": {
3606
+ "content": "</polygon>",
3607
+ "lstrip": false,
3608
+ "normalized": false,
3609
+ "rstrip": false,
3610
+ "single_word": false,
3611
+ "special": false
3612
+ },
3613
+ "152094": {
3614
+ "content": "<text",
3615
+ "lstrip": false,
3616
+ "normalized": false,
3617
+ "rstrip": false,
3618
+ "single_word": false,
3619
+ "special": false
3620
+ },
3621
+ "152095": {
3622
+ "content": "</text>",
3623
+ "lstrip": false,
3624
+ "normalized": false,
3625
+ "rstrip": false,
3626
+ "single_word": false,
3627
+ "special": false
3628
+ },
3629
+ "152096": {
3630
+ "content": "<tspan",
3631
+ "lstrip": false,
3632
+ "normalized": false,
3633
+ "rstrip": false,
3634
+ "single_word": false,
3635
+ "special": false
3636
+ },
3637
+ "152097": {
3638
+ "content": "</tspan>",
3639
+ "lstrip": false,
3640
+ "normalized": false,
3641
+ "rstrip": false,
3642
+ "single_word": false,
3643
+ "special": false
3644
+ },
3645
+ "152098": {
3646
+ "content": "<textPath",
3647
+ "lstrip": false,
3648
+ "normalized": false,
3649
+ "rstrip": false,
3650
+ "single_word": false,
3651
+ "special": false
3652
+ },
3653
+ "152099": {
3654
+ "content": "</textPath>",
3655
+ "lstrip": false,
3656
+ "normalized": false,
3657
+ "rstrip": false,
3658
+ "single_word": false,
3659
+ "special": false
3660
+ },
3661
+ "152100": {
3662
+ "content": "<linearGradient",
3663
+ "lstrip": false,
3664
+ "normalized": false,
3665
+ "rstrip": false,
3666
+ "single_word": false,
3667
+ "special": false
3668
+ },
3669
+ "152101": {
3670
+ "content": "</linearGradient>",
3671
+ "lstrip": false,
3672
+ "normalized": false,
3673
+ "rstrip": false,
3674
+ "single_word": false,
3675
+ "special": false
3676
+ },
3677
+ "152102": {
3678
+ "content": "<radialGradient",
3679
+ "lstrip": false,
3680
+ "normalized": false,
3681
+ "rstrip": false,
3682
+ "single_word": false,
3683
+ "special": false
3684
+ },
3685
+ "152103": {
3686
+ "content": "</radialGradient>",
3687
+ "lstrip": false,
3688
+ "normalized": false,
3689
+ "rstrip": false,
3690
+ "single_word": false,
3691
+ "special": false
3692
+ },
3693
+ "152104": {
3694
+ "content": "<stop",
3695
+ "lstrip": false,
3696
+ "normalized": false,
3697
+ "rstrip": false,
3698
+ "single_word": false,
3699
+ "special": false
3700
+ },
3701
+ "152105": {
3702
+ "content": "</stop>",
3703
+ "lstrip": false,
3704
+ "normalized": false,
3705
+ "rstrip": false,
3706
+ "single_word": false,
3707
+ "special": false
3708
+ },
3709
+ "152106": {
3710
+ "content": "<clipPath",
3711
+ "lstrip": false,
3712
+ "normalized": false,
3713
+ "rstrip": false,
3714
+ "single_word": false,
3715
+ "special": false
3716
+ },
3717
+ "152107": {
3718
+ "content": "</clipPath>",
3719
+ "lstrip": false,
3720
+ "normalized": false,
3721
+ "rstrip": false,
3722
+ "single_word": false,
3723
+ "special": false
3724
+ },
3725
+ "152108": {
3726
+ "content": "<mask",
3727
+ "lstrip": false,
3728
+ "normalized": false,
3729
+ "rstrip": false,
3730
+ "single_word": false,
3731
+ "special": false
3732
+ },
3733
+ "152109": {
3734
+ "content": "</mask>",
3735
+ "lstrip": false,
3736
+ "normalized": false,
3737
+ "rstrip": false,
3738
+ "single_word": false,
3739
+ "special": false
3740
+ },
3741
+ "152110": {
3742
+ "content": "<filter",
3743
+ "lstrip": false,
3744
+ "normalized": false,
3745
+ "rstrip": false,
3746
+ "single_word": false,
3747
+ "special": false
3748
+ },
3749
+ "152111": {
3750
+ "content": "</filter>",
3751
+ "lstrip": false,
3752
+ "normalized": false,
3753
+ "rstrip": false,
3754
+ "single_word": false,
3755
+ "special": false
3756
+ },
3757
+ "152112": {
3758
+ "content": "<feGaussianBlur",
3759
+ "lstrip": false,
3760
+ "normalized": false,
3761
+ "rstrip": false,
3762
+ "single_word": false,
3763
+ "special": false
3764
+ },
3765
+ "152113": {
3766
+ "content": "</feGaussianBlur>",
3767
+ "lstrip": false,
3768
+ "normalized": false,
3769
+ "rstrip": false,
3770
+ "single_word": false,
3771
+ "special": false
3772
+ },
3773
+ "152114": {
3774
+ "content": "<feColorMatrix",
3775
+ "lstrip": false,
3776
+ "normalized": false,
3777
+ "rstrip": false,
3778
+ "single_word": false,
3779
+ "special": false
3780
+ },
3781
+ "152115": {
3782
+ "content": "</feColorMatrix>",
3783
+ "lstrip": false,
3784
+ "normalized": false,
3785
+ "rstrip": false,
3786
+ "single_word": false,
3787
+ "special": false
3788
+ },
3789
+ "152116": {
3790
+ "content": "<feComposite",
3791
+ "lstrip": false,
3792
+ "normalized": false,
3793
+ "rstrip": false,
3794
+ "single_word": false,
3795
+ "special": false
3796
+ },
3797
+ "152117": {
3798
+ "content": "</feComposite>",
3799
+ "lstrip": false,
3800
+ "normalized": false,
3801
+ "rstrip": false,
3802
+ "single_word": false,
3803
+ "special": false
3804
+ },
3805
+ "152118": {
3806
+ "content": "<feBlend",
3807
+ "lstrip": false,
3808
+ "normalized": false,
3809
+ "rstrip": false,
3810
+ "single_word": false,
3811
+ "special": false
3812
+ },
3813
+ "152119": {
3814
+ "content": "</feBlend>",
3815
+ "lstrip": false,
3816
+ "normalized": false,
3817
+ "rstrip": false,
3818
+ "single_word": false,
3819
+ "special": false
3820
+ },
3821
+ "152120": {
3822
+ "content": "<animate",
3823
+ "lstrip": false,
3824
+ "normalized": false,
3825
+ "rstrip": false,
3826
+ "single_word": false,
3827
+ "special": false
3828
+ },
3829
+ "152121": {
3830
+ "content": "</animate>",
3831
+ "lstrip": false,
3832
+ "normalized": false,
3833
+ "rstrip": false,
3834
+ "single_word": false,
3835
+ "special": false
3836
+ },
3837
+ "152122": {
3838
+ "content": "<animateMotion",
3839
+ "lstrip": false,
3840
+ "normalized": false,
3841
+ "rstrip": false,
3842
+ "single_word": false,
3843
+ "special": false
3844
+ },
3845
+ "152123": {
3846
+ "content": "</animateMotion>",
3847
+ "lstrip": false,
3848
+ "normalized": false,
3849
+ "rstrip": false,
3850
+ "single_word": false,
3851
+ "special": false
3852
+ },
3853
+ "152124": {
3854
+ "content": "<animateTransform",
3855
+ "lstrip": false,
3856
+ "normalized": false,
3857
+ "rstrip": false,
3858
+ "single_word": false,
3859
+ "special": false
3860
+ },
3861
+ "152125": {
3862
+ "content": "</animateTransform>",
3863
+ "lstrip": false,
3864
+ "normalized": false,
3865
+ "rstrip": false,
3866
+ "single_word": false,
3867
+ "special": false
3868
+ }
3869
+ },
3870
+ "additional_special_tokens": [
3871
+ "<|im_start|>",
3872
+ "<|im_end|>",
3873
+ "<|object_ref_start|>",
3874
+ "<|object_ref_end|>",
3875
+ "<|box_start|>",
3876
+ "<|box_end|>",
3877
+ "<|quad_start|>",
3878
+ "<|quad_end|>",
3879
+ "<|vision_start|>",
3880
+ "<|vision_end|>",
3881
+ "<|vision_pad|>",
3882
+ "<|image_pad|>",
3883
+ "<|video_pad|>",
3884
+ "<img>",
3885
+ "</img>",
3886
+ "<IMG_CONTEXT>",
3887
+ "<quad>",
3888
+ "</quad>",
3889
+ "<ref>",
3890
+ "</ref>",
3891
+ "<box>",
3892
+ "</box>"
3893
+ ],
3894
+ "bos_token": null,
3895
+ "clean_up_tokenization_spaces": false,
3896
+ "context_image_token": "<IMG_CONTEXT>",
3897
+ "end_image_token": "</img>",
3898
+ "eos_token": "<|im_end|>",
3899
+ "errors": "replace",
3900
+ "extra_special_tokens": {
3901
+ "context_image_token": "<IMG_CONTEXT>",
3902
+ "end_image_token": "</img>",
3903
+ "start_image_token": "<img>",
3904
+ "video_token": "<video>"
3905
+ },
3906
+ "model_max_length": 16384,
3907
+ "pad_token": "<|endoftext|>",
3908
+ "padding_side": "right",
3909
+ "processor_class": "InternVLProcessor",
3910
+ "return_token_type_ids": false,
3911
+ "split_special_tokens": false,
3912
+ "start_image_token": "<img>",
3913
+ "tokenizer_class": "Qwen2Tokenizer",
3914
+ "unk_token": null,
3915
+ "video_token": "<video>"
3916
+ }
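Note: the entries added above give the tokenizer dedicated single tokens for SVG markup — element tags, attribute prefixes, and decimal fractions. A minimal sketch for sanity-checking the merged vocabulary after this upload; the repo id is a placeholder for wherever this folder was pushed:

```python
from transformers import AutoTokenizer

# Hypothetical repo id -- substitute the repository this folder was uploaded to.
tok = AutoTokenizer.from_pretrained("your-org/this-internvl-checkpoint")

# The diff above pins these strings to fixed ids,
# e.g. ' width="' -> 152032 and '<svg' -> 152074.
for piece in [' width="', '<svg', '</svg>', '.75']:
    print(repr(piece), "->", tok.convert_tokens_to_ids(piece))

# Non-special added tokens also surface when encoding raw SVG markup:
print(tok.tokenize('<svg width="448" height="448"></svg>'))
```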
video_preprocessor_config.json ADDED
@@ -0,0 +1,75 @@
1
+ {
2
+ "_valid_kwargs_names": [
3
+ "do_convert_rgb",
4
+ "do_resize",
5
+ "size",
6
+ "size_divisor",
7
+ "default_to_square",
8
+ "resample",
9
+ "do_rescale",
10
+ "rescale_factor",
11
+ "do_normalize",
12
+ "image_mean",
13
+ "image_std",
14
+ "do_pad",
15
+ "do_center_crop",
16
+ "crop_size",
17
+ "data_format",
18
+ "input_data_format",
19
+ "device"
20
+ ],
21
+ "crop_size": null,
22
+ "crop_to_patches": false,
23
+ "data_format": "channels_first",
24
+ "default_to_square": true,
25
+ "device": null,
26
+ "do_center_crop": null,
27
+ "do_convert_rgb": true,
28
+ "do_normalize": true,
29
+ "do_pad": null,
30
+ "do_rescale": true,
31
+ "do_resize": true,
32
+ "image_mean": [
33
+ 0.485,
34
+ 0.456,
35
+ 0.406
36
+ ],
37
+ "image_processor_type": "GotOcr2ImageProcessorFast",
38
+ "image_std": [
39
+ 0.229,
40
+ 0.224,
41
+ 0.225
42
+ ],
43
+ "input_data_format": null,
44
+ "max_patches": 12,
45
+ "min_patches": 1,
46
+ "model_valid_processing_keys": [
47
+ "do_convert_rgb",
48
+ "do_resize",
49
+ "size",
50
+ "size_divisor",
51
+ "default_to_square",
52
+ "resample",
53
+ "do_rescale",
54
+ "rescale_factor",
55
+ "do_normalize",
56
+ "image_mean",
57
+ "image_std",
58
+ "do_pad",
59
+ "do_center_crop",
60
+ "crop_size",
61
+ "data_format",
62
+ "input_data_format",
63
+ "device"
64
+ ],
65
+ "processor_class": "InternVLProcessor",
66
+ "resample": 3,
67
+ "rescale_factor": 0.00392156862745098,
68
+ "return_tensors": null,
69
+ "size": {
70
+ "height": 448,
71
+ "width": 448
72
+ },
73
+ "size_divisor": null,
74
+ "video_processor_type": "InternVLVideoProcessor"
75
+ }
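Note: this video preprocessor config encodes a standard ImageNet-style pipeline — RGB conversion, 448×448 resize (resample 3 is bicubic in PIL's numbering), rescale by 1/255, then mean/std normalization in channels-first layout. A minimal sketch of the equivalent manual transform, assuming a PIL frame as input:

```python
import numpy as np
from PIL import Image

IMAGE_MEAN = np.array([0.485, 0.456, 0.406], dtype=np.float32)
IMAGE_STD = np.array([0.229, 0.224, 0.225], dtype=np.float32)

def preprocess_frame(frame: Image.Image) -> np.ndarray:
    """Replicate the config above: RGB convert, 448x448 bicubic resize,
    rescale by 1/255, ImageNet normalization, channels-first output."""
    frame = frame.convert("RGB")                     # do_convert_rgb
    frame = frame.resize((448, 448), Image.BICUBIC)  # do_resize, resample=3
    x = np.asarray(frame, dtype=np.float32)
    x *= 0.00392156862745098                         # do_rescale, 1/255
    x = (x - IMAGE_MEAN) / IMAGE_STD                 # do_normalize
    return x.transpose(2, 0, 1)                      # data_format: channels_first
```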
vocab.json ADDED
The diff for this file is too large to render. See raw diff
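Note: since the vocab.json diff is not rendered inline, the file can be inspected by downloading it directly — a sketch using huggingface_hub, with the repo id again a placeholder:

```python
import json
from huggingface_hub import hf_hub_download

# Hypothetical repo id -- point this at the repository the commit was pushed to.
path = hf_hub_download("your-org/this-internvl-checkpoint", "vocab.json")
with open(path, encoding="utf-8") as f:
    vocab = json.load(f)
print(len(vocab), "base vocabulary entries")
```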