Paranioar committed on
Commit 68cabed · verified · 1 Parent(s): 5809b85

Upload folder using huggingface_hub

__init__.py ADDED
File without changes
added_tokens.json ADDED
@@ -0,0 +1,295 @@
+ {
+   "</box>": 151677,
+   "</img>": 151671,
+   "</quad>": 151673,
+   "</ref>": 151675,
+   "</think>": 151668,
+   "</tool_call>": 151658,
+   "</tool_response>": 151666,
+   "<FAKE_PAD_0>": 151682,
+   "<FAKE_PAD_100>": 151782,
+   "<FAKE_PAD_101>": 151783,
+   "<FAKE_PAD_102>": 151784,
+   "<FAKE_PAD_103>": 151785,
+   "<FAKE_PAD_104>": 151786,
+   "<FAKE_PAD_105>": 151787,
+   "<FAKE_PAD_106>": 151788,
+   "<FAKE_PAD_107>": 151789,
+   "<FAKE_PAD_108>": 151790,
+   "<FAKE_PAD_109>": 151791,
+   "<FAKE_PAD_10>": 151692,
+   "<FAKE_PAD_110>": 151792,
+   "<FAKE_PAD_111>": 151793,
+   "<FAKE_PAD_112>": 151794,
+   "<FAKE_PAD_113>": 151795,
+   "<FAKE_PAD_114>": 151796,
+   "<FAKE_PAD_115>": 151797,
+   "<FAKE_PAD_116>": 151798,
+   "<FAKE_PAD_117>": 151799,
+   "<FAKE_PAD_118>": 151800,
+   "<FAKE_PAD_119>": 151801,
+   "<FAKE_PAD_11>": 151693,
+   "<FAKE_PAD_120>": 151802,
+   "<FAKE_PAD_121>": 151803,
+   "<FAKE_PAD_122>": 151804,
+   "<FAKE_PAD_123>": 151805,
+   "<FAKE_PAD_124>": 151806,
+   "<FAKE_PAD_125>": 151807,
+   "<FAKE_PAD_126>": 151808,
+   "<FAKE_PAD_127>": 151809,
+   "<FAKE_PAD_128>": 151810,
+   "<FAKE_PAD_129>": 151811,
+   "<FAKE_PAD_12>": 151694,
+   "<FAKE_PAD_130>": 151812,
+   "<FAKE_PAD_131>": 151813,
+   "<FAKE_PAD_132>": 151814,
+   "<FAKE_PAD_133>": 151815,
+   "<FAKE_PAD_134>": 151816,
+   "<FAKE_PAD_135>": 151817,
+   "<FAKE_PAD_136>": 151818,
+   "<FAKE_PAD_137>": 151819,
+   "<FAKE_PAD_138>": 151820,
+   "<FAKE_PAD_139>": 151821,
+   "<FAKE_PAD_13>": 151695,
+   "<FAKE_PAD_140>": 151822,
+   "<FAKE_PAD_141>": 151823,
+   "<FAKE_PAD_142>": 151824,
+   "<FAKE_PAD_143>": 151825,
+   "<FAKE_PAD_144>": 151826,
+   "<FAKE_PAD_145>": 151827,
+   "<FAKE_PAD_146>": 151828,
+   "<FAKE_PAD_147>": 151829,
+   "<FAKE_PAD_148>": 151830,
+   "<FAKE_PAD_149>": 151831,
+   "<FAKE_PAD_14>": 151696,
+   "<FAKE_PAD_150>": 151832,
+   "<FAKE_PAD_151>": 151833,
+   "<FAKE_PAD_152>": 151834,
+   "<FAKE_PAD_153>": 151835,
+   "<FAKE_PAD_154>": 151836,
+   "<FAKE_PAD_155>": 151837,
+   "<FAKE_PAD_156>": 151838,
+   "<FAKE_PAD_157>": 151839,
+   "<FAKE_PAD_158>": 151840,
+   "<FAKE_PAD_159>": 151841,
+   "<FAKE_PAD_15>": 151697,
+   "<FAKE_PAD_160>": 151842,
+   "<FAKE_PAD_161>": 151843,
+   "<FAKE_PAD_162>": 151844,
+   "<FAKE_PAD_163>": 151845,
+   "<FAKE_PAD_164>": 151846,
+   "<FAKE_PAD_165>": 151847,
+   "<FAKE_PAD_166>": 151848,
+   "<FAKE_PAD_167>": 151849,
+   "<FAKE_PAD_168>": 151850,
+   "<FAKE_PAD_169>": 151851,
+   "<FAKE_PAD_16>": 151698,
+   "<FAKE_PAD_170>": 151852,
+   "<FAKE_PAD_171>": 151853,
+   "<FAKE_PAD_172>": 151854,
+   "<FAKE_PAD_173>": 151855,
+   "<FAKE_PAD_174>": 151856,
+   "<FAKE_PAD_175>": 151857,
+   "<FAKE_PAD_176>": 151858,
+   "<FAKE_PAD_177>": 151859,
+   "<FAKE_PAD_178>": 151860,
+   "<FAKE_PAD_179>": 151861,
+   "<FAKE_PAD_17>": 151699,
+   "<FAKE_PAD_180>": 151862,
+   "<FAKE_PAD_181>": 151863,
+   "<FAKE_PAD_182>": 151864,
+   "<FAKE_PAD_183>": 151865,
+   "<FAKE_PAD_184>": 151866,
+   "<FAKE_PAD_185>": 151867,
+   "<FAKE_PAD_186>": 151868,
+   "<FAKE_PAD_187>": 151869,
+   "<FAKE_PAD_188>": 151870,
+   "<FAKE_PAD_189>": 151871,
+   "<FAKE_PAD_18>": 151700,
+   "<FAKE_PAD_190>": 151872,
+   "<FAKE_PAD_191>": 151873,
+   "<FAKE_PAD_192>": 151874,
+   "<FAKE_PAD_193>": 151875,
+   "<FAKE_PAD_194>": 151876,
+   "<FAKE_PAD_195>": 151877,
+   "<FAKE_PAD_196>": 151878,
+   "<FAKE_PAD_197>": 151879,
+   "<FAKE_PAD_198>": 151880,
+   "<FAKE_PAD_199>": 151881,
+   "<FAKE_PAD_19>": 151701,
+   "<FAKE_PAD_1>": 151683,
+   "<FAKE_PAD_200>": 151882,
+   "<FAKE_PAD_201>": 151883,
+   "<FAKE_PAD_202>": 151884,
+   "<FAKE_PAD_203>": 151885,
+   "<FAKE_PAD_204>": 151886,
+   "<FAKE_PAD_205>": 151887,
+   "<FAKE_PAD_206>": 151888,
+   "<FAKE_PAD_207>": 151889,
+   "<FAKE_PAD_208>": 151890,
+   "<FAKE_PAD_209>": 151891,
+   "<FAKE_PAD_20>": 151702,
+   "<FAKE_PAD_210>": 151892,
+   "<FAKE_PAD_211>": 151893,
+   "<FAKE_PAD_212>": 151894,
+   "<FAKE_PAD_213>": 151895,
+   "<FAKE_PAD_214>": 151896,
+   "<FAKE_PAD_215>": 151897,
+   "<FAKE_PAD_216>": 151898,
+   "<FAKE_PAD_217>": 151899,
+   "<FAKE_PAD_218>": 151900,
+   "<FAKE_PAD_219>": 151901,
+   "<FAKE_PAD_21>": 151703,
+   "<FAKE_PAD_220>": 151902,
+   "<FAKE_PAD_221>": 151903,
+   "<FAKE_PAD_222>": 151904,
+   "<FAKE_PAD_223>": 151905,
+   "<FAKE_PAD_224>": 151906,
+   "<FAKE_PAD_225>": 151907,
+   "<FAKE_PAD_226>": 151908,
+   "<FAKE_PAD_227>": 151909,
+   "<FAKE_PAD_228>": 151910,
+   "<FAKE_PAD_229>": 151911,
+   "<FAKE_PAD_22>": 151704,
+   "<FAKE_PAD_230>": 151912,
+   "<FAKE_PAD_231>": 151913,
+   "<FAKE_PAD_232>": 151914,
+   "<FAKE_PAD_233>": 151915,
+   "<FAKE_PAD_234>": 151916,
+   "<FAKE_PAD_235>": 151917,
+   "<FAKE_PAD_236>": 151918,
+   "<FAKE_PAD_237>": 151919,
+   "<FAKE_PAD_238>": 151920,
+   "<FAKE_PAD_239>": 151921,
+   "<FAKE_PAD_23>": 151705,
+   "<FAKE_PAD_240>": 151922,
+   "<FAKE_PAD_241>": 151923,
+   "<FAKE_PAD_242>": 151924,
+   "<FAKE_PAD_243>": 151925,
+   "<FAKE_PAD_244>": 151926,
+   "<FAKE_PAD_245>": 151927,
+   "<FAKE_PAD_246>": 151928,
+   "<FAKE_PAD_247>": 151929,
+   "<FAKE_PAD_248>": 151930,
+   "<FAKE_PAD_249>": 151931,
+   "<FAKE_PAD_24>": 151706,
+   "<FAKE_PAD_250>": 151932,
+   "<FAKE_PAD_251>": 151933,
+   "<FAKE_PAD_252>": 151934,
+   "<FAKE_PAD_253>": 151935,
+   "<FAKE_PAD_25>": 151707,
+   "<FAKE_PAD_26>": 151708,
+   "<FAKE_PAD_27>": 151709,
+   "<FAKE_PAD_28>": 151710,
+   "<FAKE_PAD_29>": 151711,
+   "<FAKE_PAD_2>": 151684,
+   "<FAKE_PAD_30>": 151712,
+   "<FAKE_PAD_31>": 151713,
+   "<FAKE_PAD_32>": 151714,
+   "<FAKE_PAD_33>": 151715,
+   "<FAKE_PAD_34>": 151716,
+   "<FAKE_PAD_35>": 151717,
+   "<FAKE_PAD_36>": 151718,
+   "<FAKE_PAD_37>": 151719,
+   "<FAKE_PAD_38>": 151720,
+   "<FAKE_PAD_39>": 151721,
+   "<FAKE_PAD_3>": 151685,
+   "<FAKE_PAD_40>": 151722,
+   "<FAKE_PAD_41>": 151723,
+   "<FAKE_PAD_42>": 151724,
+   "<FAKE_PAD_43>": 151725,
+   "<FAKE_PAD_44>": 151726,
+   "<FAKE_PAD_45>": 151727,
+   "<FAKE_PAD_46>": 151728,
+   "<FAKE_PAD_47>": 151729,
+   "<FAKE_PAD_48>": 151730,
+   "<FAKE_PAD_49>": 151731,
+   "<FAKE_PAD_4>": 151686,
+   "<FAKE_PAD_50>": 151732,
+   "<FAKE_PAD_51>": 151733,
+   "<FAKE_PAD_52>": 151734,
+   "<FAKE_PAD_53>": 151735,
+   "<FAKE_PAD_54>": 151736,
+   "<FAKE_PAD_55>": 151737,
+   "<FAKE_PAD_56>": 151738,
+   "<FAKE_PAD_57>": 151739,
+   "<FAKE_PAD_58>": 151740,
+   "<FAKE_PAD_59>": 151741,
+   "<FAKE_PAD_5>": 151687,
+   "<FAKE_PAD_60>": 151742,
+   "<FAKE_PAD_61>": 151743,
+   "<FAKE_PAD_62>": 151744,
+   "<FAKE_PAD_63>": 151745,
+   "<FAKE_PAD_64>": 151746,
+   "<FAKE_PAD_65>": 151747,
+   "<FAKE_PAD_66>": 151748,
+   "<FAKE_PAD_67>": 151749,
+   "<FAKE_PAD_68>": 151750,
+   "<FAKE_PAD_69>": 151751,
+   "<FAKE_PAD_6>": 151688,
+   "<FAKE_PAD_70>": 151752,
+   "<FAKE_PAD_71>": 151753,
+   "<FAKE_PAD_72>": 151754,
+   "<FAKE_PAD_73>": 151755,
+   "<FAKE_PAD_74>": 151756,
+   "<FAKE_PAD_75>": 151757,
+   "<FAKE_PAD_76>": 151758,
+   "<FAKE_PAD_77>": 151759,
+   "<FAKE_PAD_78>": 151760,
+   "<FAKE_PAD_79>": 151761,
+   "<FAKE_PAD_7>": 151689,
+   "<FAKE_PAD_80>": 151762,
+   "<FAKE_PAD_81>": 151763,
+   "<FAKE_PAD_82>": 151764,
+   "<FAKE_PAD_83>": 151765,
+   "<FAKE_PAD_84>": 151766,
+   "<FAKE_PAD_85>": 151767,
+   "<FAKE_PAD_86>": 151768,
+   "<FAKE_PAD_87>": 151769,
+   "<FAKE_PAD_88>": 151770,
+   "<FAKE_PAD_89>": 151771,
+   "<FAKE_PAD_8>": 151690,
+   "<FAKE_PAD_90>": 151772,
+   "<FAKE_PAD_91>": 151773,
+   "<FAKE_PAD_92>": 151774,
+   "<FAKE_PAD_93>": 151775,
+   "<FAKE_PAD_94>": 151776,
+   "<FAKE_PAD_95>": 151777,
+   "<FAKE_PAD_96>": 151778,
+   "<FAKE_PAD_97>": 151779,
+   "<FAKE_PAD_98>": 151780,
+   "<FAKE_PAD_99>": 151781,
+   "<FAKE_PAD_9>": 151691,
+   "<IMG_CONTEXT>": 151669,
+   "<box>": 151676,
+   "<img>": 151670,
+   "<quad>": 151672,
+   "<ref>": 151674,
+   "<think>": 151667,
+   "<tool_call>": 151657,
+   "<tool_response>": 151665,
+   "<|action_end|>": 151679,
+   "<|action_start|>": 151678,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|interpreter|>": 151681,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|plugin|>": 151680,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
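The map above pins every special-token string to a fixed id; everything from 151643 upward is reserved, and the <FAKE_PAD_*> entries appear to pad the table out to the vocab_size of 151936 declared in config.json below. A minimal sketch of checking these ids once the checkpoint is on disk; "./NEO-checkpoint" is a hypothetical local copy, not a path from this commit:

from transformers import AutoTokenizer

# trust_remote_code is needed because the config/model classes ship in this repo.
tokenizer = AutoTokenizer.from_pretrained('./NEO-checkpoint', trust_remote_code=True)

# Ids should match added_tokens.json exactly.
assert tokenizer.convert_tokens_to_ids('<IMG_CONTEXT>') == 151669
assert tokenizer.convert_tokens_to_ids('<|im_end|>') == 151645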
config.json ADDED
@@ -0,0 +1,77 @@
+ {
+   "architectures": [
+     "NEOChatModel"
+   ],
+   "auto_map": {
+     "AutoConfig": "configuration_neo_chat.NEOChatConfig",
+     "AutoModel": "modeling_neo_chat.NEOChatModel",
+     "AutoModelForCausalLM": "modeling_neo_chat.NEOChatModel"
+   },
+   "downsample_ratio": 0.5,
+   "eos_token_id": 151645,
+   "llm_config": {
+     "_name_or_path": null,
+     "architectures": [
+       "Qwen3ForCausalLM"
+     ],
+     "attention_bias": false,
+     "attention_dropout": 0.0,
+     "bos_token_id": 151643,
+     "eos_token_id": 151645,
+     "head_dim": 128,
+     "hidden_act": "silu",
+     "hidden_size": 2048,
+     "intermediate_size": 6144,
+     "max_position_embeddings": 40960,
+     "max_position_embeddings_hw": 10000,
+     "max_window_layers": 40,
+     "model_type": "qwen3",
+     "num_attention_heads": 16,
+     "num_hidden_layers": 40,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-06,
+     "rope_scaling": null,
+     "rope_theta": 1000000.0,
+     "rope_theta_hw": 10000.0,
+     "sliding_window": null,
+     "torch_dtype": "bfloat16",
+     "use_cache": false,
+     "use_deepep": false,
+     "use_sliding_window": false,
+     "vocab_size": 151936,
+     "pure_llm": false
+   },
+   "model_type": "neo_chat",
+   "pad_token_id": 151643,
+   "template": "neo1_0",
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.37.2",
+   "use_backbone_lora": 0,
+   "use_llm_lora": 0,
+   "min_pixels": 65536,
+   "max_pixels": 4194304,
+   "patch_size": 16,
+   "vision_config": {
+     "architectures": [
+       "NEOVisionModel"
+     ],
+     "attention_dropout": 0.0,
+     "auto_map": {
+       "AutoConfig": "configuration_neo_vit.NEOVisionConfig",
+       "AutoModel": "modeling_neo_vit.NEOVisionModel"
+     },
+     "llm_hidden_size": 2048,
+     "downsample_ratio": 0.5,
+     "hidden_size": 1024,
+     "model_type": "neo_vision",
+     "rope_theta_vision": 10000.0,
+     "max_position_embeddings_vision": 10000,
+     "num_channels": 3,
+     "patch_size": 16,
+     "torch_dtype": "bfloat16",
+     "transformers_version": "4.37.2",
+     "min_pixels": 65536,
+     "max_pixels": 4194304
+   }
+ }
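config.json nests a Qwen3-based llm_config inside the composite neo_chat config, and its auto_map points AutoConfig/AutoModel at the custom classes added below. A sketch of loading it (same hypothetical local path as above):

from transformers import AutoConfig

config = AutoConfig.from_pretrained('./NEO-checkpoint', trust_remote_code=True)
print(config.model_type)                # neo_chat
print(config.llm_config.hidden_size)    # 2048
print(config.vision_config.patch_size)  # 16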
configuration_neo_chat.py ADDED
@@ -0,0 +1,77 @@
+ import copy
+
+ from transformers import Qwen3Config
+ from transformers.utils import logging
+ from transformers.configuration_utils import PretrainedConfig
+
+ from .configuration_neo_vit import NEOVisionConfig
+
+
+ logger = logging.get_logger(__name__)
+
+
+ class NEOLLMConfig(Qwen3Config):
+     def __init__(self, rope_theta_hw=10000.0, max_position_embeddings_hw=10000, **kwargs):
+         super().__init__(**kwargs)
+         self.rope_theta_hw = rope_theta_hw
+         self.max_position_embeddings_hw = max_position_embeddings_hw
+
+
+ class NEOChatConfig(PretrainedConfig):
+     model_type = 'neo_chat'
+     is_composition = True
+
+     def __init__(
+         self,
+         vision_config=None,
+         llm_config=None,
+         use_backbone_lora=0,
+         use_llm_lora=0,
+         downsample_ratio=0.5,
+         template=None,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+
+         if vision_config is None:
+             vision_config = {'architectures': ['NEOVisionModel']}
+             logger.info('vision_config is None. Initializing the NEOVisionConfig with default values.')
+
+         if llm_config is None:
+             llm_config = {'architectures': ['Qwen3ForCausalLM']}
+             logger.info('llm_config is None. Initializing the llm_config with default values (`NEOLLMConfig`).')
+         assert 'architectures' in llm_config, "Should specify architecture in llm_config"
+
+         if isinstance(vision_config, dict):
+             self.vision_config = NEOVisionConfig(**vision_config)
+         else:
+             self.vision_config = vision_config
+
+         if isinstance(llm_config, dict):
+             self.llm_config = NEOLLMConfig(**llm_config)
+         else:
+             self.llm_config = llm_config
+
+         self.use_backbone_lora = use_backbone_lora
+         self.use_llm_lora = use_llm_lora
+         self.downsample_ratio = downsample_ratio
+         self.template = template
+         self.tie_word_embeddings = self.llm_config.tie_word_embeddings
+
+     def to_dict(self):
+         """
+         Serializes this instance to a Python dictionary. Overrides the default [`~PretrainedConfig.to_dict`].
+
+         Returns:
+             `Dict[str, any]`: Dictionary of all the attributes that make up this configuration instance.
+         """
+         output = copy.deepcopy(self.__dict__)
+         output['vision_config'] = self.vision_config.to_dict()
+         output['llm_config'] = self.llm_config.to_dict()
+         output['model_type'] = self.__class__.model_type
+         output['use_backbone_lora'] = self.use_backbone_lora
+         output['use_llm_lora'] = self.use_llm_lora
+         output['downsample_ratio'] = self.downsample_ratio
+         output['template'] = self.template
+
+         return output
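A sketch of building the composite config directly from dicts, which is what __init__ above accepts; the package name "neo" is an assumption (the repo ships an __init__.py, so a clone under an importable directory name works), and the hidden_size value is illustrative:

# Assumes the repository is cloned into an importable package directory, e.g. "neo/".
from neo.configuration_neo_chat import NEOChatConfig

cfg = NEOChatConfig(
    vision_config={'architectures': ['NEOVisionModel']},
    llm_config={'architectures': ['Qwen3ForCausalLM'], 'hidden_size': 2048},
    template='neo1_0',
)
d = cfg.to_dict()
assert d['model_type'] == 'neo_chat'
assert d['llm_config']['hidden_size'] == 2048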
configuration_neo_vit.py ADDED
@@ -0,0 +1,52 @@
+ import os
+ from typing import Union
+
+ from transformers.configuration_utils import PretrainedConfig
+ from transformers.utils import logging
+
+ logger = logging.get_logger(__name__)
+
+
+ class NEOVisionConfig(PretrainedConfig):
+
+     model_type = 'neo_vision'
+
+     def __init__(
+         self,
+         num_channels=3,
+         patch_size=16,
+         hidden_size=1024,
+         llm_hidden_size=2048,
+         downsample_ratio=0.5,
+         rope_theta_vision=10000.0,
+         max_position_embeddings_vision=10000,
+         min_pixels=65536,
+         max_pixels=4194304,
+         **kwargs,
+     ):
+         super().__init__(**kwargs)
+
+         self.hidden_size = hidden_size
+         self.llm_hidden_size = llm_hidden_size
+         self.downsample_ratio = downsample_ratio
+         self.rope_theta_vision = rope_theta_vision
+         self.max_position_embeddings_vision = max_position_embeddings_vision
+         self.num_channels = num_channels
+         self.patch_size = patch_size
+         self.min_pixels = min_pixels
+         self.max_pixels = max_pixels
+
+     @classmethod
+     def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> 'PretrainedConfig':
+         config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
+
+         if 'vision_config' in config_dict:
+             config_dict = config_dict['vision_config']
+
+         if 'model_type' in config_dict and hasattr(cls, 'model_type') and config_dict['model_type'] != cls.model_type:
+             logger.warning(
+                 f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
+                 f'{cls.model_type}. This is not supported for all configurations of models and can yield errors.'
+             )
+
+         return cls.from_dict(config_dict, **kwargs)
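The from_pretrained override above lets the vision config be read straight out of the composite config.json: get_config_dict returns the whole file and the method then descends into its vision_config block. A sketch, under the same hypothetical path and package assumptions as above:

from neo.configuration_neo_vit import NEOVisionConfig

vit_cfg = NEOVisionConfig.from_pretrained('./NEO-checkpoint')
print(vit_cfg.hidden_size, vit_cfg.patch_size)  # 1024 16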
conversation.py ADDED
@@ -0,0 +1,394 @@
+ """
+ Conversation prompt templates.
+
+ We kindly request that you import fastchat instead of copying this file if you wish to use it.
+ If you have changes in mind, please contribute back so the community can benefit collectively and continue to maintain these valuable templates.
+
+ Modified from https://github.com/lm-sys/FastChat/blob/main/fastchat/conversation.py
+ """
+
+ import dataclasses
+ from enum import IntEnum, auto
+ from typing import Dict, List, Tuple, Union
+
+
+ class SeparatorStyle(IntEnum):
+     """Separator styles."""
+
+     ADD_COLON_SINGLE = auto()
+     ADD_COLON_TWO = auto()
+     ADD_COLON_SPACE_SINGLE = auto()
+     NO_COLON_SINGLE = auto()
+     NO_COLON_TWO = auto()
+     ADD_NEW_LINE_SINGLE = auto()
+     LLAMA2 = auto()
+     CHATGLM = auto()
+     CHATML = auto()
+     CHATINTERN = auto()
+     DOLLY = auto()
+     RWKV = auto()
+     PHOENIX = auto()
+     ROBIN = auto()
+     FALCON_CHAT = auto()
+     CHATGLM3 = auto()
+     INTERNVL_ZH = auto()
+     MPT = auto()
+
+
+ @dataclasses.dataclass
+ class Conversation:
+     """A class that manages prompt templates and keeps all conversation history."""
+
+     # The name of this template
+     name: str
+     # The template of the system prompt
+     system_template: str = '{system_message}'
+     # The system message
+     system_message: str = ''
+     # The names of two roles
+     roles: Tuple[str] = ('USER', 'ASSISTANT')
+     # All messages. Each item is (role, message).
+     messages: List[List[str]] = ()
+     # The number of few shot examples
+     offset: int = 0
+     # The separator style and configurations
+     sep_style: SeparatorStyle = SeparatorStyle.ADD_COLON_SINGLE
+     sep: str = '\n'
+     sep2: str = None
+     # Stop criteria (the default one is EOS token)
+     stop_str: Union[str, List[str]] = None
+     # Stops generation if meeting any token in this list
+     stop_token_ids: List[int] = None
+
+     def get_prompt(self) -> str:
+         """Get the prompt for generation."""
+         if self.system_message is not None and self.system_message != '':
+             system_prompt = self.system_template.format(system_message=self.system_message)
+         else:
+             system_prompt = ''
+
+         if self.sep_style == SeparatorStyle.ADD_COLON_SINGLE:
+             ret = '' if system_prompt == '' else system_prompt + self.sep
+             for role, message in self.messages:
+                 if message:
+                     ret += role + ': ' + message + self.sep
+                 else:
+                     ret += role + ':'
+             return ret
+         elif self.sep_style == SeparatorStyle.ADD_COLON_TWO:
+             seps = [self.sep, self.sep2]
+             ret = '' if system_prompt == '' else system_prompt + seps[0]
+             for i, (role, message) in enumerate(self.messages):
+                 if message:
+                     ret += role + ': ' + message + seps[i % 2]
+                 else:
+                     ret += role + ':'
+             return ret
+         elif self.sep_style == SeparatorStyle.ADD_COLON_SPACE_SINGLE:
+             ret = '' if system_prompt == '' else system_prompt + self.sep
+             for role, message in self.messages:
+                 if message:
+                     ret += role + ': ' + message + self.sep
+                 else:
+                     ret += role + ': '  # must end with a space
+             return ret
+         elif self.sep_style == SeparatorStyle.ADD_NEW_LINE_SINGLE:
+             ret = '' if system_prompt == '' else system_prompt + self.sep
+             for role, message in self.messages:
+                 if message:
+                     ret += role + '\n' + message + self.sep
+                 else:
+                     ret += role + '\n'
+             return ret
+         elif self.sep_style == SeparatorStyle.NO_COLON_SINGLE:
+             ret = system_prompt
+             for role, message in self.messages:
+                 if message:
+                     ret += role + message + self.sep
+                 else:
+                     ret += role
+             return ret
+         elif self.sep_style == SeparatorStyle.NO_COLON_TWO:
+             seps = [self.sep, self.sep2]
+             ret = system_prompt
+             for i, (role, message) in enumerate(self.messages):
+                 if message:
+                     ret += role + message + seps[i % 2]
+                 else:
+                     ret += role
+             return ret
+         elif self.sep_style == SeparatorStyle.RWKV:
+             ret = system_prompt
+             for i, (role, message) in enumerate(self.messages):
+                 if message:
+                     ret += (
+                         role
+                         + ': '
+                         + message.replace('\r\n', '\n').replace('\n\n', '\n')
+                     )
+                     ret += '\n\n'
+                 else:
+                     ret += role + ':'
+             return ret
+         elif self.sep_style == SeparatorStyle.LLAMA2:
+             seps = [self.sep, self.sep2]
+             ret = system_prompt if system_prompt != '' else '[INST] '
+             for i, (role, message) in enumerate(self.messages):
+                 tag = self.roles[i % 2]
+                 if message:
+                     if i == 0:
+                         ret += message + ' '
+                     else:
+                         ret += tag + ' ' + message + seps[i % 2]
+                 else:
+                     ret += tag
+             return ret
+         elif self.sep_style == SeparatorStyle.CHATGLM:
+             # source: https://huggingface.co/THUDM/chatglm-6b/blob/1d240ba371910e9282298d4592532d7f0f3e9f3e/modeling_chatglm.py#L1302-L1308
+             # source2: https://huggingface.co/THUDM/chatglm2-6b/blob/e186c891cf64310ac66ef10a87e6635fa6c2a579/modeling_chatglm.py#L926
+             round_add_n = 1 if self.name == 'chatglm2' else 0
+             ret = '' if system_prompt == '' else system_prompt + self.sep
+
+             for i, (role, message) in enumerate(self.messages):
+                 if i % 2 == 0:
+                     ret += f'[Round {i//2 + round_add_n}]{self.sep}'
+
+                 if message:
+                     ret += f'{role}:{message}{self.sep}'
+                 else:
+                     ret += f'{role}:'
+             return ret
+         elif self.sep_style == SeparatorStyle.CHATML:
+             ret = '' if system_prompt == '' else system_prompt + self.sep + '\n'
+             for role, message in self.messages:
+                 if message:
+                     ret += role + '\n' + message + self.sep + '\n'
+                 else:
+                     ret += role + '\n'
+             return ret
+         elif self.sep_style == SeparatorStyle.CHATGLM3:
+             ret = system_prompt
+             for role, message in self.messages:
+                 if message:
+                     ret += role + '\n' + ' ' + message
+                 else:
+                     ret += role
+             return ret
+         elif self.sep_style == SeparatorStyle.CHATINTERN:
+             # source: https://huggingface.co/internlm/internlm-chat-7b-8k/blob/bd546fa984b4b0b86958f56bf37f94aa75ab8831/modeling_internlm.py#L771
+             seps = [self.sep, self.sep2]
+             ret = system_prompt
+             for i, (role, message) in enumerate(self.messages):
+                 # if i % 2 == 0:
+                 #     ret += "<s>"
+                 if message:
+                     ret += role + ':' + message + seps[i % 2] + '\n'
+                 else:
+                     ret += role + ':'
+             return ret
+         elif self.sep_style == SeparatorStyle.DOLLY:
+             seps = [self.sep, self.sep2]
+             ret = system_prompt
+             for i, (role, message) in enumerate(self.messages):
+                 if message:
+                     ret += role + ':\n' + message + seps[i % 2]
+                     if i % 2 == 1:
+                         ret += '\n\n'
+                 else:
+                     ret += role + ':\n'
+             return ret
+         elif self.sep_style == SeparatorStyle.PHOENIX:
+             ret = system_prompt
+             for role, message in self.messages:
+                 if message:
+                     ret += role + ': ' + '<s>' + message + '</s>'
+                 else:
+                     ret += role + ': ' + '<s>'
+             return ret
+         elif self.sep_style == SeparatorStyle.ROBIN:
+             ret = '' if system_prompt == '' else system_prompt + self.sep
+             for role, message in self.messages:
+                 if message:
+                     ret += role + ':\n' + message + self.sep
+                 else:
+                     ret += role + ':\n'
+             return ret
+         elif self.sep_style == SeparatorStyle.FALCON_CHAT:
+             ret = '' if system_prompt == '' else system_prompt + self.sep
+             for role, message in self.messages:
+                 if message:
+                     ret += role + ': ' + message + self.sep
+                 else:
+                     ret += role + ':'
+
+             return ret
+         elif self.sep_style == SeparatorStyle.INTERNVL_ZH:
+             seps = [self.sep, self.sep2]
+             ret = '' if system_prompt == '' else self.system_message + seps[0]
+             for i, (role, message) in enumerate(self.messages):
+                 if message:
+                     ret += role + ': ' + message + seps[i % 2]
+                 else:
+                     ret += role + ':'
+             return ret
+         elif self.sep_style == SeparatorStyle.MPT:
+             ret = '' if system_prompt == '' else system_prompt + self.sep
+             for role, message in self.messages:
+                 if message:
+                     if type(message) is tuple:
+                         message, _, _ = message
+                     ret += role + message + self.sep
+                 else:
+                     ret += role
+             return ret
+         else:
+             raise ValueError(f'Invalid style: {self.sep_style}')
+
+     def set_system_message(self, system_message: str):
+         """Set the system message."""
+         self.system_message = system_message
+
+     def append_message(self, role: str, message: str):
+         """Append a new message."""
+         self.messages.append([role, message])
+
+     def update_last_message(self, message: str):
+         """Update the last output.
+
+         The last message is typically set to be None when constructing the prompt,
+         so we need to update it in-place after getting the response from a model.
+         """
+         self.messages[-1][1] = message
+
+     def to_gradio_chatbot(self):
+         """Convert the conversation to gradio chatbot format."""
+         ret = []
+         for i, (role, msg) in enumerate(self.messages[self.offset :]):
+             if i % 2 == 0:
+                 ret.append([msg, None])
+             else:
+                 ret[-1][-1] = msg
+         return ret
+
+     def to_openai_api_messages(self):
+         """Convert the conversation to OpenAI chat completion format."""
+         ret = [{'role': 'system', 'content': self.system_message}]
+
+         for i, (_, msg) in enumerate(self.messages[self.offset :]):
+             if i % 2 == 0:
+                 ret.append({'role': 'user', 'content': msg})
+             else:
+                 if msg is not None:
+                     ret.append({'role': 'assistant', 'content': msg})
+         return ret
+
+     def copy(self):
+         return Conversation(
+             name=self.name,
+             system_template=self.system_template,
+             system_message=self.system_message,
+             roles=self.roles,
+             messages=[[x, y] for x, y in self.messages],
+             offset=self.offset,
+             sep_style=self.sep_style,
+             sep=self.sep,
+             sep2=self.sep2,
+             stop_str=self.stop_str,
+             stop_token_ids=self.stop_token_ids,
+         )
+
+     def dict(self):
+         return {
+             'template_name': self.name,
+             'system_message': self.system_message,
+             'roles': self.roles,
+             'messages': self.messages,
+             'offset': self.offset,
+         }
+
+
+ # A global registry for all conversation templates
+ conv_templates: Dict[str, Conversation] = {}
+
+
+ def register_conv_template(template: Conversation, override: bool = False):
+     """Register a new conversation template."""
+     if not override:
+         assert (
+             template.name not in conv_templates
+         ), f'{template.name} has been registered.'
+
+     conv_templates[template.name] = template
+
+
+ def get_conv_template(name: str) -> Conversation:
+     """Get a conversation template."""
+     return conv_templates[name].copy()
+
+
+ # Both Hermes-2 and neo1_0-chat are chatml-format conversation templates. The difference
+ # is that during training, the preprocessing function for the Hermes-2 template doesn't add
+ # <s> at the beginning of the tokenized sequence, while the neo1_0-chat template does.
+ # Therefore, they are completely equivalent during inference.
+
+ register_conv_template(
+     Conversation(
+         name='Hermes-2',
+         system_template='<|im_start|>system\n{system_message}',
+         # note: The new system prompt was not used here to avoid changes in benchmark performance.
+         # system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室、清华大学及多家合作单位联合开发的多模态大语言模型。',
+         system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
+         roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
+         sep_style=SeparatorStyle.MPT,
+         sep='<|im_end|>',
+         stop_str='<|endoftext|>',
+     )
+ )
+
+ register_conv_template(
+     Conversation(
+         name='internlm2-chat',
+         system_template='<|im_start|>system\n{system_message}',
+         # note: The new system prompt was not used here to avoid changes in benchmark performance.
+         # system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室、清华大学及多家合作单位联合开发的多模态大语言模型。',
+         system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
+         roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
+         sep_style=SeparatorStyle.MPT,
+         sep='<|im_end|>',
+     )
+ )
+
+ register_conv_template(
+     Conversation(
+         name='phi3-chat',
+         system_template='<|system|>\n{system_message}',
+         # note: The new system prompt was not used here to avoid changes in benchmark performance.
+         # system_message='我是书生·万象,英文名是InternVL,是由上海人工智能实验室、清华大学及多家合作单位联合开发的多模态大语言模型。',
+         system_message='你是由上海人工智能实验室联合商汤科技开发的书生多模态大模型,英文名叫InternVL, 是一个有用无害的人工智能助手。',
+         roles=('<|user|>\n', '<|assistant|>\n'),
+         sep_style=SeparatorStyle.MPT,
+         sep='<|end|>',
+     )
+ )
+
+ register_conv_template(
+     Conversation(
+         name='internvl2_5',
+         system_template='<|im_start|>system\n{system_message}',
+         system_message='你是书生·万象,英文名是InternVL,是由上海人工智能实验室、清华大学及多家合作单位联合开发的多模态大语言模型。',
+         roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
+         sep_style=SeparatorStyle.MPT,
+         sep='<|im_end|>\n',
+     )
+ )
+
+ register_conv_template(
+     Conversation(
+         name='neo1_0',
+         system_template='<|im_start|>system\n{system_message}',
+         system_message='',
+         roles=('<|im_start|>user\n', '<|im_start|>assistant\n'),
+         sep_style=SeparatorStyle.MPT,
+         sep='<|im_end|>\n',
+     )
+ )
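A sketch of driving the neo1_0 template registered above: append a user turn, leave the assistant slot as None so the rendered prompt ends at the generation point, then render (same hypothetical "neo" package as above):

from neo.conversation import get_conv_template

conv = get_conv_template('neo1_0')
conv.append_message(conv.roles[0], 'Describe the image.')
conv.append_message(conv.roles[1], None)
print(repr(conv.get_prompt()))
# '<|im_start|>user\nDescribe the image.<|im_end|>\n<|im_start|>assistant\n'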
merges.txt ADDED
The diff for this file is too large to render.
model-00001-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9253af98019ca730522b7b3abd8e111d8f954306e70490f6f02417c138c844a2
+ size 18356696
model-00002-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bbcbf73561f6bc5d0a17ea6a2081feed2d1304e87602d8c502d9a5c4bd85576
+ size 16
model-00003-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bbcbf73561f6bc5d0a17ea6a2081feed2d1304e87602d8c502d9a5c4bd85576
+ size 16
model-00004-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bbcbf73561f6bc5d0a17ea6a2081feed2d1304e87602d8c502d9a5c4bd85576
+ size 16
model-00005-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4566caa48d3a46eb41adcf288ac94ffd716bafa8ea36a28908e2ea3935bdf1da
+ size 1697694656
model-00006-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a4ca56192c7a276a8bfaad165bc5990d9386521c8613116952fda53dddb4d21
+ size 453030344
model-00007-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f61e1bf44aac52048c6f0f1691e622c906bb5d563d2840b9015d45eae8291a7f
+ size 453030376
model-00008-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e80edc472dd605ef2f3aab4935558a669b3e10c09c90ef4bd4383a18d56e212
+ size 453030416
model-00009-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5d660acce00de8a566dd806aca967d109c102e3fcde2aad30f0403edd8fe52f8
+ size 453030416
model-00010-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e133cfff4f32ddc1a117f936eede8e45f472e2193472441b000780689aeb738a
+ size 453030416
model-00011-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a6c5eb134857565cf77af8bb9a209e2635cfe4cd1208ffc15010a5c8c297c7ab
+ size 453030416
model-00012-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a5984c954de70e810f52246556b6f684b8c2e636a9d29326990c79c6d70e1b6
+ size 453030416
model-00013-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a5d232aa0e41a8f6035f75995c06caeff29a550426b6478a219f6d327a3e5a80
+ size 453030416
model-00014-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26209192c436ed5a5bdfc349acce83a50bd850cb70f072b8b34926f11fca8ef7
+ size 453030416
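Each block above is a Git LFS pointer, not the weights themselves: the oid is the SHA-256 of the real shard and size is its byte count (shards 00002-00004 appear to be 16-byte placeholders sharing one hash). A sketch that verifies a downloaded shard against its pointer:

import hashlib
import os

def verify_lfs_pointer(path, expected_oid, expected_size):
    # Compare the on-disk byte count first, then the streaming SHA-256.
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(1 << 20), b''):
            h.update(chunk)
    return h.hexdigest() == expected_oid

print(verify_lfs_pointer(
    'model-00001-of-00014.safetensors',
    '9253af98019ca730522b7b3abd8e111d8f954306e70490f6f02417c138c844a2',
    18356696,
))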
model.safetensors.index.json ADDED
@@ -0,0 +1,692 @@
+ {
+   "metadata": {},
+   "weight_map": {
+     "vision_model.embeddings.patch_embedding.weight": "model-00001-of-00014.safetensors",
+     "vision_model.embeddings.patch_embedding.bias": "model-00001-of-00014.safetensors",
+     "vision_model.embeddings.dense_embedding.weight": "model-00001-of-00014.safetensors",
+     "vision_model.embeddings.dense_embedding.bias": "model-00001-of-00014.safetensors",
+     "language_model.model.embed_tokens.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.q_proj_hw.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.k_proj_hw.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.q_norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.k_norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.q_norm_h.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.k_norm_h.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.q_norm_w.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.k_norm_w.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.0.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.q_proj_hw.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.k_proj_hw.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.q_norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.k_norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.q_norm_h.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.k_norm_h.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.q_norm_w.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.k_norm_w.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.1.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.q_proj_hw.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.k_proj_hw.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.q_norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.k_norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.q_norm_h.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.k_norm_h.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.q_norm_w.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.k_norm_w.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.2.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.q_proj_hw.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.k_proj_hw.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.q_norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.k_norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.q_norm_h.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.k_norm_h.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.q_norm_w.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.k_norm_w.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.input_layernorm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.3.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.norm.weight": "model-00005-of-00014.safetensors",
+     "language_model.lm_head.weight": "model-00005-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.q_proj_hw.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.k_proj_hw.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.q_norm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.k_norm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.q_norm_h.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.k_norm_h.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.q_norm_w.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.k_norm_w.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.4.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.q_proj_hw.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.k_proj_hw.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.q_norm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.k_norm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.q_norm_h.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.k_norm_h.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.q_norm_w.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.k_norm_w.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.5.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.q_proj_hw.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.k_proj_hw.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.q_norm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.k_norm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.q_norm_h.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.k_norm_h.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.q_norm_w.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.k_norm_w.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.6.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.q_proj_hw.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.k_proj_hw.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.q_norm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.k_norm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.q_norm_h.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.k_norm_h.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.q_norm_w.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.k_norm_w.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.input_layernorm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.7.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.q_proj_hw.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.k_proj_hw.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.q_norm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.k_norm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.q_norm_h.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.k_norm_h.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.q_norm_w.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.k_norm_w.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.8.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.q_proj_hw.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.k_proj_hw.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.q_norm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.k_norm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.q_norm_h.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.k_norm_h.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.q_norm_w.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.k_norm_w.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.9.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.q_proj_hw.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.k_proj_hw.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.q_norm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.k_norm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.q_norm_h.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.k_norm_h.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.q_norm_w.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.k_norm_w.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.10.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.q_proj_hw.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.k_proj_hw.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.q_norm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.k_norm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.q_norm_h.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.k_norm_h.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.q_norm_w.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.k_norm_w.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.input_layernorm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.11.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.q_proj_hw.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.k_proj_hw.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.q_norm.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.k_norm.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.q_norm_h.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.k_norm_h.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.q_norm_w.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.k_norm_w.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.input_layernorm.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.12.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.q_proj_hw.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.k_proj_hw.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.q_norm.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.k_norm.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.q_norm_h.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.k_norm_h.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.q_norm_w.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.k_norm_w.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.input_layernorm.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.13.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.14.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.14.self_attn.q_proj_hw.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.14.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.14.self_attn.k_proj_hw.weight": "model-00008-of-00014.safetensors",
+     "language_model.model.layers.14.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
254
+ "language_model.model.layers.14.self_attn.q_norm.weight": "model-00008-of-00014.safetensors",
255
+ "language_model.model.layers.14.self_attn.k_norm.weight": "model-00008-of-00014.safetensors",
256
+ "language_model.model.layers.14.self_attn.q_norm_h.weight": "model-00008-of-00014.safetensors",
257
+ "language_model.model.layers.14.self_attn.k_norm_h.weight": "model-00008-of-00014.safetensors",
258
+ "language_model.model.layers.14.self_attn.q_norm_w.weight": "model-00008-of-00014.safetensors",
259
+ "language_model.model.layers.14.self_attn.k_norm_w.weight": "model-00008-of-00014.safetensors",
260
+ "language_model.model.layers.14.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
261
+ "language_model.model.layers.14.input_layernorm.weight": "model-00008-of-00014.safetensors",
262
+ "language_model.model.layers.14.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
263
+ "language_model.model.layers.14.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
264
+ "language_model.model.layers.14.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
265
+ "language_model.model.layers.14.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
266
+ "language_model.model.layers.15.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
267
+ "language_model.model.layers.15.self_attn.q_proj_hw.weight": "model-00008-of-00014.safetensors",
268
+ "language_model.model.layers.15.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
269
+ "language_model.model.layers.15.self_attn.k_proj_hw.weight": "model-00008-of-00014.safetensors",
270
+ "language_model.model.layers.15.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
271
+ "language_model.model.layers.15.self_attn.q_norm.weight": "model-00008-of-00014.safetensors",
272
+ "language_model.model.layers.15.self_attn.k_norm.weight": "model-00008-of-00014.safetensors",
273
+ "language_model.model.layers.15.self_attn.q_norm_h.weight": "model-00008-of-00014.safetensors",
274
+ "language_model.model.layers.15.self_attn.k_norm_h.weight": "model-00008-of-00014.safetensors",
275
+ "language_model.model.layers.15.self_attn.q_norm_w.weight": "model-00008-of-00014.safetensors",
276
+ "language_model.model.layers.15.self_attn.k_norm_w.weight": "model-00008-of-00014.safetensors",
277
+ "language_model.model.layers.15.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
278
+ "language_model.model.layers.15.input_layernorm.weight": "model-00008-of-00014.safetensors",
279
+ "language_model.model.layers.15.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
280
+ "language_model.model.layers.15.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
281
+ "language_model.model.layers.15.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
282
+ "language_model.model.layers.15.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
283
+ "language_model.model.layers.16.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
284
+ "language_model.model.layers.16.self_attn.q_proj_hw.weight": "model-00009-of-00014.safetensors",
285
+ "language_model.model.layers.16.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
286
+ "language_model.model.layers.16.self_attn.k_proj_hw.weight": "model-00009-of-00014.safetensors",
287
+ "language_model.model.layers.16.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
288
+ "language_model.model.layers.16.self_attn.q_norm.weight": "model-00009-of-00014.safetensors",
289
+ "language_model.model.layers.16.self_attn.k_norm.weight": "model-00009-of-00014.safetensors",
290
+ "language_model.model.layers.16.self_attn.q_norm_h.weight": "model-00009-of-00014.safetensors",
291
+ "language_model.model.layers.16.self_attn.k_norm_h.weight": "model-00009-of-00014.safetensors",
292
+ "language_model.model.layers.16.self_attn.q_norm_w.weight": "model-00009-of-00014.safetensors",
293
+ "language_model.model.layers.16.self_attn.k_norm_w.weight": "model-00009-of-00014.safetensors",
294
+ "language_model.model.layers.16.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
295
+ "language_model.model.layers.16.input_layernorm.weight": "model-00009-of-00014.safetensors",
296
+ "language_model.model.layers.16.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
297
+ "language_model.model.layers.16.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
298
+ "language_model.model.layers.16.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
299
+ "language_model.model.layers.16.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
300
+ "language_model.model.layers.17.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
301
+ "language_model.model.layers.17.self_attn.q_proj_hw.weight": "model-00009-of-00014.safetensors",
302
+ "language_model.model.layers.17.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
303
+ "language_model.model.layers.17.self_attn.k_proj_hw.weight": "model-00009-of-00014.safetensors",
304
+ "language_model.model.layers.17.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
305
+ "language_model.model.layers.17.self_attn.q_norm.weight": "model-00009-of-00014.safetensors",
306
+ "language_model.model.layers.17.self_attn.k_norm.weight": "model-00009-of-00014.safetensors",
307
+ "language_model.model.layers.17.self_attn.q_norm_h.weight": "model-00009-of-00014.safetensors",
308
+ "language_model.model.layers.17.self_attn.k_norm_h.weight": "model-00009-of-00014.safetensors",
309
+ "language_model.model.layers.17.self_attn.q_norm_w.weight": "model-00009-of-00014.safetensors",
310
+ "language_model.model.layers.17.self_attn.k_norm_w.weight": "model-00009-of-00014.safetensors",
311
+ "language_model.model.layers.17.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
312
+ "language_model.model.layers.17.input_layernorm.weight": "model-00009-of-00014.safetensors",
313
+ "language_model.model.layers.17.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
314
+ "language_model.model.layers.17.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
315
+ "language_model.model.layers.17.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
316
+ "language_model.model.layers.17.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
317
+ "language_model.model.layers.18.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
318
+ "language_model.model.layers.18.self_attn.q_proj_hw.weight": "model-00009-of-00014.safetensors",
319
+ "language_model.model.layers.18.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
320
+ "language_model.model.layers.18.self_attn.k_proj_hw.weight": "model-00009-of-00014.safetensors",
321
+ "language_model.model.layers.18.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
322
+ "language_model.model.layers.18.self_attn.q_norm.weight": "model-00009-of-00014.safetensors",
323
+ "language_model.model.layers.18.self_attn.k_norm.weight": "model-00009-of-00014.safetensors",
324
+ "language_model.model.layers.18.self_attn.q_norm_h.weight": "model-00009-of-00014.safetensors",
325
+ "language_model.model.layers.18.self_attn.k_norm_h.weight": "model-00009-of-00014.safetensors",
326
+ "language_model.model.layers.18.self_attn.q_norm_w.weight": "model-00009-of-00014.safetensors",
327
+ "language_model.model.layers.18.self_attn.k_norm_w.weight": "model-00009-of-00014.safetensors",
328
+ "language_model.model.layers.18.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
329
+ "language_model.model.layers.18.input_layernorm.weight": "model-00009-of-00014.safetensors",
330
+ "language_model.model.layers.18.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
331
+ "language_model.model.layers.18.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
332
+ "language_model.model.layers.18.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
333
+ "language_model.model.layers.18.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
334
+ "language_model.model.layers.19.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
335
+ "language_model.model.layers.19.self_attn.q_proj_hw.weight": "model-00009-of-00014.safetensors",
336
+ "language_model.model.layers.19.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
337
+ "language_model.model.layers.19.self_attn.k_proj_hw.weight": "model-00009-of-00014.safetensors",
338
+ "language_model.model.layers.19.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
339
+ "language_model.model.layers.19.self_attn.q_norm.weight": "model-00009-of-00014.safetensors",
340
+ "language_model.model.layers.19.self_attn.k_norm.weight": "model-00009-of-00014.safetensors",
341
+ "language_model.model.layers.19.self_attn.q_norm_h.weight": "model-00009-of-00014.safetensors",
342
+ "language_model.model.layers.19.self_attn.k_norm_h.weight": "model-00009-of-00014.safetensors",
343
+ "language_model.model.layers.19.self_attn.q_norm_w.weight": "model-00009-of-00014.safetensors",
344
+ "language_model.model.layers.19.self_attn.k_norm_w.weight": "model-00009-of-00014.safetensors",
345
+ "language_model.model.layers.19.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
346
+ "language_model.model.layers.19.input_layernorm.weight": "model-00009-of-00014.safetensors",
347
+ "language_model.model.layers.19.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
348
+ "language_model.model.layers.19.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
349
+ "language_model.model.layers.19.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
350
+ "language_model.model.layers.19.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
351
+ "language_model.model.layers.20.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
352
+ "language_model.model.layers.20.self_attn.q_proj_hw.weight": "model-00010-of-00014.safetensors",
353
+ "language_model.model.layers.20.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
354
+ "language_model.model.layers.20.self_attn.k_proj_hw.weight": "model-00010-of-00014.safetensors",
355
+ "language_model.model.layers.20.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
356
+ "language_model.model.layers.20.self_attn.q_norm.weight": "model-00010-of-00014.safetensors",
357
+ "language_model.model.layers.20.self_attn.k_norm.weight": "model-00010-of-00014.safetensors",
358
+ "language_model.model.layers.20.self_attn.q_norm_h.weight": "model-00010-of-00014.safetensors",
359
+ "language_model.model.layers.20.self_attn.k_norm_h.weight": "model-00010-of-00014.safetensors",
360
+ "language_model.model.layers.20.self_attn.q_norm_w.weight": "model-00010-of-00014.safetensors",
361
+ "language_model.model.layers.20.self_attn.k_norm_w.weight": "model-00010-of-00014.safetensors",
362
+ "language_model.model.layers.20.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
363
+ "language_model.model.layers.20.input_layernorm.weight": "model-00010-of-00014.safetensors",
364
+ "language_model.model.layers.20.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
365
+ "language_model.model.layers.20.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
366
+ "language_model.model.layers.20.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
367
+ "language_model.model.layers.20.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
368
+ "language_model.model.layers.21.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
369
+ "language_model.model.layers.21.self_attn.q_proj_hw.weight": "model-00010-of-00014.safetensors",
370
+ "language_model.model.layers.21.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
371
+ "language_model.model.layers.21.self_attn.k_proj_hw.weight": "model-00010-of-00014.safetensors",
372
+ "language_model.model.layers.21.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
373
+ "language_model.model.layers.21.self_attn.q_norm.weight": "model-00010-of-00014.safetensors",
374
+ "language_model.model.layers.21.self_attn.k_norm.weight": "model-00010-of-00014.safetensors",
375
+ "language_model.model.layers.21.self_attn.q_norm_h.weight": "model-00010-of-00014.safetensors",
376
+ "language_model.model.layers.21.self_attn.k_norm_h.weight": "model-00010-of-00014.safetensors",
377
+ "language_model.model.layers.21.self_attn.q_norm_w.weight": "model-00010-of-00014.safetensors",
378
+ "language_model.model.layers.21.self_attn.k_norm_w.weight": "model-00010-of-00014.safetensors",
379
+ "language_model.model.layers.21.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
380
+ "language_model.model.layers.21.input_layernorm.weight": "model-00010-of-00014.safetensors",
381
+ "language_model.model.layers.21.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
382
+ "language_model.model.layers.21.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
383
+ "language_model.model.layers.21.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
384
+ "language_model.model.layers.21.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
385
+ "language_model.model.layers.22.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
386
+ "language_model.model.layers.22.self_attn.q_proj_hw.weight": "model-00010-of-00014.safetensors",
387
+ "language_model.model.layers.22.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
388
+ "language_model.model.layers.22.self_attn.k_proj_hw.weight": "model-00010-of-00014.safetensors",
389
+ "language_model.model.layers.22.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
390
+ "language_model.model.layers.22.self_attn.q_norm.weight": "model-00010-of-00014.safetensors",
391
+ "language_model.model.layers.22.self_attn.k_norm.weight": "model-00010-of-00014.safetensors",
392
+ "language_model.model.layers.22.self_attn.q_norm_h.weight": "model-00010-of-00014.safetensors",
393
+ "language_model.model.layers.22.self_attn.k_norm_h.weight": "model-00010-of-00014.safetensors",
394
+ "language_model.model.layers.22.self_attn.q_norm_w.weight": "model-00010-of-00014.safetensors",
395
+ "language_model.model.layers.22.self_attn.k_norm_w.weight": "model-00010-of-00014.safetensors",
396
+ "language_model.model.layers.22.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
397
+ "language_model.model.layers.22.input_layernorm.weight": "model-00010-of-00014.safetensors",
398
+ "language_model.model.layers.22.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
399
+ "language_model.model.layers.22.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
400
+ "language_model.model.layers.22.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
401
+ "language_model.model.layers.22.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
402
+ "language_model.model.layers.23.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
403
+ "language_model.model.layers.23.self_attn.q_proj_hw.weight": "model-00010-of-00014.safetensors",
404
+ "language_model.model.layers.23.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
405
+ "language_model.model.layers.23.self_attn.k_proj_hw.weight": "model-00010-of-00014.safetensors",
406
+ "language_model.model.layers.23.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
407
+ "language_model.model.layers.23.self_attn.q_norm.weight": "model-00010-of-00014.safetensors",
408
+ "language_model.model.layers.23.self_attn.k_norm.weight": "model-00010-of-00014.safetensors",
409
+ "language_model.model.layers.23.self_attn.q_norm_h.weight": "model-00010-of-00014.safetensors",
410
+ "language_model.model.layers.23.self_attn.k_norm_h.weight": "model-00010-of-00014.safetensors",
411
+ "language_model.model.layers.23.self_attn.q_norm_w.weight": "model-00010-of-00014.safetensors",
412
+ "language_model.model.layers.23.self_attn.k_norm_w.weight": "model-00010-of-00014.safetensors",
413
+ "language_model.model.layers.23.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
414
+ "language_model.model.layers.23.input_layernorm.weight": "model-00010-of-00014.safetensors",
415
+ "language_model.model.layers.23.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
416
+ "language_model.model.layers.23.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
417
+ "language_model.model.layers.23.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
418
+ "language_model.model.layers.23.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
419
+ "language_model.model.layers.24.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
420
+ "language_model.model.layers.24.self_attn.q_proj_hw.weight": "model-00011-of-00014.safetensors",
421
+ "language_model.model.layers.24.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
422
+ "language_model.model.layers.24.self_attn.k_proj_hw.weight": "model-00011-of-00014.safetensors",
423
+ "language_model.model.layers.24.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
424
+ "language_model.model.layers.24.self_attn.q_norm.weight": "model-00011-of-00014.safetensors",
425
+ "language_model.model.layers.24.self_attn.k_norm.weight": "model-00011-of-00014.safetensors",
426
+ "language_model.model.layers.24.self_attn.q_norm_h.weight": "model-00011-of-00014.safetensors",
427
+ "language_model.model.layers.24.self_attn.k_norm_h.weight": "model-00011-of-00014.safetensors",
428
+ "language_model.model.layers.24.self_attn.q_norm_w.weight": "model-00011-of-00014.safetensors",
429
+ "language_model.model.layers.24.self_attn.k_norm_w.weight": "model-00011-of-00014.safetensors",
430
+ "language_model.model.layers.24.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
431
+ "language_model.model.layers.24.input_layernorm.weight": "model-00011-of-00014.safetensors",
432
+ "language_model.model.layers.24.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
433
+ "language_model.model.layers.24.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
434
+ "language_model.model.layers.24.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
435
+ "language_model.model.layers.24.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
436
+ "language_model.model.layers.25.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
437
+ "language_model.model.layers.25.self_attn.q_proj_hw.weight": "model-00011-of-00014.safetensors",
438
+ "language_model.model.layers.25.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
439
+ "language_model.model.layers.25.self_attn.k_proj_hw.weight": "model-00011-of-00014.safetensors",
440
+ "language_model.model.layers.25.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
441
+ "language_model.model.layers.25.self_attn.q_norm.weight": "model-00011-of-00014.safetensors",
442
+ "language_model.model.layers.25.self_attn.k_norm.weight": "model-00011-of-00014.safetensors",
443
+ "language_model.model.layers.25.self_attn.q_norm_h.weight": "model-00011-of-00014.safetensors",
444
+ "language_model.model.layers.25.self_attn.k_norm_h.weight": "model-00011-of-00014.safetensors",
445
+ "language_model.model.layers.25.self_attn.q_norm_w.weight": "model-00011-of-00014.safetensors",
446
+ "language_model.model.layers.25.self_attn.k_norm_w.weight": "model-00011-of-00014.safetensors",
447
+ "language_model.model.layers.25.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
448
+ "language_model.model.layers.25.input_layernorm.weight": "model-00011-of-00014.safetensors",
449
+ "language_model.model.layers.25.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
450
+ "language_model.model.layers.25.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
451
+ "language_model.model.layers.25.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
452
+ "language_model.model.layers.25.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
453
+ "language_model.model.layers.26.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
454
+ "language_model.model.layers.26.self_attn.q_proj_hw.weight": "model-00011-of-00014.safetensors",
455
+ "language_model.model.layers.26.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
456
+ "language_model.model.layers.26.self_attn.k_proj_hw.weight": "model-00011-of-00014.safetensors",
457
+ "language_model.model.layers.26.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
458
+ "language_model.model.layers.26.self_attn.q_norm.weight": "model-00011-of-00014.safetensors",
459
+ "language_model.model.layers.26.self_attn.k_norm.weight": "model-00011-of-00014.safetensors",
460
+ "language_model.model.layers.26.self_attn.q_norm_h.weight": "model-00011-of-00014.safetensors",
461
+ "language_model.model.layers.26.self_attn.k_norm_h.weight": "model-00011-of-00014.safetensors",
462
+ "language_model.model.layers.26.self_attn.q_norm_w.weight": "model-00011-of-00014.safetensors",
463
+ "language_model.model.layers.26.self_attn.k_norm_w.weight": "model-00011-of-00014.safetensors",
464
+ "language_model.model.layers.26.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
465
+ "language_model.model.layers.26.input_layernorm.weight": "model-00011-of-00014.safetensors",
466
+ "language_model.model.layers.26.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
467
+ "language_model.model.layers.26.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
468
+ "language_model.model.layers.26.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
469
+ "language_model.model.layers.26.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
470
+ "language_model.model.layers.27.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
471
+ "language_model.model.layers.27.self_attn.q_proj_hw.weight": "model-00011-of-00014.safetensors",
472
+ "language_model.model.layers.27.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
473
+ "language_model.model.layers.27.self_attn.k_proj_hw.weight": "model-00011-of-00014.safetensors",
474
+ "language_model.model.layers.27.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
475
+ "language_model.model.layers.27.self_attn.q_norm.weight": "model-00011-of-00014.safetensors",
476
+ "language_model.model.layers.27.self_attn.k_norm.weight": "model-00011-of-00014.safetensors",
477
+ "language_model.model.layers.27.self_attn.q_norm_h.weight": "model-00011-of-00014.safetensors",
478
+ "language_model.model.layers.27.self_attn.k_norm_h.weight": "model-00011-of-00014.safetensors",
479
+ "language_model.model.layers.27.self_attn.q_norm_w.weight": "model-00011-of-00014.safetensors",
480
+ "language_model.model.layers.27.self_attn.k_norm_w.weight": "model-00011-of-00014.safetensors",
481
+ "language_model.model.layers.27.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
482
+ "language_model.model.layers.27.input_layernorm.weight": "model-00011-of-00014.safetensors",
483
+ "language_model.model.layers.27.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
484
+ "language_model.model.layers.27.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
485
+ "language_model.model.layers.27.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
486
+ "language_model.model.layers.27.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
487
+ "language_model.model.layers.28.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
488
+ "language_model.model.layers.28.self_attn.q_proj_hw.weight": "model-00012-of-00014.safetensors",
489
+ "language_model.model.layers.28.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
490
+ "language_model.model.layers.28.self_attn.k_proj_hw.weight": "model-00012-of-00014.safetensors",
491
+ "language_model.model.layers.28.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
492
+ "language_model.model.layers.28.self_attn.q_norm.weight": "model-00012-of-00014.safetensors",
493
+ "language_model.model.layers.28.self_attn.k_norm.weight": "model-00012-of-00014.safetensors",
494
+ "language_model.model.layers.28.self_attn.q_norm_h.weight": "model-00012-of-00014.safetensors",
495
+ "language_model.model.layers.28.self_attn.k_norm_h.weight": "model-00012-of-00014.safetensors",
496
+ "language_model.model.layers.28.self_attn.q_norm_w.weight": "model-00012-of-00014.safetensors",
497
+ "language_model.model.layers.28.self_attn.k_norm_w.weight": "model-00012-of-00014.safetensors",
498
+ "language_model.model.layers.28.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
499
+ "language_model.model.layers.28.input_layernorm.weight": "model-00012-of-00014.safetensors",
500
+ "language_model.model.layers.28.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
501
+ "language_model.model.layers.28.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
502
+ "language_model.model.layers.28.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
503
+ "language_model.model.layers.28.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
504
+ "language_model.model.layers.29.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
505
+ "language_model.model.layers.29.self_attn.q_proj_hw.weight": "model-00012-of-00014.safetensors",
506
+ "language_model.model.layers.29.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
507
+ "language_model.model.layers.29.self_attn.k_proj_hw.weight": "model-00012-of-00014.safetensors",
508
+ "language_model.model.layers.29.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
509
+ "language_model.model.layers.29.self_attn.q_norm.weight": "model-00012-of-00014.safetensors",
510
+ "language_model.model.layers.29.self_attn.k_norm.weight": "model-00012-of-00014.safetensors",
511
+ "language_model.model.layers.29.self_attn.q_norm_h.weight": "model-00012-of-00014.safetensors",
512
+ "language_model.model.layers.29.self_attn.k_norm_h.weight": "model-00012-of-00014.safetensors",
513
+ "language_model.model.layers.29.self_attn.q_norm_w.weight": "model-00012-of-00014.safetensors",
514
+ "language_model.model.layers.29.self_attn.k_norm_w.weight": "model-00012-of-00014.safetensors",
515
+ "language_model.model.layers.29.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
516
+ "language_model.model.layers.29.input_layernorm.weight": "model-00012-of-00014.safetensors",
517
+ "language_model.model.layers.29.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
518
+ "language_model.model.layers.29.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
519
+ "language_model.model.layers.29.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
520
+ "language_model.model.layers.29.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
521
+ "language_model.model.layers.30.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
522
+ "language_model.model.layers.30.self_attn.q_proj_hw.weight": "model-00012-of-00014.safetensors",
523
+ "language_model.model.layers.30.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
524
+ "language_model.model.layers.30.self_attn.k_proj_hw.weight": "model-00012-of-00014.safetensors",
525
+ "language_model.model.layers.30.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
526
+ "language_model.model.layers.30.self_attn.q_norm.weight": "model-00012-of-00014.safetensors",
527
+ "language_model.model.layers.30.self_attn.k_norm.weight": "model-00012-of-00014.safetensors",
528
+ "language_model.model.layers.30.self_attn.q_norm_h.weight": "model-00012-of-00014.safetensors",
529
+ "language_model.model.layers.30.self_attn.k_norm_h.weight": "model-00012-of-00014.safetensors",
530
+ "language_model.model.layers.30.self_attn.q_norm_w.weight": "model-00012-of-00014.safetensors",
531
+ "language_model.model.layers.30.self_attn.k_norm_w.weight": "model-00012-of-00014.safetensors",
532
+ "language_model.model.layers.30.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
533
+ "language_model.model.layers.30.input_layernorm.weight": "model-00012-of-00014.safetensors",
534
+ "language_model.model.layers.30.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
535
+ "language_model.model.layers.30.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
536
+ "language_model.model.layers.30.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
537
+ "language_model.model.layers.30.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
538
+ "language_model.model.layers.31.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
539
+ "language_model.model.layers.31.self_attn.q_proj_hw.weight": "model-00012-of-00014.safetensors",
540
+ "language_model.model.layers.31.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
541
+ "language_model.model.layers.31.self_attn.k_proj_hw.weight": "model-00012-of-00014.safetensors",
542
+ "language_model.model.layers.31.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
543
+ "language_model.model.layers.31.self_attn.q_norm.weight": "model-00012-of-00014.safetensors",
544
+ "language_model.model.layers.31.self_attn.k_norm.weight": "model-00012-of-00014.safetensors",
545
+ "language_model.model.layers.31.self_attn.q_norm_h.weight": "model-00012-of-00014.safetensors",
546
+ "language_model.model.layers.31.self_attn.k_norm_h.weight": "model-00012-of-00014.safetensors",
547
+ "language_model.model.layers.31.self_attn.q_norm_w.weight": "model-00012-of-00014.safetensors",
548
+ "language_model.model.layers.31.self_attn.k_norm_w.weight": "model-00012-of-00014.safetensors",
549
+ "language_model.model.layers.31.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
550
+ "language_model.model.layers.31.input_layernorm.weight": "model-00012-of-00014.safetensors",
551
+ "language_model.model.layers.31.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
552
+ "language_model.model.layers.31.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
553
+ "language_model.model.layers.31.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
554
+ "language_model.model.layers.31.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
555
+ "language_model.model.layers.32.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
556
+ "language_model.model.layers.32.self_attn.q_proj_hw.weight": "model-00013-of-00014.safetensors",
557
+ "language_model.model.layers.32.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
558
+ "language_model.model.layers.32.self_attn.k_proj_hw.weight": "model-00013-of-00014.safetensors",
559
+ "language_model.model.layers.32.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
560
+ "language_model.model.layers.32.self_attn.q_norm.weight": "model-00013-of-00014.safetensors",
561
+ "language_model.model.layers.32.self_attn.k_norm.weight": "model-00013-of-00014.safetensors",
562
+ "language_model.model.layers.32.self_attn.q_norm_h.weight": "model-00013-of-00014.safetensors",
563
+ "language_model.model.layers.32.self_attn.k_norm_h.weight": "model-00013-of-00014.safetensors",
564
+ "language_model.model.layers.32.self_attn.q_norm_w.weight": "model-00013-of-00014.safetensors",
565
+ "language_model.model.layers.32.self_attn.k_norm_w.weight": "model-00013-of-00014.safetensors",
566
+ "language_model.model.layers.32.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
567
+ "language_model.model.layers.32.input_layernorm.weight": "model-00013-of-00014.safetensors",
568
+ "language_model.model.layers.32.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
569
+ "language_model.model.layers.32.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
570
+ "language_model.model.layers.32.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
571
+ "language_model.model.layers.32.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
572
+ "language_model.model.layers.33.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
573
+ "language_model.model.layers.33.self_attn.q_proj_hw.weight": "model-00013-of-00014.safetensors",
574
+ "language_model.model.layers.33.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
575
+ "language_model.model.layers.33.self_attn.k_proj_hw.weight": "model-00013-of-00014.safetensors",
576
+ "language_model.model.layers.33.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
577
+ "language_model.model.layers.33.self_attn.q_norm.weight": "model-00013-of-00014.safetensors",
578
+ "language_model.model.layers.33.self_attn.k_norm.weight": "model-00013-of-00014.safetensors",
579
+ "language_model.model.layers.33.self_attn.q_norm_h.weight": "model-00013-of-00014.safetensors",
580
+ "language_model.model.layers.33.self_attn.k_norm_h.weight": "model-00013-of-00014.safetensors",
581
+ "language_model.model.layers.33.self_attn.q_norm_w.weight": "model-00013-of-00014.safetensors",
582
+ "language_model.model.layers.33.self_attn.k_norm_w.weight": "model-00013-of-00014.safetensors",
583
+ "language_model.model.layers.33.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
584
+ "language_model.model.layers.33.input_layernorm.weight": "model-00013-of-00014.safetensors",
585
+ "language_model.model.layers.33.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
586
+ "language_model.model.layers.33.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
587
+ "language_model.model.layers.33.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
588
+ "language_model.model.layers.33.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
589
+ "language_model.model.layers.34.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
590
+ "language_model.model.layers.34.self_attn.q_proj_hw.weight": "model-00013-of-00014.safetensors",
591
+ "language_model.model.layers.34.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
592
+ "language_model.model.layers.34.self_attn.k_proj_hw.weight": "model-00013-of-00014.safetensors",
593
+ "language_model.model.layers.34.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
594
+ "language_model.model.layers.34.self_attn.q_norm.weight": "model-00013-of-00014.safetensors",
595
+ "language_model.model.layers.34.self_attn.k_norm.weight": "model-00013-of-00014.safetensors",
596
+ "language_model.model.layers.34.self_attn.q_norm_h.weight": "model-00013-of-00014.safetensors",
597
+ "language_model.model.layers.34.self_attn.k_norm_h.weight": "model-00013-of-00014.safetensors",
598
+ "language_model.model.layers.34.self_attn.q_norm_w.weight": "model-00013-of-00014.safetensors",
599
+ "language_model.model.layers.34.self_attn.k_norm_w.weight": "model-00013-of-00014.safetensors",
600
+ "language_model.model.layers.34.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
601
+ "language_model.model.layers.34.input_layernorm.weight": "model-00013-of-00014.safetensors",
602
+ "language_model.model.layers.34.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
603
+ "language_model.model.layers.34.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
604
+ "language_model.model.layers.34.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
605
+ "language_model.model.layers.34.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
606
+ "language_model.model.layers.35.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
607
+ "language_model.model.layers.35.self_attn.q_proj_hw.weight": "model-00013-of-00014.safetensors",
608
+ "language_model.model.layers.35.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
609
+ "language_model.model.layers.35.self_attn.k_proj_hw.weight": "model-00013-of-00014.safetensors",
610
+ "language_model.model.layers.35.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
611
+ "language_model.model.layers.35.self_attn.q_norm.weight": "model-00013-of-00014.safetensors",
612
+ "language_model.model.layers.35.self_attn.k_norm.weight": "model-00013-of-00014.safetensors",
613
+ "language_model.model.layers.35.self_attn.q_norm_h.weight": "model-00013-of-00014.safetensors",
614
+ "language_model.model.layers.35.self_attn.k_norm_h.weight": "model-00013-of-00014.safetensors",
615
+ "language_model.model.layers.35.self_attn.q_norm_w.weight": "model-00013-of-00014.safetensors",
616
+ "language_model.model.layers.35.self_attn.k_norm_w.weight": "model-00013-of-00014.safetensors",
617
+ "language_model.model.layers.35.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
618
+ "language_model.model.layers.35.input_layernorm.weight": "model-00013-of-00014.safetensors",
619
+ "language_model.model.layers.35.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
620
+ "language_model.model.layers.35.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
621
+ "language_model.model.layers.35.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
622
+ "language_model.model.layers.35.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
623
+ "language_model.model.layers.36.self_attn.q_proj.weight": "model-00014-of-00014.safetensors",
624
+ "language_model.model.layers.36.self_attn.q_proj_hw.weight": "model-00014-of-00014.safetensors",
625
+ "language_model.model.layers.36.self_attn.k_proj.weight": "model-00014-of-00014.safetensors",
626
+ "language_model.model.layers.36.self_attn.k_proj_hw.weight": "model-00014-of-00014.safetensors",
627
+ "language_model.model.layers.36.self_attn.v_proj.weight": "model-00014-of-00014.safetensors",
628
+ "language_model.model.layers.36.self_attn.q_norm.weight": "model-00014-of-00014.safetensors",
629
+ "language_model.model.layers.36.self_attn.k_norm.weight": "model-00014-of-00014.safetensors",
630
+ "language_model.model.layers.36.self_attn.q_norm_h.weight": "model-00014-of-00014.safetensors",
631
+ "language_model.model.layers.36.self_attn.k_norm_h.weight": "model-00014-of-00014.safetensors",
632
+ "language_model.model.layers.36.self_attn.q_norm_w.weight": "model-00014-of-00014.safetensors",
633
+ "language_model.model.layers.36.self_attn.k_norm_w.weight": "model-00014-of-00014.safetensors",
634
+ "language_model.model.layers.36.self_attn.o_proj.weight": "model-00014-of-00014.safetensors",
635
+ "language_model.model.layers.36.input_layernorm.weight": "model-00014-of-00014.safetensors",
636
+ "language_model.model.layers.36.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
637
+ "language_model.model.layers.36.mlp.gate_proj.weight": "model-00014-of-00014.safetensors",
638
+ "language_model.model.layers.36.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
639
+ "language_model.model.layers.36.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
640
+ "language_model.model.layers.37.self_attn.q_proj.weight": "model-00014-of-00014.safetensors",
641
+ "language_model.model.layers.37.self_attn.q_proj_hw.weight": "model-00014-of-00014.safetensors",
642
+ "language_model.model.layers.37.self_attn.k_proj.weight": "model-00014-of-00014.safetensors",
643
+ "language_model.model.layers.37.self_attn.k_proj_hw.weight": "model-00014-of-00014.safetensors",
644
+ "language_model.model.layers.37.self_attn.v_proj.weight": "model-00014-of-00014.safetensors",
645
+ "language_model.model.layers.37.self_attn.q_norm.weight": "model-00014-of-00014.safetensors",
646
+ "language_model.model.layers.37.self_attn.k_norm.weight": "model-00014-of-00014.safetensors",
647
+ "language_model.model.layers.37.self_attn.q_norm_h.weight": "model-00014-of-00014.safetensors",
648
+ "language_model.model.layers.37.self_attn.k_norm_h.weight": "model-00014-of-00014.safetensors",
649
+ "language_model.model.layers.37.self_attn.q_norm_w.weight": "model-00014-of-00014.safetensors",
650
+ "language_model.model.layers.37.self_attn.k_norm_w.weight": "model-00014-of-00014.safetensors",
651
+ "language_model.model.layers.37.self_attn.o_proj.weight": "model-00014-of-00014.safetensors",
652
+ "language_model.model.layers.37.input_layernorm.weight": "model-00014-of-00014.safetensors",
653
+ "language_model.model.layers.37.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
654
+ "language_model.model.layers.37.mlp.gate_proj.weight": "model-00014-of-00014.safetensors",
655
+ "language_model.model.layers.37.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
656
+ "language_model.model.layers.37.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
657
+ "language_model.model.layers.38.self_attn.q_proj.weight": "model-00014-of-00014.safetensors",
658
+ "language_model.model.layers.38.self_attn.q_proj_hw.weight": "model-00014-of-00014.safetensors",
659
+ "language_model.model.layers.38.self_attn.k_proj.weight": "model-00014-of-00014.safetensors",
660
+ "language_model.model.layers.38.self_attn.k_proj_hw.weight": "model-00014-of-00014.safetensors",
661
+ "language_model.model.layers.38.self_attn.v_proj.weight": "model-00014-of-00014.safetensors",
662
+ "language_model.model.layers.38.self_attn.q_norm.weight": "model-00014-of-00014.safetensors",
663
+ "language_model.model.layers.38.self_attn.k_norm.weight": "model-00014-of-00014.safetensors",
664
+ "language_model.model.layers.38.self_attn.q_norm_h.weight": "model-00014-of-00014.safetensors",
665
+ "language_model.model.layers.38.self_attn.k_norm_h.weight": "model-00014-of-00014.safetensors",
666
+ "language_model.model.layers.38.self_attn.q_norm_w.weight": "model-00014-of-00014.safetensors",
667
+ "language_model.model.layers.38.self_attn.k_norm_w.weight": "model-00014-of-00014.safetensors",
668
+ "language_model.model.layers.38.self_attn.o_proj.weight": "model-00014-of-00014.safetensors",
669
+ "language_model.model.layers.38.input_layernorm.weight": "model-00014-of-00014.safetensors",
670
+ "language_model.model.layers.38.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
671
+ "language_model.model.layers.38.mlp.gate_proj.weight": "model-00014-of-00014.safetensors",
672
+ "language_model.model.layers.38.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
673
+ "language_model.model.layers.38.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
674
+ "language_model.model.layers.39.self_attn.q_proj.weight": "model-00014-of-00014.safetensors",
675
+ "language_model.model.layers.39.self_attn.q_proj_hw.weight": "model-00014-of-00014.safetensors",
676
+ "language_model.model.layers.39.self_attn.k_proj.weight": "model-00014-of-00014.safetensors",
677
+ "language_model.model.layers.39.self_attn.k_proj_hw.weight": "model-00014-of-00014.safetensors",
678
+ "language_model.model.layers.39.self_attn.v_proj.weight": "model-00014-of-00014.safetensors",
679
+ "language_model.model.layers.39.self_attn.q_norm.weight": "model-00014-of-00014.safetensors",
680
+ "language_model.model.layers.39.self_attn.k_norm.weight": "model-00014-of-00014.safetensors",
681
+ "language_model.model.layers.39.self_attn.q_norm_h.weight": "model-00014-of-00014.safetensors",
682
+ "language_model.model.layers.39.self_attn.k_norm_h.weight": "model-00014-of-00014.safetensors",
683
+ "language_model.model.layers.39.self_attn.q_norm_w.weight": "model-00014-of-00014.safetensors",
684
+ "language_model.model.layers.39.self_attn.k_norm_w.weight": "model-00014-of-00014.safetensors",
685
+ "language_model.model.layers.39.self_attn.o_proj.weight": "model-00014-of-00014.safetensors",
686
+ "language_model.model.layers.39.input_layernorm.weight": "model-00014-of-00014.safetensors",
687
+ "language_model.model.layers.39.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
688
+ "language_model.model.layers.39.mlp.gate_proj.weight": "model-00014-of-00014.safetensors",
689
+ "language_model.model.layers.39.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
690
+ "language_model.model.layers.39.mlp.down_proj.weight": "model-00014-of-00014.safetensors"
691
+ }
692
+ }
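Editor's note (not part of the commit): the index file above maps every parameter name to the shard that stores it, and `from_pretrained` resolves shards through this map automatically. A minimal sketch of reading it by hand (assuming the `safetensors` package and the standard Hugging Face index layout, where the mapping sits under the top-level 'weight_map' key):

    import json
    from safetensors import safe_open

    with open('model.safetensors.index.json') as f:
        index = json.load(f)

    name = 'language_model.model.layers.39.mlp.down_proj.weight'
    shard = index['weight_map'][name]   # e.g. 'model-00014-of-00014.safetensors'
    with safe_open(shard, framework='pt') as sf:
        tensor = sf.get_tensor(name)    # loads only this tensor from the shard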
modeling_neo_chat.py ADDED
@@ -0,0 +1,363 @@
+ from typing import List, Optional, Tuple, Union
+
+ import torch
+ import torch.utils.checkpoint
+ import transformers
+ from torch.nn import CrossEntropyLoss
+ from transformers import GenerationConfig
+ from transformers.modeling_outputs import CausalLMOutputWithPast
+ from transformers.modeling_utils import PreTrainedModel
+ from transformers.utils import logging
+
+ from .configuration_neo_chat import NEOChatConfig
+ from .conversation import get_conv_template
+ from .modeling_neo_vit import NEOVisionModel
+ from .modeling_qwen3 import Qwen3ForCausalLM
+
+ logger = logging.get_logger(__name__)
+
+
+ def version_cmp(v1, v2, op='eq'):
+     import operator
+
+     from packaging import version
+     op_func = getattr(operator, op)
+     return op_func(version.parse(v1), version.parse(v2))
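# Editor's note (not part of the commit): version_cmp compares release strings
# numerically via packaging.version rather than lexically, e.g.
#   version_cmp('4.51.0', '4.37.0', 'ge') -> True
#   version_cmp('4.9.0', '4.37.0', 'ge')  -> False (plain string comparison would say True)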
+
+
+ def build_abs_positions_from_grid_hw(grid_hw: torch.Tensor, device=None):
+     """
+     Compute the absolute patch coordinates (x, y) of every patch in a batch of images.
+
+     Args:
+         grid_hw: (B, 2) tensor holding the patch-grid size (H, W) of each image
+         device: device for the computation; defaults to grid_hw.device
+
+     Returns:
+         abs_x, abs_y: (N_total,) tensors, where N_total = sum over images of H * W
+     """
+     device = device if device is not None else grid_hw.device
+     grid_hw = grid_hw.to(device)
+     B = grid_hw.shape[0]
+
+     # Get the number of patches per image
+     H = grid_hw[:, 0]
+     W = grid_hw[:, 1]
+     N = H * W
+     N_total = N.sum()
+
+     # Map each patch to the index of the image it belongs to
+     patch_to_sample = torch.repeat_interleave(torch.arange(B, device=device), N)  # (N_total,)
+
+     # Generate the intra-image patch index (row-major order) by subtracting each
+     # image's starting offset (the cumulative patch count of the preceding images)
+     patch_id_within_image = torch.arange(N_total, device=device)
+     patch_id_within_image = patch_id_within_image - torch.cumsum(
+         torch.cat([torch.tensor([0], device=device), N[:-1]]), dim=0
+     )[patch_to_sample]
+
+     # Convert the flat intra-image index to (x, y) using each patch's image width
+     W_per_patch = W[patch_to_sample]
+     abs_x = patch_id_within_image % W_per_patch
+     abs_y = patch_id_within_image // W_per_patch
+
+     return abs_x, abs_y
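# Editor's sketch (not part of the commit): expected output for a single image
# whose patch grid is H=2 rows by W=3 columns.
#
#   grid_hw = torch.tensor([[2, 3]])
#   abs_x, abs_y = build_abs_positions_from_grid_hw(grid_hw)
#   abs_x  -> tensor([0, 1, 2, 0, 1, 2])   # column index, row-major order
#   abs_y  -> tensor([0, 0, 0, 1, 1, 1])   # row index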
+
+
+ class NEOChatModel(PreTrainedModel):
+     config_class = NEOChatConfig
+     main_input_name = 'pixel_values'
+     base_model_prefix = 'language_model'
+     _supports_flash_attn_2 = True
+     supports_gradient_checkpointing = True
+     _no_split_modules = [
+         "NEOVisionModel",
+         "Qwen3DecoderLayer",
+     ]
+
+     # support transformers >= 4.51
+     _tp_plan = ''
+
+     def __init__(self, config: NEOChatConfig, vision_model=None, language_model=None, use_flash_attn=True):
+         super().__init__(config)
+
+         assert version_cmp(transformers.__version__, '4.37.0', 'ge')
+         patch_size = config.vision_config.patch_size
+         self.patch_size = patch_size
+         self.template = config.template
+         self.downsample_ratio = config.downsample_ratio
+         config.llm_config._attn_implementation = 'eager'
+
+         if vision_model is not None:
+             self.vision_model = vision_model
+         else:
+             self.vision_model = NEOVisionModel(config.vision_config)
+         if language_model is not None:
+             self.language_model = language_model
+         else:
+             self.language_model = Qwen3ForCausalLM(config.llm_config)
+
+         self.img_context_token_id = None
+         self.img_start_token_id = None
+         self.conv_template = get_conv_template(self.template)
+         self.system_message = self.conv_template.system_message
+
+     def forward(
+             self,
+             pixel_values: torch.FloatTensor,
+             input_ids: torch.LongTensor = None,
+             attention_mask: Optional[torch.Tensor] = None,
+             position_ids: Optional[torch.LongTensor] = None,
+             image_flags: Optional[torch.LongTensor] = None,
+             past_key_values: Optional[List[torch.FloatTensor]] = None,
+             labels: Optional[torch.LongTensor] = None,
+             use_cache: Optional[bool] = None,
+             output_attentions: Optional[bool] = None,
+             output_hidden_states: Optional[bool] = None,
+             return_dict: Optional[bool] = None,
+     ) -> Union[Tuple, CausalLMOutputWithPast]:
+         # The training-style forward pass is disabled in this release; everything
+         # after the raise is unreachable and kept only for reference.
+         raise NotImplementedError('forward')
+         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+
+         image_flags = image_flags.squeeze(-1)
+         input_embeds = self.language_model.get_input_embeddings()(input_ids).clone()
+
+         vit_embeds = self.extract_feature(pixel_values)
+         vit_embeds = vit_embeds[image_flags == 1]
+
+         B, N, C = input_embeds.shape
+         input_embeds = input_embeds.reshape(B * N, C)
+
+         # Scatter the ViT patch embeddings into the slots held by <IMG_CONTEXT> tokens
+         input_ids = input_ids.reshape(B * N)
+         selected = (input_ids == self.img_context_token_id)
+         try:
+             input_embeds[selected] = input_embeds[selected] * 0.0 + vit_embeds.reshape(-1, C)
+         except Exception as e:
+             vit_embeds = vit_embeds.reshape(-1, C)
+             print(f'warning: {e}, input_embeds[selected].shape={input_embeds[selected].shape}, '
+                   f'vit_embeds.shape={vit_embeds.shape}')
+             n_token = min(selected.sum(), vit_embeds.size(0))
+             # NOTE: the chained indexing below assigns into a temporary copy; kept
+             # verbatim from the original (unreachable) code path.
+             input_embeds[selected][:n_token] = input_embeds[selected][:n_token] * 0.0 + vit_embeds[:n_token]
+
+         input_embeds = input_embeds.reshape(B, N, C)
+
+         outputs = self.language_model(
+             inputs_embeds=input_embeds,
+             attention_mask=attention_mask,
+             position_ids=position_ids,
+             past_key_values=past_key_values,
+             use_cache=use_cache,
+             output_attentions=output_attentions,
+             output_hidden_states=output_hidden_states,
+             return_dict=return_dict,
+         )
+         logits = outputs.logits
+
+         loss = None
+         if labels is not None:
+             # Shift so that tokens < n predict n
+             shift_logits = logits[..., :-1, :].contiguous()
+             shift_labels = labels[..., 1:].contiguous()
+             # Flatten the tokens
+             loss_fct = CrossEntropyLoss()
+             shift_logits = shift_logits.view(-1, self.language_model.config.vocab_size)
+             shift_labels = shift_labels.view(-1)
+             # Enable model parallelism
+             shift_labels = shift_labels.to(shift_logits.device)
+             loss = loss_fct(shift_logits, shift_labels)
+
+         if not return_dict:
+             output = (logits,) + outputs[1:]
+             return (loss,) + output if loss is not None else output
+
+         return CausalLMOutputWithPast(
+             loss=loss,
+             logits=logits,
+             past_key_values=outputs.past_key_values,
+             hidden_states=outputs.hidden_states,
+             attentions=outputs.attentions,
+         )
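# Editor's note (not part of the commit): the label shift in forward() above is
# the standard next-token objective. For a 4-token sequence [a, b, c, d]:
#   shift_logits keeps the predictions made at positions [a, b, c]
#   shift_labels keeps the matching targets             [b, c, d]
# so the logits at position t are scored against the token at position t + 1.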
+
+     def extract_feature(self, pixel_values, grid_hw=None):
+         return self.vision_model(pixel_values=pixel_values,
+                                  output_hidden_states=False,
+                                  return_dict=True,
+                                  grid_hw=grid_hw).last_hidden_state
+
+     def batch_chat(self, tokenizer, pixel_values, questions, generation_config, num_patches_list=None,
+                    history=None, return_history=False, IMG_START_TOKEN='<img>', IMG_END_TOKEN='</img>',
+                    IMG_CONTEXT_TOKEN='<IMG_CONTEXT>', verbose=False, image_counts=None):
+         # Batch chat is disabled in this release; everything after the raise is
+         # unreachable and kept only for reference.
+         raise NotImplementedError('batch_chat')
+         if history is not None or return_history:
+             print('Multi-turn chat is not yet supported in batch_chat.')
+             raise NotImplementedError
+
+         if image_counts is not None:
+             num_patches_list = image_counts
+             print('Warning: `image_counts` is deprecated. Please use `num_patches_list` instead.')
+
+         img_context_token_id = tokenizer.convert_tokens_to_ids(IMG_CONTEXT_TOKEN)
+         self.img_context_token_id = img_context_token_id
+
+         if verbose and pixel_values is not None:
+             image_bs = pixel_values.shape[0]
+             print(f'dynamic ViT batch size: {image_bs}')
+
+         queries = []
+         for idx, num_patches in enumerate(num_patches_list):
+             question = questions[idx]
+             if pixel_values is not None and '<image>' not in question:
+                 question = '<image>\n' + question
+             template = get_conv_template(self.template)
+             template.system_message = self.system_message
+             template.append_message(template.roles[0], question)
+             template.append_message(template.roles[1], None)
+             query = template.get_prompt()
+
+             image_tokens = IMG_START_TOKEN + IMG_CONTEXT_TOKEN + IMG_END_TOKEN
+             query = query.replace('<image>', image_tokens, 1)
+             queries.append(query)
+
+         tokenizer.padding_side = 'left'
+         model_inputs = tokenizer(queries, return_tensors='pt', padding=True)
+         input_ids = model_inputs['input_ids'].to(self.device)
+         attention_mask = model_inputs['attention_mask'].to(self.device)
+         eos_token_id = tokenizer.convert_tokens_to_ids(template.sep.strip())
+         generation_config['eos_token_id'] = eos_token_id
+         generation_output = self.generate(
+             pixel_values=pixel_values,
+             input_ids=input_ids,
+             attention_mask=attention_mask,
+             **generation_config
+         )
+         responses = tokenizer.batch_decode(generation_output, skip_special_tokens=True)
+         responses = [response.split(template.sep.strip())[0].strip() for response in responses]
+         return responses
233
+
234
+ def chat(self, tokenizer, pixel_values, question, generation_config, history=None, return_history=False, grid_hw=None,
235
+ IMG_START_TOKEN='<img>', IMG_END_TOKEN='</img>', IMG_CONTEXT_TOKEN='<IMG_CONTEXT>', verbose=False):
236
+
237
+ if history is None and pixel_values is not None and '<image>' not in question:
238
+ question = '<image>\n' + question
239
+
240
+ img_context_token_id = tokenizer.convert_tokens_to_ids(IMG_CONTEXT_TOKEN)
241
+ self.img_context_token_id = img_context_token_id
242
+ self.img_start_token_id = tokenizer.convert_tokens_to_ids(IMG_START_TOKEN)
243
+
244
+ template = get_conv_template(self.template)
245
+ template.system_message = self.system_message
246
+ eos_token_id = tokenizer.convert_tokens_to_ids(template.sep.strip())
247
+
248
+ history = [] if history is None else history
249
+ for (old_question, old_answer) in history:
250
+ template.append_message(template.roles[0], old_question)
251
+ template.append_message(template.roles[1], old_answer)
252
+ template.append_message(template.roles[0], question)
253
+ template.append_message(template.roles[1], None)
254
+ query = template.get_prompt()
255
+
256
+ if verbose and pixel_values is not None:
257
+ print(f'dynamic image size: {grid_hw * self.patch_size}')
258
+
259
+ for i in range(grid_hw.shape[0]):
260
+ num_patch_token = int(grid_hw[i, 0] * grid_hw[i, 1] * self.downsample_ratio**2)
261
+ image_tokens = IMG_START_TOKEN + IMG_CONTEXT_TOKEN * num_patch_token + IMG_END_TOKEN
262
+ query = query.replace('<image>', image_tokens, 1)
263
+
264
+ model_inputs = tokenizer(query, return_tensors='pt')
265
+ input_ids = model_inputs['input_ids'].to(self.device)
266
+ attention_mask = model_inputs['attention_mask'].to(self.device)
267
+ generation_config['eos_token_id'] = eos_token_id
268
+ generation_output = self.generate(
269
+ pixel_values=pixel_values,
270
+ input_ids=input_ids,
271
+ grid_hw=grid_hw,
272
+ attention_mask=attention_mask,
273
+ **generation_config
274
+ )
275
+ response = tokenizer.batch_decode(generation_output, skip_special_tokens=True)[0]
276
+ response = response.split(template.sep.strip())[0].strip()
277
+ history.append((question, response))
278
+ if return_history:
279
+ return response, history
280
+ else:
281
+ query_to_print = query.replace(IMG_CONTEXT_TOKEN, '')
282
+ query_to_print = query_to_print.replace(f'{IMG_START_TOKEN}{IMG_END_TOKEN}', '<image>')
283
+ if verbose:
284
+ print(query_to_print, response)
285
+ return response
286
+
287
+ @torch.no_grad()
288
+ def generate(
289
+ self,
290
+ pixel_values: Optional[torch.FloatTensor] = None,
291
+ input_ids: Optional[torch.FloatTensor] = None,
292
+ grid_hw: Optional[torch.LongTensor] = None,
293
+ attention_mask: Optional[torch.LongTensor] = None,
294
+ visual_features: Optional[torch.FloatTensor] = None,
295
+ generation_config: Optional[GenerationConfig] = None,
296
+ output_hidden_states: Optional[bool] = None,
297
+ **generate_kwargs,
298
+ ) -> torch.LongTensor:
299
+ assert input_ids.shape[0] == 1
300
+ assert self.img_context_token_id is not None
301
+ indexes = self.get_thw_indexes(input_ids[0], grid_hw)
302
+ if pixel_values is not None:
303
+ if visual_features is not None:
304
+ vit_embeds = visual_features
305
+ else:
306
+ vit_embeds = self.extract_feature(pixel_values, grid_hw=grid_hw)
307
+
308
+ input_embeds = self.language_model.get_input_embeddings()(input_ids)
309
+ B, N, C = input_embeds.shape
310
+ input_embeds = input_embeds.reshape(B * N, C)
311
+
312
+ input_ids = input_ids.reshape(B * N)
313
+ selected = (input_ids == self.img_context_token_id)
314
+ assert selected.sum() != 0
315
+ input_embeds[selected] = vit_embeds.reshape(-1, C).to(input_embeds.device)
316
+
317
+ input_embeds = input_embeds.reshape(B, N, C)
318
+ else:
319
+ input_embeds = self.language_model.get_input_embeddings()(input_ids)
320
+
321
+ outputs = self.language_model.generate(
322
+ inputs_embeds=input_embeds,
323
+ indexes=indexes,
324
+ attention_mask=attention_mask,
325
+ generation_config=generation_config,
326
+ output_hidden_states=output_hidden_states,
327
+ use_cache=True,
328
+ **generate_kwargs,
329
+ )
330
+
331
+ return outputs
332
+
333
+ @property
334
+ def lm_head(self):
335
+ return self.language_model.get_output_embeddings()
336
+
337
+ def get_output_embeddings(self):
338
+ return self.language_model.get_output_embeddings()
339
+
340
+ def get_input_embeddings(self):
341
+ return self.language_model.get_input_embeddings()
342
+
343
+ def set_input_embeddings(self, value):
344
+ return self.language_model.set_input_embeddings(value)
345
+
346
+ def set_output_embeddings(self, value):
347
+ return self.language_model.set_output_embeddings(value)
348
+
349
+ def get_thw_indexes(self, input_ids, grid_hw):
350
+ img_start_shift = torch.cat([torch.zeros(1, dtype=torch.long).to(input_ids.device),
351
+ (input_ids == self.img_start_token_id).long()], dim=0)[:-1]
352
+ not_img_token = (input_ids != self.img_context_token_id).long()
353
+ t_indexes = ((img_start_shift + not_img_token).cumsum(0) - 1)
354
+ h_indexes = torch.zeros_like(t_indexes).to(t_indexes.device)
355
+ w_indexes = torch.zeros_like(t_indexes).to(t_indexes.device)
356
+
357
+ selected = (input_ids == self.img_context_token_id)
358
+ if selected.long().sum() > 0:
359
+ abs_pos_w, abs_pos_h = build_abs_positions_from_grid_hw(
360
+ grid_hw // int(1 / self.downsample_ratio), device=t_indexes.device)
361
+ h_indexes[selected] = abs_pos_h.to(t_indexes.device, t_indexes.dtype)
362
+ w_indexes[selected] = abs_pos_w.to(t_indexes.device, t_indexes.dtype)
363
+ return torch.stack([t_indexes, h_indexes, w_indexes], dim=0)
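For intuition, the t-index construction collapses every image into a single "time" step: text tokens advance the temporal index by one, while all `<IMG_CONTEXT>` tokens of one image share it, and their h/w indexes are then filled from `build_abs_positions_from_grid_hw`. A standalone re-implementation of the same cumsum trick with made-up token ids:

```python
import torch

IMG_START, IMG_CTX = 100, 101  # illustrative token ids, not the real vocabulary
input_ids = torch.tensor([7, IMG_START, IMG_CTX, IMG_CTX, IMG_CTX, IMG_CTX, 8, 9])

img_start_shift = torch.cat([torch.zeros(1, dtype=torch.long),
                             (input_ids == IMG_START).long()])[:-1]
not_img_token = (input_ids != IMG_CTX).long()
t_indexes = (img_start_shift + not_img_token).cumsum(0) - 1
print(t_indexes)  # tensor([0, 1, 2, 2, 2, 2, 3, 4])
```

All four image-context tokens land on temporal index 2, so the language model sees the image as one time step with 2D spatial coordinates on top.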
modeling_neo_vit.py ADDED
@@ -0,0 +1,235 @@
+from typing import Optional, Tuple, Union
+
+import torch
+import torch.utils.checkpoint
+from torch import nn
+from transformers.modeling_outputs import BaseModelOutputWithPooling
+from transformers.modeling_utils import PreTrainedModel
+
+from .configuration_neo_vit import NEOVisionConfig
+
+
+def precompute_rope_freqs_sincos(
+    dim: int, max_position: int, base: float = 10000.0, device=None
+):
+    """Precompute the cos and sin tables for 1D RoPE."""
+    inv_freq = 1.0 / (base ** (torch.arange(0, dim, 2, device=device).float() / dim))
+    t = torch.arange(max_position, device=device).type_as(inv_freq)
+    freqs = torch.outer(t, inv_freq)
+    return torch.cos(freqs), torch.sin(freqs)
+
+
+def build_abs_positions_from_grid_hw(grid_hw: torch.Tensor, device=None):
+    """
+    Compute patch coordinates (x, y)
+
+    Args:
+        grid_hw: (B, 2) tensor representing (H, W) per image
+    """
+    device = grid_hw.device
+    B = grid_hw.shape[0]
+
+    # Get the number of patches per image
+    H = grid_hw[:, 0]
+    W = grid_hw[:, 1]
+    N = H * W
+    N_total = N.sum()
+
+    # Create the batch index for each patch (B x patch count)
+    patch_to_sample = torch.repeat_interleave(torch.arange(B, device=device), N)  # (N_total,)
+
+    # Generate intra-image patch index (row-major order)
+    patch_id_within_image = torch.arange(N_total, device=device)
+    patch_id_within_image = patch_id_within_image - torch.cumsum(
+        torch.cat([torch.tensor([0], device=device), N[:-1]]), dim=0
+    )[patch_to_sample]
+
+    # Get H/W for each patch according to its image
+    W_per_patch = W[patch_to_sample]
+    abs_x = patch_id_within_image % W_per_patch
+    abs_y = patch_id_within_image // W_per_patch
+
+    return abs_x, abs_y
+
+
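A quick sanity check of the coordinate helper above (assumes `build_abs_positions_from_grid_hw` is in scope; a single 2x3 grid):

```python
import torch

grid_hw = torch.tensor([[2, 3]])  # one image: H=2 rows, W=3 columns of patches
abs_x, abs_y = build_abs_positions_from_grid_hw(grid_hw)
print(abs_x)  # tensor([0, 1, 2, 0, 1, 2])  column index of each patch
print(abs_y)  # tensor([0, 0, 0, 1, 1, 1])  row index of each patch
```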
+def apply_rotary_emb_1d(
+    x: torch.Tensor,
+    cos_cached: torch.Tensor,
+    sin_cached: torch.Tensor,
+    positions: torch.Tensor,
+):
+    """Apply 1D RoPE to a slice of the input tensor."""
+    # x: (..., seq_len, dim_part)
+    # positions: (..., seq_len)
+    # cos_cached: (max_pos, dim_part / 2)
+
+    cos = cos_cached[positions]  # Shape: (positions.shape, dim_part / 2)
+    sin = sin_cached[positions]  # Shape: (positions.shape, dim_part / 2)
+
+    x1 = x[..., 0::2]
+    x2 = x[..., 1::2]
+
+    rotated_x1 = x1 * cos - x2 * sin
+    rotated_x2 = x1 * sin + x2 * cos
+
+    x_rotated = torch.empty_like(x)
+    x_rotated[..., 0::2] = rotated_x1
+    x_rotated[..., 1::2] = rotated_x2
+    return x_rotated
+
+
+def apply_2d_rotary_pos_emb(
+    x: torch.Tensor,
+    cos_cached_x: torch.Tensor,
+    sin_cached_x: torch.Tensor,
+    cos_cached_y: torch.Tensor,
+    sin_cached_y: torch.Tensor,
+    abs_positions_x: torch.Tensor,
+    abs_positions_y: torch.Tensor
+):
+    """Apply 2D RoPE to the input tensor x."""
+    dim = x.shape[-1]
+    dim_half = dim // 2
+
+    # Split the embedding in half and give each half its own RoPE direction:
+    # here the first half encodes the X coordinate and the second half the Y
+    # coordinate (the assignment is arbitrary but must stay consistent).
+    x_part_1 = x[..., :dim_half]
+    x_part_2 = x[..., dim_half:]
+
+    # Apply the rotation associated with abs_positions_x to x_part_1
+    rotated_part_1 = apply_rotary_emb_1d(
+        x_part_1, cos_cached_x, sin_cached_x, abs_positions_x
+    )
+    # Apply the rotation associated with abs_positions_y to x_part_2
+    rotated_part_2 = apply_rotary_emb_1d(
+        x_part_2, cos_cached_y, sin_cached_y, abs_positions_y
+    )
+
+    # Concatenate the halves back in the same order they were split.
+    return torch.cat((rotated_part_1, rotated_part_2), dim=-1)
+
+
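Because each half of the embedding is only ever rotated (in 2D planes), 2D RoPE leaves vector norms unchanged. A small numerical check, assuming the two helpers above are in scope (sizes are illustrative):

```python
import torch

dim, max_pos = 8, 16
cos, sin = precompute_rope_freqs_sincos(dim // 2, max_pos)  # tables for one half
x = torch.randn(6, dim)                                     # 6 patches
pos_x = torch.tensor([0, 1, 2, 0, 1, 2])                    # e.g. from a 2x3 grid
pos_y = torch.tensor([0, 0, 0, 1, 1, 1])

out = apply_2d_rotary_pos_emb(x, cos, sin, cos, sin, pos_x, pos_y)
print(torch.allclose(out.norm(dim=-1), x.norm(dim=-1), atol=1e-5))  # True
```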
+class NEOVisionEmbeddings(nn.Module):
+    """
+    Embedding Module for Vision.
+    """
+
+    def __init__(self, config: NEOVisionConfig):
+        super().__init__()
+        self.config = config
+        self.embed_dim = config.hidden_size
+        self.llm_embed_dim = config.llm_hidden_size[0]
+        self.downsample_factor = int(1 / config.downsample_ratio[0])
+        self.patch_size = config.patch_size
+
+        self.patch_embedding = nn.Conv2d(
+            in_channels=config.num_channels, out_channels=self.embed_dim, kernel_size=self.patch_size, stride=self.patch_size
+        )
+        self.dense_embedding = nn.Conv2d(
+            in_channels=self.embed_dim, out_channels=self.llm_embed_dim, kernel_size=self.downsample_factor, stride=self.downsample_factor
+        )
+        self.gelu = nn.GELU()
+
+        self.rope_dim_part = self.embed_dim // 2
+        cos_x, sin_x = precompute_rope_freqs_sincos(
+            self.rope_dim_part, config.max_position_embeddings_vision, base=config.rope_theta_vision, device=None
+        )
+        cos_y, sin_y = precompute_rope_freqs_sincos(
+            self.rope_dim_part, config.max_position_embeddings_vision, base=config.rope_theta_vision, device=None
+        )
+
+        self.register_buffer("cos_cached_x", cos_x, persistent=False)
+        self.register_buffer("sin_cached_x", sin_x, persistent=False)
+        self.register_buffer("cos_cached_y", cos_y, persistent=False)
+        self.register_buffer("sin_cached_y", sin_y, persistent=False)
+
+    def _apply_2d_rotary_pos_emb(self, patch_embeds, grid_hw):
+        """
+        Apply 2D Rotary Position Embedding to the patch embeddings.
+        """
+        abs_pos_x, abs_pos_y = build_abs_positions_from_grid_hw(grid_hw, device=patch_embeds.device)
+        embeddings = apply_2d_rotary_pos_emb(
+            patch_embeds.to(torch.float32),  # RoPE calculations are often more stable in float32
+            self.cos_cached_x, self.sin_cached_x,
+            self.cos_cached_y, self.sin_cached_y,
+            abs_pos_x,
+            abs_pos_y
+        ).to(self.patch_embedding.weight.dtype)
+        return embeddings
+
+    def forward(self, pixel_values: torch.FloatTensor, grid_hw=None) -> torch.Tensor:
+        pixel_values = pixel_values.view(
+            -1,
+            3,
+            self.patch_size,
+            self.patch_size,
+        )  # e.g. [28072, 768] -> [28072, 3, 16, 16]
+        patch_embeds = self.gelu(self.patch_embedding(pixel_values)).view(-1, self.embed_dim)
+        self.cos_cached_x = self.cos_cached_x.to(patch_embeds.device)
+        self.sin_cached_x = self.sin_cached_x.to(patch_embeds.device)
+        self.cos_cached_y = self.cos_cached_y.to(patch_embeds.device)
+        self.sin_cached_y = self.sin_cached_y.to(patch_embeds.device)
+        patch_embeds = self._apply_2d_rotary_pos_emb(patch_embeds, grid_hw)  # e.g. [28072, 1024]
+        assert (grid_hw[:, 0] * grid_hw[:, 1]).sum() == patch_embeds.shape[0]
+
+        patches_list = []
+        cur_position = 0
+        for i in range(grid_hw.shape[0]):
+            h, w = grid_hw[i]
+            patches_per_img = patch_embeds[cur_position : cur_position + h * w].view(h, w, -1).unsqueeze(0)
+            patches_per_img = self.dense_embedding(patches_per_img.permute(0, 3, 1, 2))
+            patches_per_img = patches_per_img.permute(0, 2, 3, 1)
+            patches_list.append(patches_per_img.view(-1, patches_per_img.shape[-1]))
+            cur_position += h * w
+
+        embeddings = torch.cat(patches_list, dim=0)  # (N_total // downsample_factor**2, C)
+
+        assert cur_position == patch_embeds.shape[0]
+        assert embeddings.shape[0] == int(patch_embeds.shape[0] / self.downsample_factor**2)
+
+        return embeddings
+
+
+class NEOVisionModel(PreTrainedModel):
+    main_input_name = 'pixel_values'
+    _supports_flash_attn_2 = True
+    supports_gradient_checkpointing = True
+    config_class = NEOVisionConfig
+    # support transformers 4.51.+
+    _tp_plan = ''
+
+    def __init__(self, config: NEOVisionConfig):
+        super().__init__(config)
+        self.config = config
+
+        self.embeddings = NEOVisionEmbeddings(config)
+
+    def forward(
+        self,
+        pixel_values: Optional[torch.FloatTensor] = None,
+        output_hidden_states: Optional[bool] = None,
+        return_dict: Optional[bool] = None,
+        pixel_embeds: Optional[torch.FloatTensor] = None,
+        grid_hw: Optional[torch.Tensor] = None
+    ) -> Union[Tuple, BaseModelOutputWithPooling]:
+        output_hidden_states = (
+            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
+        )
+        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
+
+        if pixel_values is None and pixel_embeds is None:
+            raise ValueError('You have to specify pixel_values or pixel_embeds')
+
+        if pixel_embeds is not None:
+            hidden_states = pixel_embeds
+        else:
+            assert pixel_values.dim() == 2, f"pixel_values must be 2D for native resolution, got: {pixel_values.dim()}"
+            hidden_states = self.embeddings(pixel_values, grid_hw=grid_hw)
+
+        return BaseModelOutputWithPooling(
+            last_hidden_state=hidden_states,
+            pooler_output=None,
+            hidden_states=None,
+            attentions=None,
+        )
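End to end, the vision tower consumes pre-patchified pixels, a `(num_patches, 3 * patch_size**2)` float tensor plus the per-image `grid_hw`, and returns one row per downsampled patch. A shape-only sketch (grid values and patch size are invented; the commented lines assume a `NEOVisionConfig` from `configuration_neo_vit.py` is available):

```python
import torch

# Two images with 2x4 and 2x2 patch grids, 16x16 RGB patches.
grid_hw = torch.tensor([[2, 4], [2, 2]])
num_patches = int((grid_hw[:, 0] * grid_hw[:, 1]).sum())  # 8 + 4 = 12
pixel_values = torch.randn(num_patches, 3 * 16 * 16)      # matches the .view(-1, 3, 16, 16) above

# model = NEOVisionModel(config)  # config: NEOVisionConfig from configuration_neo_vit.py
# out = model(pixel_values=pixel_values, grid_hw=grid_hw).last_hidden_state
# With downsample_factor == 2 each grid shrinks to 1x2 and 1x1, so `out` would
# have 2 + 1 = 3 rows of width config.llm_hidden_size[0].
```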
modeling_qwen3.py ADDED
@@ -0,0 +1,594 @@
+from typing import Callable, Optional, Union
+
+import torch
+from torch import nn
+
+import copy
+from transformers.activations import ACT2FN
+from transformers.cache_utils import Cache, DynamicCache
+from transformers.generation import GenerationMixin
+from transformers.integrations import use_kernel_forward_from_hub
+from transformers.masking_utils import create_causal_mask
+from transformers.modeling_flash_attention_utils import FlashAttentionKwargs
+from transformers.modeling_layers import (
+    GenericForQuestionAnswering,
+    GenericForSequenceClassification,
+    GenericForTokenClassification,
+    GradientCheckpointingLayer,
+)
+from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast
+from transformers.modeling_rope_utils import ROPE_INIT_FUNCTIONS, dynamic_rope_update
+from transformers.modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
+from transformers.processing_utils import Unpack
+from transformers.utils import TransformersKwargs, auto_docstring, can_return_tuple
+from transformers.utils.deprecation import deprecate_kwarg
+from transformers.utils.generic import check_model_inputs
+from transformers import Qwen3Config
+
+
+def create_block_causal_mask(index: torch.Tensor):
+    """
+    index: (L,)
+    return: (1, 1, L, L) block-wise causal attention mask
+    """
+    L = index.size(0)
+    idx_i = index.unsqueeze(1).expand(L, L)
+    idx_j = index.unsqueeze(0).expand(L, L)
+
+    arange = torch.arange(L, device=index.device)
+    mask = (idx_j == idx_i) | (arange.unsqueeze(0) <= arange.unsqueeze(1))
+
+    return torch.where(mask[None, None, :, :] > 0, torch.tensor(0.0), torch.tensor(float('-inf')))
+
+
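The block-causal mask keeps ordinary causal attention for text but lets all tokens sharing a block index (for example, the patches of one image) attend to each other bidirectionally. A small check with made-up indexes (1 marks allowed positions):

```python
import torch

index = torch.tensor([0, 1, 1, 1, 2])        # token 0 is text, tokens 1-3 one image, token 4 text
mask = create_block_causal_mask(index)[0, 0]
print((mask == 0).int())
# tensor([[1, 0, 0, 0, 0],
#         [1, 1, 1, 1, 0],
#         [1, 1, 1, 1, 0],
#         [1, 1, 1, 1, 0],
#         [1, 1, 1, 1, 1]])
```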
+def visualize_mask(mask: torch.Tensor, i: int = 0, j: int = 12):
+    """
+    mask: (1, 1, L, L)
+    """
+    submask = torch.where(mask[0, 0, :, :] == 0, torch.tensor(1.0), torch.tensor(0.0))
+    submask = submask[i:j, i:j].int().cpu().numpy()  # slice the 2D submask, not the raw 4D mask
+    for row in submask:
+        print(" ".join(map(str, row)))
+
+
+@use_kernel_forward_from_hub("RMSNorm")
+class Qwen3RMSNorm(nn.Module):
+    def __init__(self, hidden_size, eps: float = 1e-6) -> None:
+        """
+        Qwen3RMSNorm is equivalent to T5LayerNorm
+        """
+        super().__init__()
+        self.weight = nn.Parameter(torch.ones(hidden_size))
+        self.variance_epsilon = eps
+
+    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
+        input_dtype = hidden_states.dtype
+        hidden_states = hidden_states.to(torch.float32)
+        variance = hidden_states.pow(2).mean(-1, keepdim=True)
+        hidden_states = hidden_states * torch.rsqrt(variance + self.variance_epsilon)
+        return self.weight * hidden_states.to(input_dtype)
+
+    def extra_repr(self):
+        return f"{tuple(self.weight.shape)}, eps={self.variance_epsilon}"
+
+
+class Qwen3MLP(nn.Module):
+    def __init__(self, config):
+        super().__init__()
+        self.config = config
+        self.hidden_size = config.hidden_size
+        self.intermediate_size = config.intermediate_size
+        self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+        self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False)
+        self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False)
+        self.act_fn = ACT2FN[config.hidden_act]
+
+    def forward(self, x):
+        down_proj = self.down_proj(self.act_fn(self.gate_proj(x)) * self.up_proj(x))
+        return down_proj
+
+
+def rotate_half(x):
+    """Rotates half the hidden dims of the input."""
+    x1 = x[..., : x.shape[-1] // 2]
+    x2 = x[..., x.shape[-1] // 2 :]
+    return torch.cat((-x2, x1), dim=-1)
+
+
+def apply_rotary_pos_emb(q, k, cos, sin, position_ids=None, unsqueeze_dim=1):
+    """Applies Rotary Position Embedding to the query and key tensors.
+
+    Args:
+        q (`torch.Tensor`): The query tensor.
+        k (`torch.Tensor`): The key tensor.
+        cos (`torch.Tensor`): The cosine part of the rotary embedding.
+        sin (`torch.Tensor`): The sine part of the rotary embedding.
+        position_ids (`torch.Tensor`, *optional*):
+            Deprecated and unused.
+        unsqueeze_dim (`int`, *optional*, defaults to 1):
+            The 'unsqueeze_dim' argument specifies the dimension along which to unsqueeze cos[position_ids] and
+            sin[position_ids] so that they can be properly broadcasted to the dimensions of q and k. For example, note
+            that cos[position_ids] and sin[position_ids] have the shape [batch_size, seq_len, head_dim]. Then, if q and
+            k have the shape [batch_size, heads, seq_len, head_dim], then setting unsqueeze_dim=1 makes
+            cos[position_ids] and sin[position_ids] broadcastable to the shapes of q and k. Similarly, if q and k have
+            the shape [batch_size, seq_len, heads, head_dim], then set unsqueeze_dim=2.
+    Returns:
+        `tuple(torch.Tensor)` comprising of the query and key tensors rotated using the Rotary Position Embedding.
+    """
+    cos = cos.unsqueeze(unsqueeze_dim)
+    sin = sin.unsqueeze(unsqueeze_dim)
+    q_embed = (q * cos) + (rotate_half(q) * sin)
+    k_embed = (k * cos) + (rotate_half(k) * sin)
+    return q_embed, k_embed
+
+
+def repeat_kv(hidden_states: torch.Tensor, n_rep: int) -> torch.Tensor:
+    """
+    This is the equivalent of torch.repeat_interleave(x, dim=1, repeats=n_rep). The hidden states go from (batch,
+    num_key_value_heads, seqlen, head_dim) to (batch, num_attention_heads, seqlen, head_dim)
+    """
+    batch, num_key_value_heads, slen, head_dim = hidden_states.shape
+    if n_rep == 1:
+        return hidden_states
+    hidden_states = hidden_states[:, :, None, :, :].expand(batch, num_key_value_heads, n_rep, slen, head_dim)
+    return hidden_states.reshape(batch, num_key_value_heads * n_rep, slen, head_dim)
+
+
+def eager_attention_forward(
+    module: nn.Module,
+    query: torch.Tensor,
+    key: torch.Tensor,
+    value: torch.Tensor,
+    attention_mask: Optional[torch.Tensor],
+    scaling: float,
+    dropout: float = 0.0,
+    **kwargs: Unpack[TransformersKwargs],
+):
+    key_states = repeat_kv(key, module.num_key_value_groups)
+    value_states = repeat_kv(value, module.num_key_value_groups)
+
+    attn_weights = torch.matmul(query, key_states.transpose(2, 3)) * scaling
+    if attention_mask is not None:
+        causal_mask = attention_mask[:, :, :, : key_states.shape[-2]]
+        attn_weights = attn_weights + causal_mask
+
+    attn_weights = nn.functional.softmax(attn_weights, dim=-1, dtype=torch.float32).to(query.dtype)
+    attn_weights = nn.functional.dropout(attn_weights, p=dropout, training=module.training)
+    attn_output = torch.matmul(attn_weights, value_states)
+    attn_output = attn_output.transpose(1, 2).contiguous()
+
+    return attn_output, attn_weights
+
+
+class Qwen3RotaryEmbedding(nn.Module):
+    inv_freq: torch.Tensor  # fix linting for `register_buffer`
+
+    def __init__(self, config: Qwen3Config, device=None):
+        super().__init__()
+        # BC: "rope_type" was originally "type"
+        if hasattr(config, "rope_scaling") and isinstance(config.rope_scaling, dict):
+            self.rope_type = config.rope_scaling.get("rope_type", config.rope_scaling.get("type"))
+        else:
+            self.rope_type = "default"
+        self.max_seq_len_cached = config.max_position_embeddings
+        self.original_max_seq_len = config.max_position_embeddings
+
+        self.config = config
+        self.rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
+
+        inv_freq, self.attention_scaling = self.rope_init_fn(self.config, device)
+        self.register_buffer("inv_freq", inv_freq, persistent=False)
+        self.original_inv_freq = self.inv_freq
+
+    @torch.no_grad()
+    @dynamic_rope_update  # power user: used with advanced RoPE types (e.g. dynamic rope)
+    def forward(self, x, position_ids):
+        inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
+        position_ids_expanded = position_ids[:, None, :].float()
+
+        device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
+        with torch.autocast(device_type=device_type, enabled=False):  # Force float32
+            freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
+            emb = torch.cat((freqs, freqs), dim=-1)
+            cos = emb.cos() * self.attention_scaling
+            sin = emb.sin() * self.attention_scaling
+
+        return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)
+
+
+class Qwen3Attention(nn.Module):
+    """Multi-headed attention from 'Attention Is All You Need' paper"""
+
+    def __init__(self, config: Qwen3Config, layer_idx: int):
+        super().__init__()
+        self.config = config
+        self.layer_idx = layer_idx
+        self.head_dim = getattr(config, "head_dim", config.hidden_size // config.num_attention_heads)
+        self.num_key_value_groups = config.num_attention_heads // config.num_key_value_heads
+        self.scaling = self.head_dim**-0.5
+        self.attention_dropout = config.attention_dropout
+        self.is_causal = True
+
+        self.q_proj = nn.Linear(
+            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
+        )
+        self.q_proj_hw = nn.Linear(
+            config.hidden_size, config.num_attention_heads * self.head_dim, bias=config.attention_bias
+        )
+
+        self.k_proj = nn.Linear(
+            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
+        )
+        self.k_proj_hw = nn.Linear(
+            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
+        )
+
+        self.v_proj = nn.Linear(
+            config.hidden_size, config.num_key_value_heads * self.head_dim, bias=config.attention_bias
+        )
+        self.o_proj = nn.Linear(
+            config.num_attention_heads * self.head_dim, config.hidden_size, bias=config.attention_bias
+        )
+
+        self.q_norm = Qwen3RMSNorm(self.head_dim, eps=config.rms_norm_eps)  # unlike olmo, only on the head dim!
+        self.q_norm_h = Qwen3RMSNorm(self.head_dim // 2, eps=config.rms_norm_eps)
+        self.q_norm_w = Qwen3RMSNorm(self.head_dim // 2, eps=config.rms_norm_eps)
+
+        self.k_norm = Qwen3RMSNorm(self.head_dim, eps=config.rms_norm_eps)  # thus post q_norm does not need reshape
+        self.k_norm_h = Qwen3RMSNorm(self.head_dim // 2, eps=config.rms_norm_eps)
+        self.k_norm_w = Qwen3RMSNorm(self.head_dim // 2, eps=config.rms_norm_eps)
+
+        self.sliding_window = config.sliding_window if config.layer_types[layer_idx] == "sliding_attention" else None
+
+        self.rotary_emb = Qwen3RotaryEmbedding(config=config)
+
+        hw_config = copy.deepcopy(config)
+        hw_config.head_dim = config.head_dim // 2
+        hw_config.rope_theta = config.rope_theta_hw
+        hw_config.max_position_embeddings = config.max_position_embeddings_hw
+        self.rotary_emb_hw = Qwen3RotaryEmbedding(config=hw_config)
+
+    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
+    def forward(
+        self,
+        hidden_states: torch.Tensor,
+        indexes: Optional[torch.LongTensor],
+        attention_mask: Optional[torch.Tensor],
+        past_key_values: Optional[Cache] = None,
+        cache_position: Optional[torch.LongTensor] = None,
+        **kwargs: Unpack[FlashAttentionKwargs],
+    ) -> tuple[torch.Tensor, Optional[torch.Tensor]]:
+        assert self.config._attn_implementation == "eager"
+        input_shape = hidden_states.shape[:-1]
+        hidden_shape = (*input_shape, -1, self.head_dim)
+
+        query_states_t = self.q_norm(self.q_proj(hidden_states).view(hidden_shape)).transpose(1, 2)
+        query_states_h, query_states_w = self.q_proj_hw(hidden_states).view(hidden_shape).transpose(1, 2).chunk(2, dim=-1)
+        query_states_h, query_states_w = self.q_norm_h(query_states_h), self.q_norm_w(query_states_w)
+
+        key_states_t = self.k_norm(self.k_proj(hidden_states).view(hidden_shape)).transpose(1, 2)
+        key_states_h, key_states_w = self.k_proj_hw(hidden_states).view(hidden_shape).transpose(1, 2).chunk(2, dim=-1)
+        key_states_h, key_states_w = self.k_norm_h(key_states_h), self.k_norm_w(key_states_w)
+
+        value_states = self.v_proj(hidden_states).view(hidden_shape).transpose(1, 2)
+
+        cos_t, sin_t = self.rotary_emb(hidden_states, indexes[0].unsqueeze(0))
+        query_states_t, key_states_t = apply_rotary_pos_emb(query_states_t, key_states_t, cos_t, sin_t)
+
+        cos_h, sin_h = self.rotary_emb_hw(hidden_states, indexes[1].unsqueeze(0))
+        query_states_h, key_states_h = apply_rotary_pos_emb(query_states_h, key_states_h, cos_h, sin_h)
+
+        cos_w, sin_w = self.rotary_emb_hw(hidden_states, indexes[2].unsqueeze(0))
+        query_states_w, key_states_w = apply_rotary_pos_emb(query_states_w, key_states_w, cos_w, sin_w)
+
+        query_states = torch.cat([query_states_t, query_states_h, query_states_w], dim=-1)
+        key_states = torch.cat([key_states_t, key_states_h, key_states_w], dim=-1)
+
+        if past_key_values is not None:
+            # sin and cos are specific to RoPE models; cache_position needed for the static cache
+            # cache_kwargs = {"sin": sin, "cos": cos, "cache_position": cache_position}
+            key_states, value_states = past_key_values.update(key_states, value_states, self.layer_idx, cache_kwargs=None)
+
+        attention_interface: Callable = eager_attention_forward
+        if self.config._attn_implementation != "eager":
+            attention_interface = ALL_ATTENTION_FUNCTIONS[self.config._attn_implementation]
+
+        attn_output, attn_weights = attention_interface(
+            self,
+            query_states,
+            key_states,
+            value_states,
+            attention_mask,
+            dropout=0.0 if not self.training else self.attention_dropout,
+            scaling=self.scaling,
+            sliding_window=self.sliding_window,  # diff with Llama
+            **kwargs,
+        )
+
+        attn_output = attn_output.reshape(*input_shape, -1).contiguous()
+        attn_output = self.o_proj(attn_output)
+        return attn_output, attn_weights
+
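Note the dimension bookkeeping in the attention above: per head, the temporal stream keeps the full `head_dim` while the h and w streams use `head_dim // 2` each, so the concatenated queries and keys are `2 * head_dim` wide; values stay `head_dim` wide, which is why `o_proj` keeps its usual shape. A shape-only sketch with toy sizes, independent of the classes above:

```python
import torch

B, H, L, D = 1, 4, 10, 64           # batch, heads, seq, head_dim
q_t = torch.randn(B, H, L, D)       # temporal component (full head_dim)
q_h = torch.randn(B, H, L, D // 2)  # height component
q_w = torch.randn(B, H, L, D // 2)  # width component

q = torch.cat([q_t, q_h, q_w], dim=-1)
print(q.shape)  # torch.Size([1, 4, 10, 128]) == 2 * head_dim
# Keys are assembled the same way, so q @ k^T sums three RoPE-rotated
# dot products, one per axis (t, h, w).
```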
+class Qwen3DecoderLayer(GradientCheckpointingLayer):
+    def __init__(self, config: Qwen3Config, layer_idx: int):
+        super().__init__()
+        self.hidden_size = config.hidden_size
+
+        self.self_attn = Qwen3Attention(config=config, layer_idx=layer_idx)
+
+        self.mlp = Qwen3MLP(config)
+        self.input_layernorm = Qwen3RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
+        self.post_attention_layernorm = Qwen3RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
+        self.attention_type = config.layer_types[layer_idx]
+
+    @deprecate_kwarg("past_key_value", new_name="past_key_values", version="4.58")
+    def forward(
+        self,
+        hidden_states: torch.Tensor,
+        indexes: Optional[torch.LongTensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_values: Optional[Cache] = None,
+        use_cache: Optional[bool] = False,
+        cache_position: Optional[torch.LongTensor] = None,
+        **kwargs: Unpack[TransformersKwargs],
+    ) -> torch.Tensor:
+        residual = hidden_states
+        hidden_states = self.input_layernorm(hidden_states)
+        # Self Attention
+        hidden_states, _ = self.self_attn(
+            hidden_states=hidden_states,
+            indexes=indexes,
+            attention_mask=attention_mask,
+            position_ids=position_ids,
+            past_key_values=past_key_values,
+            use_cache=use_cache,
+            cache_position=cache_position,
+            **kwargs,
+        )
+        hidden_states = residual + hidden_states
+
+        # Fully Connected
+        residual = hidden_states
+        hidden_states = self.post_attention_layernorm(hidden_states)
+        hidden_states = self.mlp(hidden_states)
+        hidden_states = residual + hidden_states
+        return hidden_states
+
+
+@auto_docstring
+class Qwen3PreTrainedModel(PreTrainedModel):
+    config: Qwen3Config
+    base_model_prefix = "model"
+    supports_gradient_checkpointing = True
+    _no_split_modules = ["Qwen3DecoderLayer"]
+    _skip_keys_device_placement = ["past_key_values"]
+    _supports_flash_attn = True
+    _supports_sdpa = True
+    _supports_flex_attn = True
+
+    _can_compile_fullgraph = True
+    _supports_attention_backend = True
+    _can_record_outputs = {
+        "hidden_states": Qwen3DecoderLayer,
+        "attentions": Qwen3Attention,
+    }
+
+
+@auto_docstring
+class Qwen3Model(Qwen3PreTrainedModel):
+    def __init__(self, config: Qwen3Config):
+        super().__init__(config)
+        self.padding_idx = config.pad_token_id
+        self.vocab_size = config.vocab_size
+
+        self.embed_tokens = nn.Embedding(config.vocab_size, config.hidden_size, self.padding_idx)
+        self.layers = nn.ModuleList(
+            [Qwen3DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)]
+        )
+        self.norm = Qwen3RMSNorm(config.hidden_size, eps=config.rms_norm_eps)
+
+        self.gradient_checkpointing = False
+        self.has_sliding_layers = "sliding_attention" in self.config.layer_types
+        self.current_index = -1
+
+        # Initialize weights and apply final processing
+        self.post_init()
+
+    @check_model_inputs
+    @auto_docstring
+    def forward(
+        self,
+        input_ids: Optional[torch.LongTensor] = None,
+        indexes: Optional[torch.LongTensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_values: Optional[Cache] = None,
+        inputs_embeds: Optional[torch.FloatTensor] = None,
+        use_cache: Optional[bool] = None,
+        cache_position: Optional[torch.LongTensor] = None,
+        **kwargs: Unpack[TransformersKwargs],
+    ) -> BaseModelOutputWithPast:
+        assert position_ids is not None
+        assert cache_position is not None
+        assert past_key_values is not None
+
+        if (input_ids is None) ^ (inputs_embeds is not None):
+            raise ValueError("You must specify exactly one of input_ids or inputs_embeds")
+
+        if inputs_embeds is None:
+            inputs_embeds = self.embed_tokens(input_ids)
+
+        if use_cache and past_key_values is None:
+            past_key_values = DynamicCache(config=self.config)
+
+        if cache_position is None:
+            past_seen_tokens = past_key_values.get_seq_length() if past_key_values is not None else 0
+            cache_position = torch.arange(
+                past_seen_tokens, past_seen_tokens + inputs_embeds.shape[1], device=inputs_embeds.device
+            )
+
+        if position_ids is None:
+            position_ids = cache_position.unsqueeze(0)
+
+        # It may already have been prepared by e.g. `generate`
+        if not isinstance(causal_mask_mapping := attention_mask, dict):
+            # Prepare mask arguments
+            if input_ids is not None:
+                mask_kwargs = {
+                    "config": self.config,
+                    "input_embeds": inputs_embeds,
+                    "attention_mask": attention_mask,
+                    "cache_position": cache_position,
+                    "past_key_values": past_key_values,
+                    "position_ids": position_ids,
+                }
+                # Create the masks
+                causal_mask_mapping = {
+                    "full_attention": create_causal_mask(**mask_kwargs),
+                }
+                self.current_index += 1
+                indexes = torch.LongTensor([[self.current_index], [0], [0]]).to(input_ids.device)
+            else:
+                causal_mask_mapping = {
+                    "full_attention": create_block_causal_mask(indexes[0]),
+                }
+                self.current_index = indexes[0].max()
+        else:
+            raise NotImplementedError('not isinstance(causal_mask_mapping := attention_mask, dict)')
+
+        # The sliding window alternating layers are not always activated depending on the config
+        # if self.has_sliding_layers:
+        #     causal_mask_mapping["sliding_attention"] = create_sliding_window_causal_mask(**mask_kwargs)
+
+        hidden_states = inputs_embeds
+
+        for decoder_layer in self.layers[: self.config.num_hidden_layers]:
+            hidden_states = decoder_layer(
+                hidden_states,
+                indexes=indexes,
+                attention_mask=causal_mask_mapping[decoder_layer.attention_type],
+                position_ids=position_ids,
+                past_key_values=past_key_values,
+                use_cache=use_cache,
+                cache_position=cache_position,
+                **kwargs,
+            )
+
+        hidden_states = self.norm(hidden_states)
+        return BaseModelOutputWithPast(
+            last_hidden_state=hidden_states,
+            past_key_values=past_key_values if use_cache else None,
+        )
+
+
+@auto_docstring
+class Qwen3ForCausalLM(Qwen3PreTrainedModel, GenerationMixin):
+    _tied_weights_keys = ["lm_head.weight"]
+    _tp_plan = {"lm_head": "colwise_rep"}
+    _pp_plan = {"lm_head": (["hidden_states"], ["logits"])}
+
+    def __init__(self, config):
+        super().__init__(config)
+        self.model = Qwen3Model(config)
+        self.vocab_size = config.vocab_size
+        self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False)
+
+        # Initialize weights and apply final processing
+        self.post_init()
+
+    @can_return_tuple
+    @auto_docstring
+    def forward(
+        self,
+        input_ids: Optional[torch.LongTensor] = None,
+        indexes: Optional[torch.LongTensor] = None,
+        attention_mask: Optional[torch.Tensor] = None,
+        position_ids: Optional[torch.LongTensor] = None,
+        past_key_values: Optional[Cache] = None,
+        inputs_embeds: Optional[torch.FloatTensor] = None,
+        labels: Optional[torch.LongTensor] = None,
+        use_cache: Optional[bool] = None,
+        cache_position: Optional[torch.LongTensor] = None,
+        logits_to_keep: Union[int, torch.Tensor] = 0,
+        **kwargs: Unpack[TransformersKwargs],
+    ) -> CausalLMOutputWithPast:
+        r"""
+        labels (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
+            Labels for computing the masked language modeling loss. Indices should either be in `[0, ...,
+            config.vocab_size]` or -100 (see `input_ids` docstring). Tokens with indices set to `-100` are ignored
+            (masked), the loss is only computed for the tokens with labels in `[0, ..., config.vocab_size]`.
+
+        Example:
+
+        ```python
+        >>> from transformers import AutoTokenizer, Qwen3ForCausalLM
+
+        >>> model = Qwen3ForCausalLM.from_pretrained("Qwen/Qwen3-8B")
+        >>> tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen3-8B")
+
+        >>> prompt = "Hey, are you conscious? Can you talk to me?"
+        >>> inputs = tokenizer(prompt, return_tensors="pt")
+
+        >>> # Generate
+        >>> generate_ids = model.generate(inputs.input_ids, max_length=30)
+        >>> tokenizer.batch_decode(generate_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
+        "Hey, are you conscious? Can you talk to me?\nI'm not conscious, but I can talk to you."
+        ```"""
+        outputs: BaseModelOutputWithPast = self.model(
+            input_ids=input_ids,
+            indexes=indexes,
+            attention_mask=attention_mask,
+            position_ids=position_ids,
+            past_key_values=past_key_values,
+            inputs_embeds=inputs_embeds,
+            use_cache=use_cache,
+            cache_position=cache_position,
+            **kwargs,
+        )
+
+        hidden_states = outputs.last_hidden_state
+        # Only compute necessary logits, and do not upcast them to float if we are not computing the loss
+        slice_indices = slice(-logits_to_keep, None) if isinstance(logits_to_keep, int) else logits_to_keep
+        logits = self.lm_head(hidden_states[:, slice_indices, :])
+
+        loss = None
+        if labels is not None:
+            loss = self.loss_function(logits=logits, labels=labels, vocab_size=self.config.vocab_size, **kwargs)
+
+        return CausalLMOutputWithPast(
+            loss=loss,
+            logits=logits,
+            past_key_values=outputs.past_key_values,
+            hidden_states=outputs.hidden_states,
+            attentions=outputs.attentions,
+        )
+
+
+class Qwen3ForSequenceClassification(GenericForSequenceClassification, Qwen3PreTrainedModel):
+    pass
+
+
+class Qwen3ForTokenClassification(GenericForTokenClassification, Qwen3PreTrainedModel):
+    pass
+
+
+class Qwen3ForQuestionAnswering(GenericForQuestionAnswering, Qwen3PreTrainedModel):
+    base_model_prefix = "transformer"  # For BC, where `transformer` was used instead of `model`
+
+
+__all__ = [
+    "Qwen3ForCausalLM",
+    "Qwen3ForQuestionAnswering",
+    "Qwen3PreTrainedModel",
+    "Qwen3Model",
+    "Qwen3ForSequenceClassification",
+    "Qwen3ForTokenClassification",
+]
special_tokens_map.json ADDED
@@ -0,0 +1,1900 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>",
+    {
+      "content": "<IMG_CONTEXT>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<img>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</img>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<quad>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</quad>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<ref>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</ref>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<box>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "</box>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|action_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|action_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|plugin|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<|interpreter|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_0>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_1>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_2>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_3>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_4>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_5>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_6>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_7>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_8>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_9>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_10>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_11>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_12>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_13>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_14>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_15>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_16>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_17>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_18>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_19>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_20>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_21>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_22>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_23>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_24>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_25>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_26>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_27>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_28>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_29>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_30>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_31>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_32>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_33>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_34>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_35>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_36>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_37>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_38>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_39>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_40>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_41>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_42>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_43>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_44>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_45>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_46>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_47>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_48>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_49>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_50>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_51>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_52>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_53>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_54>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_55>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_56>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_57>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_58>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_59>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_60>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_61>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_62>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_63>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_64>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_65>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_66>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_67>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_68>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_69>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_70>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_71>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_72>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_73>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_74>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_75>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_76>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_77>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_78>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_79>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_80>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_81>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_82>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_83>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_84>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_85>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false
+    },
+    {
+      "content": "<FAKE_PAD_86>",
711
+ "lstrip": false,
712
+ "normalized": false,
713
+ "rstrip": false,
714
+ "single_word": false
715
+ },
716
+ {
717
+ "content": "<FAKE_PAD_87>",
718
+ "lstrip": false,
719
+ "normalized": false,
720
+ "rstrip": false,
721
+ "single_word": false
722
+ },
723
+ {
724
+ "content": "<FAKE_PAD_88>",
725
+ "lstrip": false,
726
+ "normalized": false,
727
+ "rstrip": false,
728
+ "single_word": false
729
+ },
730
+ {
731
+ "content": "<FAKE_PAD_89>",
732
+ "lstrip": false,
733
+ "normalized": false,
734
+ "rstrip": false,
735
+ "single_word": false
736
+ },
737
+ {
738
+ "content": "<FAKE_PAD_90>",
739
+ "lstrip": false,
740
+ "normalized": false,
741
+ "rstrip": false,
742
+ "single_word": false
743
+ },
744
+ {
745
+ "content": "<FAKE_PAD_91>",
746
+ "lstrip": false,
747
+ "normalized": false,
748
+ "rstrip": false,
749
+ "single_word": false
750
+ },
751
+ {
752
+ "content": "<FAKE_PAD_92>",
753
+ "lstrip": false,
754
+ "normalized": false,
755
+ "rstrip": false,
756
+ "single_word": false
757
+ },
758
+ {
759
+ "content": "<FAKE_PAD_93>",
760
+ "lstrip": false,
761
+ "normalized": false,
762
+ "rstrip": false,
763
+ "single_word": false
764
+ },
765
+ {
766
+ "content": "<FAKE_PAD_94>",
767
+ "lstrip": false,
768
+ "normalized": false,
769
+ "rstrip": false,
770
+ "single_word": false
771
+ },
772
+ {
773
+ "content": "<FAKE_PAD_95>",
774
+ "lstrip": false,
775
+ "normalized": false,
776
+ "rstrip": false,
777
+ "single_word": false
778
+ },
779
+ {
780
+ "content": "<FAKE_PAD_96>",
781
+ "lstrip": false,
782
+ "normalized": false,
783
+ "rstrip": false,
784
+ "single_word": false
785
+ },
786
+ {
787
+ "content": "<FAKE_PAD_97>",
788
+ "lstrip": false,
789
+ "normalized": false,
790
+ "rstrip": false,
791
+ "single_word": false
792
+ },
793
+ {
794
+ "content": "<FAKE_PAD_98>",
795
+ "lstrip": false,
796
+ "normalized": false,
797
+ "rstrip": false,
798
+ "single_word": false
799
+ },
800
+ {
801
+ "content": "<FAKE_PAD_99>",
802
+ "lstrip": false,
803
+ "normalized": false,
804
+ "rstrip": false,
805
+ "single_word": false
806
+ },
807
+ {
808
+ "content": "<FAKE_PAD_100>",
809
+ "lstrip": false,
810
+ "normalized": false,
811
+ "rstrip": false,
812
+ "single_word": false
813
+ },
814
+ {
815
+ "content": "<FAKE_PAD_101>",
816
+ "lstrip": false,
817
+ "normalized": false,
818
+ "rstrip": false,
819
+ "single_word": false
820
+ },
821
+ {
822
+ "content": "<FAKE_PAD_102>",
823
+ "lstrip": false,
824
+ "normalized": false,
825
+ "rstrip": false,
826
+ "single_word": false
827
+ },
828
+ {
829
+ "content": "<FAKE_PAD_103>",
830
+ "lstrip": false,
831
+ "normalized": false,
832
+ "rstrip": false,
833
+ "single_word": false
834
+ },
835
+ {
836
+ "content": "<FAKE_PAD_104>",
837
+ "lstrip": false,
838
+ "normalized": false,
839
+ "rstrip": false,
840
+ "single_word": false
841
+ },
842
+ {
843
+ "content": "<FAKE_PAD_105>",
844
+ "lstrip": false,
845
+ "normalized": false,
846
+ "rstrip": false,
847
+ "single_word": false
848
+ },
849
+ {
850
+ "content": "<FAKE_PAD_106>",
851
+ "lstrip": false,
852
+ "normalized": false,
853
+ "rstrip": false,
854
+ "single_word": false
855
+ },
856
+ {
857
+ "content": "<FAKE_PAD_107>",
858
+ "lstrip": false,
859
+ "normalized": false,
860
+ "rstrip": false,
861
+ "single_word": false
862
+ },
863
+ {
864
+ "content": "<FAKE_PAD_108>",
865
+ "lstrip": false,
866
+ "normalized": false,
867
+ "rstrip": false,
868
+ "single_word": false
869
+ },
870
+ {
871
+ "content": "<FAKE_PAD_109>",
872
+ "lstrip": false,
873
+ "normalized": false,
874
+ "rstrip": false,
875
+ "single_word": false
876
+ },
877
+ {
878
+ "content": "<FAKE_PAD_110>",
879
+ "lstrip": false,
880
+ "normalized": false,
881
+ "rstrip": false,
882
+ "single_word": false
883
+ },
884
+ {
885
+ "content": "<FAKE_PAD_111>",
886
+ "lstrip": false,
887
+ "normalized": false,
888
+ "rstrip": false,
889
+ "single_word": false
890
+ },
891
+ {
892
+ "content": "<FAKE_PAD_112>",
893
+ "lstrip": false,
894
+ "normalized": false,
895
+ "rstrip": false,
896
+ "single_word": false
897
+ },
898
+ {
899
+ "content": "<FAKE_PAD_113>",
900
+ "lstrip": false,
901
+ "normalized": false,
902
+ "rstrip": false,
903
+ "single_word": false
904
+ },
905
+ {
906
+ "content": "<FAKE_PAD_114>",
907
+ "lstrip": false,
908
+ "normalized": false,
909
+ "rstrip": false,
910
+ "single_word": false
911
+ },
912
+ {
913
+ "content": "<FAKE_PAD_115>",
914
+ "lstrip": false,
915
+ "normalized": false,
916
+ "rstrip": false,
917
+ "single_word": false
918
+ },
919
+ {
920
+ "content": "<FAKE_PAD_116>",
921
+ "lstrip": false,
922
+ "normalized": false,
923
+ "rstrip": false,
924
+ "single_word": false
925
+ },
926
+ {
927
+ "content": "<FAKE_PAD_117>",
928
+ "lstrip": false,
929
+ "normalized": false,
930
+ "rstrip": false,
931
+ "single_word": false
932
+ },
933
+ {
934
+ "content": "<FAKE_PAD_118>",
935
+ "lstrip": false,
936
+ "normalized": false,
937
+ "rstrip": false,
938
+ "single_word": false
939
+ },
940
+ {
941
+ "content": "<FAKE_PAD_119>",
942
+ "lstrip": false,
943
+ "normalized": false,
944
+ "rstrip": false,
945
+ "single_word": false
946
+ },
947
+ {
948
+ "content": "<FAKE_PAD_120>",
949
+ "lstrip": false,
950
+ "normalized": false,
951
+ "rstrip": false,
952
+ "single_word": false
953
+ },
954
+ {
955
+ "content": "<FAKE_PAD_121>",
956
+ "lstrip": false,
957
+ "normalized": false,
958
+ "rstrip": false,
959
+ "single_word": false
960
+ },
961
+ {
962
+ "content": "<FAKE_PAD_122>",
963
+ "lstrip": false,
964
+ "normalized": false,
965
+ "rstrip": false,
966
+ "single_word": false
967
+ },
968
+ {
969
+ "content": "<FAKE_PAD_123>",
970
+ "lstrip": false,
971
+ "normalized": false,
972
+ "rstrip": false,
973
+ "single_word": false
974
+ },
975
+ {
976
+ "content": "<FAKE_PAD_124>",
977
+ "lstrip": false,
978
+ "normalized": false,
979
+ "rstrip": false,
980
+ "single_word": false
981
+ },
982
+ {
983
+ "content": "<FAKE_PAD_125>",
984
+ "lstrip": false,
985
+ "normalized": false,
986
+ "rstrip": false,
987
+ "single_word": false
988
+ },
989
+ {
990
+ "content": "<FAKE_PAD_126>",
991
+ "lstrip": false,
992
+ "normalized": false,
993
+ "rstrip": false,
994
+ "single_word": false
995
+ },
996
+ {
997
+ "content": "<FAKE_PAD_127>",
998
+ "lstrip": false,
999
+ "normalized": false,
1000
+ "rstrip": false,
1001
+ "single_word": false
1002
+ },
1003
+ {
1004
+ "content": "<FAKE_PAD_128>",
1005
+ "lstrip": false,
1006
+ "normalized": false,
1007
+ "rstrip": false,
1008
+ "single_word": false
1009
+ },
1010
+ {
1011
+ "content": "<FAKE_PAD_129>",
1012
+ "lstrip": false,
1013
+ "normalized": false,
1014
+ "rstrip": false,
1015
+ "single_word": false
1016
+ },
1017
+ {
1018
+ "content": "<FAKE_PAD_130>",
1019
+ "lstrip": false,
1020
+ "normalized": false,
1021
+ "rstrip": false,
1022
+ "single_word": false
1023
+ },
1024
+ {
1025
+ "content": "<FAKE_PAD_131>",
1026
+ "lstrip": false,
1027
+ "normalized": false,
1028
+ "rstrip": false,
1029
+ "single_word": false
1030
+ },
1031
+ {
1032
+ "content": "<FAKE_PAD_132>",
1033
+ "lstrip": false,
1034
+ "normalized": false,
1035
+ "rstrip": false,
1036
+ "single_word": false
1037
+ },
1038
+ {
1039
+ "content": "<FAKE_PAD_133>",
1040
+ "lstrip": false,
1041
+ "normalized": false,
1042
+ "rstrip": false,
1043
+ "single_word": false
1044
+ },
1045
+ {
1046
+ "content": "<FAKE_PAD_134>",
1047
+ "lstrip": false,
1048
+ "normalized": false,
1049
+ "rstrip": false,
1050
+ "single_word": false
1051
+ },
1052
+ {
1053
+ "content": "<FAKE_PAD_135>",
1054
+ "lstrip": false,
1055
+ "normalized": false,
1056
+ "rstrip": false,
1057
+ "single_word": false
1058
+ },
1059
+ {
1060
+ "content": "<FAKE_PAD_136>",
1061
+ "lstrip": false,
1062
+ "normalized": false,
1063
+ "rstrip": false,
1064
+ "single_word": false
1065
+ },
1066
+ {
1067
+ "content": "<FAKE_PAD_137>",
1068
+ "lstrip": false,
1069
+ "normalized": false,
1070
+ "rstrip": false,
1071
+ "single_word": false
1072
+ },
1073
+ {
1074
+ "content": "<FAKE_PAD_138>",
1075
+ "lstrip": false,
1076
+ "normalized": false,
1077
+ "rstrip": false,
1078
+ "single_word": false
1079
+ },
1080
+ {
1081
+ "content": "<FAKE_PAD_139>",
1082
+ "lstrip": false,
1083
+ "normalized": false,
1084
+ "rstrip": false,
1085
+ "single_word": false
1086
+ },
1087
+ {
1088
+ "content": "<FAKE_PAD_140>",
1089
+ "lstrip": false,
1090
+ "normalized": false,
1091
+ "rstrip": false,
1092
+ "single_word": false
1093
+ },
1094
+ {
1095
+ "content": "<FAKE_PAD_141>",
1096
+ "lstrip": false,
1097
+ "normalized": false,
1098
+ "rstrip": false,
1099
+ "single_word": false
1100
+ },
1101
+ {
1102
+ "content": "<FAKE_PAD_142>",
1103
+ "lstrip": false,
1104
+ "normalized": false,
1105
+ "rstrip": false,
1106
+ "single_word": false
1107
+ },
1108
+ {
1109
+ "content": "<FAKE_PAD_143>",
1110
+ "lstrip": false,
1111
+ "normalized": false,
1112
+ "rstrip": false,
1113
+ "single_word": false
1114
+ },
1115
+ {
1116
+ "content": "<FAKE_PAD_144>",
1117
+ "lstrip": false,
1118
+ "normalized": false,
1119
+ "rstrip": false,
1120
+ "single_word": false
1121
+ },
1122
+ {
1123
+ "content": "<FAKE_PAD_145>",
1124
+ "lstrip": false,
1125
+ "normalized": false,
1126
+ "rstrip": false,
1127
+ "single_word": false
1128
+ },
1129
+ {
1130
+ "content": "<FAKE_PAD_146>",
1131
+ "lstrip": false,
1132
+ "normalized": false,
1133
+ "rstrip": false,
1134
+ "single_word": false
1135
+ },
1136
+ {
1137
+ "content": "<FAKE_PAD_147>",
1138
+ "lstrip": false,
1139
+ "normalized": false,
1140
+ "rstrip": false,
1141
+ "single_word": false
1142
+ },
1143
+ {
1144
+ "content": "<FAKE_PAD_148>",
1145
+ "lstrip": false,
1146
+ "normalized": false,
1147
+ "rstrip": false,
1148
+ "single_word": false
1149
+ },
1150
+ {
1151
+ "content": "<FAKE_PAD_149>",
1152
+ "lstrip": false,
1153
+ "normalized": false,
1154
+ "rstrip": false,
1155
+ "single_word": false
1156
+ },
1157
+ {
1158
+ "content": "<FAKE_PAD_150>",
1159
+ "lstrip": false,
1160
+ "normalized": false,
1161
+ "rstrip": false,
1162
+ "single_word": false
1163
+ },
1164
+ {
1165
+ "content": "<FAKE_PAD_151>",
1166
+ "lstrip": false,
1167
+ "normalized": false,
1168
+ "rstrip": false,
1169
+ "single_word": false
1170
+ },
1171
+ {
1172
+ "content": "<FAKE_PAD_152>",
1173
+ "lstrip": false,
1174
+ "normalized": false,
1175
+ "rstrip": false,
1176
+ "single_word": false
1177
+ },
1178
+ {
1179
+ "content": "<FAKE_PAD_153>",
1180
+ "lstrip": false,
1181
+ "normalized": false,
1182
+ "rstrip": false,
1183
+ "single_word": false
1184
+ },
1185
+ {
1186
+ "content": "<FAKE_PAD_154>",
1187
+ "lstrip": false,
1188
+ "normalized": false,
1189
+ "rstrip": false,
1190
+ "single_word": false
1191
+ },
1192
+ {
1193
+ "content": "<FAKE_PAD_155>",
1194
+ "lstrip": false,
1195
+ "normalized": false,
1196
+ "rstrip": false,
1197
+ "single_word": false
1198
+ },
1199
+ {
1200
+ "content": "<FAKE_PAD_156>",
1201
+ "lstrip": false,
1202
+ "normalized": false,
1203
+ "rstrip": false,
1204
+ "single_word": false
1205
+ },
1206
+ {
1207
+ "content": "<FAKE_PAD_157>",
1208
+ "lstrip": false,
1209
+ "normalized": false,
1210
+ "rstrip": false,
1211
+ "single_word": false
1212
+ },
1213
+ {
1214
+ "content": "<FAKE_PAD_158>",
1215
+ "lstrip": false,
1216
+ "normalized": false,
1217
+ "rstrip": false,
1218
+ "single_word": false
1219
+ },
1220
+ {
1221
+ "content": "<FAKE_PAD_159>",
1222
+ "lstrip": false,
1223
+ "normalized": false,
1224
+ "rstrip": false,
1225
+ "single_word": false
1226
+ },
1227
+ {
1228
+ "content": "<FAKE_PAD_160>",
1229
+ "lstrip": false,
1230
+ "normalized": false,
1231
+ "rstrip": false,
1232
+ "single_word": false
1233
+ },
1234
+ {
1235
+ "content": "<FAKE_PAD_161>",
1236
+ "lstrip": false,
1237
+ "normalized": false,
1238
+ "rstrip": false,
1239
+ "single_word": false
1240
+ },
1241
+ {
1242
+ "content": "<FAKE_PAD_162>",
1243
+ "lstrip": false,
1244
+ "normalized": false,
1245
+ "rstrip": false,
1246
+ "single_word": false
1247
+ },
1248
+ {
1249
+ "content": "<FAKE_PAD_163>",
1250
+ "lstrip": false,
1251
+ "normalized": false,
1252
+ "rstrip": false,
1253
+ "single_word": false
1254
+ },
1255
+ {
1256
+ "content": "<FAKE_PAD_164>",
1257
+ "lstrip": false,
1258
+ "normalized": false,
1259
+ "rstrip": false,
1260
+ "single_word": false
1261
+ },
1262
+ {
1263
+ "content": "<FAKE_PAD_165>",
1264
+ "lstrip": false,
1265
+ "normalized": false,
1266
+ "rstrip": false,
1267
+ "single_word": false
1268
+ },
1269
+ {
1270
+ "content": "<FAKE_PAD_166>",
1271
+ "lstrip": false,
1272
+ "normalized": false,
1273
+ "rstrip": false,
1274
+ "single_word": false
1275
+ },
1276
+ {
1277
+ "content": "<FAKE_PAD_167>",
1278
+ "lstrip": false,
1279
+ "normalized": false,
1280
+ "rstrip": false,
1281
+ "single_word": false
1282
+ },
1283
+ {
1284
+ "content": "<FAKE_PAD_168>",
1285
+ "lstrip": false,
1286
+ "normalized": false,
1287
+ "rstrip": false,
1288
+ "single_word": false
1289
+ },
1290
+ {
1291
+ "content": "<FAKE_PAD_169>",
1292
+ "lstrip": false,
1293
+ "normalized": false,
1294
+ "rstrip": false,
1295
+ "single_word": false
1296
+ },
1297
+ {
1298
+ "content": "<FAKE_PAD_170>",
1299
+ "lstrip": false,
1300
+ "normalized": false,
1301
+ "rstrip": false,
1302
+ "single_word": false
1303
+ },
1304
+ {
1305
+ "content": "<FAKE_PAD_171>",
1306
+ "lstrip": false,
1307
+ "normalized": false,
1308
+ "rstrip": false,
1309
+ "single_word": false
1310
+ },
1311
+ {
1312
+ "content": "<FAKE_PAD_172>",
1313
+ "lstrip": false,
1314
+ "normalized": false,
1315
+ "rstrip": false,
1316
+ "single_word": false
1317
+ },
1318
+ {
1319
+ "content": "<FAKE_PAD_173>",
1320
+ "lstrip": false,
1321
+ "normalized": false,
1322
+ "rstrip": false,
1323
+ "single_word": false
1324
+ },
1325
+ {
1326
+ "content": "<FAKE_PAD_174>",
1327
+ "lstrip": false,
1328
+ "normalized": false,
1329
+ "rstrip": false,
1330
+ "single_word": false
1331
+ },
1332
+ {
1333
+ "content": "<FAKE_PAD_175>",
1334
+ "lstrip": false,
1335
+ "normalized": false,
1336
+ "rstrip": false,
1337
+ "single_word": false
1338
+ },
1339
+ {
1340
+ "content": "<FAKE_PAD_176>",
1341
+ "lstrip": false,
1342
+ "normalized": false,
1343
+ "rstrip": false,
1344
+ "single_word": false
1345
+ },
1346
+ {
1347
+ "content": "<FAKE_PAD_177>",
1348
+ "lstrip": false,
1349
+ "normalized": false,
1350
+ "rstrip": false,
1351
+ "single_word": false
1352
+ },
1353
+ {
1354
+ "content": "<FAKE_PAD_178>",
1355
+ "lstrip": false,
1356
+ "normalized": false,
1357
+ "rstrip": false,
1358
+ "single_word": false
1359
+ },
1360
+ {
1361
+ "content": "<FAKE_PAD_179>",
1362
+ "lstrip": false,
1363
+ "normalized": false,
1364
+ "rstrip": false,
1365
+ "single_word": false
1366
+ },
1367
+ {
1368
+ "content": "<FAKE_PAD_180>",
1369
+ "lstrip": false,
1370
+ "normalized": false,
1371
+ "rstrip": false,
1372
+ "single_word": false
1373
+ },
1374
+ {
1375
+ "content": "<FAKE_PAD_181>",
1376
+ "lstrip": false,
1377
+ "normalized": false,
1378
+ "rstrip": false,
1379
+ "single_word": false
1380
+ },
1381
+ {
1382
+ "content": "<FAKE_PAD_182>",
1383
+ "lstrip": false,
1384
+ "normalized": false,
1385
+ "rstrip": false,
1386
+ "single_word": false
1387
+ },
1388
+ {
1389
+ "content": "<FAKE_PAD_183>",
1390
+ "lstrip": false,
1391
+ "normalized": false,
1392
+ "rstrip": false,
1393
+ "single_word": false
1394
+ },
1395
+ {
1396
+ "content": "<FAKE_PAD_184>",
1397
+ "lstrip": false,
1398
+ "normalized": false,
1399
+ "rstrip": false,
1400
+ "single_word": false
1401
+ },
1402
+ {
1403
+ "content": "<FAKE_PAD_185>",
1404
+ "lstrip": false,
1405
+ "normalized": false,
1406
+ "rstrip": false,
1407
+ "single_word": false
1408
+ },
1409
+ {
1410
+ "content": "<FAKE_PAD_186>",
1411
+ "lstrip": false,
1412
+ "normalized": false,
1413
+ "rstrip": false,
1414
+ "single_word": false
1415
+ },
1416
+ {
1417
+ "content": "<FAKE_PAD_187>",
1418
+ "lstrip": false,
1419
+ "normalized": false,
1420
+ "rstrip": false,
1421
+ "single_word": false
1422
+ },
1423
+ {
1424
+ "content": "<FAKE_PAD_188>",
1425
+ "lstrip": false,
1426
+ "normalized": false,
1427
+ "rstrip": false,
1428
+ "single_word": false
1429
+ },
1430
+ {
1431
+ "content": "<FAKE_PAD_189>",
1432
+ "lstrip": false,
1433
+ "normalized": false,
1434
+ "rstrip": false,
1435
+ "single_word": false
1436
+ },
1437
+ {
1438
+ "content": "<FAKE_PAD_190>",
1439
+ "lstrip": false,
1440
+ "normalized": false,
1441
+ "rstrip": false,
1442
+ "single_word": false
1443
+ },
1444
+ {
1445
+ "content": "<FAKE_PAD_191>",
1446
+ "lstrip": false,
1447
+ "normalized": false,
1448
+ "rstrip": false,
1449
+ "single_word": false
1450
+ },
1451
+ {
1452
+ "content": "<FAKE_PAD_192>",
1453
+ "lstrip": false,
1454
+ "normalized": false,
1455
+ "rstrip": false,
1456
+ "single_word": false
1457
+ },
1458
+ {
1459
+ "content": "<FAKE_PAD_193>",
1460
+ "lstrip": false,
1461
+ "normalized": false,
1462
+ "rstrip": false,
1463
+ "single_word": false
1464
+ },
1465
+ {
1466
+ "content": "<FAKE_PAD_194>",
1467
+ "lstrip": false,
1468
+ "normalized": false,
1469
+ "rstrip": false,
1470
+ "single_word": false
1471
+ },
1472
+ {
1473
+ "content": "<FAKE_PAD_195>",
1474
+ "lstrip": false,
1475
+ "normalized": false,
1476
+ "rstrip": false,
1477
+ "single_word": false
1478
+ },
1479
+ {
1480
+ "content": "<FAKE_PAD_196>",
1481
+ "lstrip": false,
1482
+ "normalized": false,
1483
+ "rstrip": false,
1484
+ "single_word": false
1485
+ },
1486
+ {
1487
+ "content": "<FAKE_PAD_197>",
1488
+ "lstrip": false,
1489
+ "normalized": false,
1490
+ "rstrip": false,
1491
+ "single_word": false
1492
+ },
1493
+ {
1494
+ "content": "<FAKE_PAD_198>",
1495
+ "lstrip": false,
1496
+ "normalized": false,
1497
+ "rstrip": false,
1498
+ "single_word": false
1499
+ },
1500
+ {
1501
+ "content": "<FAKE_PAD_199>",
1502
+ "lstrip": false,
1503
+ "normalized": false,
1504
+ "rstrip": false,
1505
+ "single_word": false
1506
+ },
1507
+ {
1508
+ "content": "<FAKE_PAD_200>",
1509
+ "lstrip": false,
1510
+ "normalized": false,
1511
+ "rstrip": false,
1512
+ "single_word": false
1513
+ },
1514
+ {
1515
+ "content": "<FAKE_PAD_201>",
1516
+ "lstrip": false,
1517
+ "normalized": false,
1518
+ "rstrip": false,
1519
+ "single_word": false
1520
+ },
1521
+ {
1522
+ "content": "<FAKE_PAD_202>",
1523
+ "lstrip": false,
1524
+ "normalized": false,
1525
+ "rstrip": false,
1526
+ "single_word": false
1527
+ },
1528
+ {
1529
+ "content": "<FAKE_PAD_203>",
1530
+ "lstrip": false,
1531
+ "normalized": false,
1532
+ "rstrip": false,
1533
+ "single_word": false
1534
+ },
1535
+ {
1536
+ "content": "<FAKE_PAD_204>",
1537
+ "lstrip": false,
1538
+ "normalized": false,
1539
+ "rstrip": false,
1540
+ "single_word": false
1541
+ },
1542
+ {
1543
+ "content": "<FAKE_PAD_205>",
1544
+ "lstrip": false,
1545
+ "normalized": false,
1546
+ "rstrip": false,
1547
+ "single_word": false
1548
+ },
1549
+ {
1550
+ "content": "<FAKE_PAD_206>",
1551
+ "lstrip": false,
1552
+ "normalized": false,
1553
+ "rstrip": false,
1554
+ "single_word": false
1555
+ },
1556
+ {
1557
+ "content": "<FAKE_PAD_207>",
1558
+ "lstrip": false,
1559
+ "normalized": false,
1560
+ "rstrip": false,
1561
+ "single_word": false
1562
+ },
1563
+ {
1564
+ "content": "<FAKE_PAD_208>",
1565
+ "lstrip": false,
1566
+ "normalized": false,
1567
+ "rstrip": false,
1568
+ "single_word": false
1569
+ },
1570
+ {
1571
+ "content": "<FAKE_PAD_209>",
1572
+ "lstrip": false,
1573
+ "normalized": false,
1574
+ "rstrip": false,
1575
+ "single_word": false
1576
+ },
1577
+ {
1578
+ "content": "<FAKE_PAD_210>",
1579
+ "lstrip": false,
1580
+ "normalized": false,
1581
+ "rstrip": false,
1582
+ "single_word": false
1583
+ },
1584
+ {
1585
+ "content": "<FAKE_PAD_211>",
1586
+ "lstrip": false,
1587
+ "normalized": false,
1588
+ "rstrip": false,
1589
+ "single_word": false
1590
+ },
1591
+ {
1592
+ "content": "<FAKE_PAD_212>",
1593
+ "lstrip": false,
1594
+ "normalized": false,
1595
+ "rstrip": false,
1596
+ "single_word": false
1597
+ },
1598
+ {
1599
+ "content": "<FAKE_PAD_213>",
1600
+ "lstrip": false,
1601
+ "normalized": false,
1602
+ "rstrip": false,
1603
+ "single_word": false
1604
+ },
1605
+ {
1606
+ "content": "<FAKE_PAD_214>",
1607
+ "lstrip": false,
1608
+ "normalized": false,
1609
+ "rstrip": false,
1610
+ "single_word": false
1611
+ },
1612
+ {
1613
+ "content": "<FAKE_PAD_215>",
1614
+ "lstrip": false,
1615
+ "normalized": false,
1616
+ "rstrip": false,
1617
+ "single_word": false
1618
+ },
1619
+ {
1620
+ "content": "<FAKE_PAD_216>",
1621
+ "lstrip": false,
1622
+ "normalized": false,
1623
+ "rstrip": false,
1624
+ "single_word": false
1625
+ },
1626
+ {
1627
+ "content": "<FAKE_PAD_217>",
1628
+ "lstrip": false,
1629
+ "normalized": false,
1630
+ "rstrip": false,
1631
+ "single_word": false
1632
+ },
1633
+ {
1634
+ "content": "<FAKE_PAD_218>",
1635
+ "lstrip": false,
1636
+ "normalized": false,
1637
+ "rstrip": false,
1638
+ "single_word": false
1639
+ },
1640
+ {
1641
+ "content": "<FAKE_PAD_219>",
1642
+ "lstrip": false,
1643
+ "normalized": false,
1644
+ "rstrip": false,
1645
+ "single_word": false
1646
+ },
1647
+ {
1648
+ "content": "<FAKE_PAD_220>",
1649
+ "lstrip": false,
1650
+ "normalized": false,
1651
+ "rstrip": false,
1652
+ "single_word": false
1653
+ },
1654
+ {
1655
+ "content": "<FAKE_PAD_221>",
1656
+ "lstrip": false,
1657
+ "normalized": false,
1658
+ "rstrip": false,
1659
+ "single_word": false
1660
+ },
1661
+ {
1662
+ "content": "<FAKE_PAD_222>",
1663
+ "lstrip": false,
1664
+ "normalized": false,
1665
+ "rstrip": false,
1666
+ "single_word": false
1667
+ },
1668
+ {
1669
+ "content": "<FAKE_PAD_223>",
1670
+ "lstrip": false,
1671
+ "normalized": false,
1672
+ "rstrip": false,
1673
+ "single_word": false
1674
+ },
1675
+ {
1676
+ "content": "<FAKE_PAD_224>",
1677
+ "lstrip": false,
1678
+ "normalized": false,
1679
+ "rstrip": false,
1680
+ "single_word": false
1681
+ },
1682
+ {
1683
+ "content": "<FAKE_PAD_225>",
1684
+ "lstrip": false,
1685
+ "normalized": false,
1686
+ "rstrip": false,
1687
+ "single_word": false
1688
+ },
1689
+ {
1690
+ "content": "<FAKE_PAD_226>",
1691
+ "lstrip": false,
1692
+ "normalized": false,
1693
+ "rstrip": false,
1694
+ "single_word": false
1695
+ },
1696
+ {
1697
+ "content": "<FAKE_PAD_227>",
1698
+ "lstrip": false,
1699
+ "normalized": false,
1700
+ "rstrip": false,
1701
+ "single_word": false
1702
+ },
1703
+ {
1704
+ "content": "<FAKE_PAD_228>",
1705
+ "lstrip": false,
1706
+ "normalized": false,
1707
+ "rstrip": false,
1708
+ "single_word": false
1709
+ },
1710
+ {
1711
+ "content": "<FAKE_PAD_229>",
1712
+ "lstrip": false,
1713
+ "normalized": false,
1714
+ "rstrip": false,
1715
+ "single_word": false
1716
+ },
1717
+ {
1718
+ "content": "<FAKE_PAD_230>",
1719
+ "lstrip": false,
1720
+ "normalized": false,
1721
+ "rstrip": false,
1722
+ "single_word": false
1723
+ },
1724
+ {
1725
+ "content": "<FAKE_PAD_231>",
1726
+ "lstrip": false,
1727
+ "normalized": false,
1728
+ "rstrip": false,
1729
+ "single_word": false
1730
+ },
1731
+ {
1732
+ "content": "<FAKE_PAD_232>",
1733
+ "lstrip": false,
1734
+ "normalized": false,
1735
+ "rstrip": false,
1736
+ "single_word": false
1737
+ },
1738
+ {
1739
+ "content": "<FAKE_PAD_233>",
1740
+ "lstrip": false,
1741
+ "normalized": false,
1742
+ "rstrip": false,
1743
+ "single_word": false
1744
+ },
1745
+ {
1746
+ "content": "<FAKE_PAD_234>",
1747
+ "lstrip": false,
1748
+ "normalized": false,
1749
+ "rstrip": false,
1750
+ "single_word": false
1751
+ },
1752
+ {
1753
+ "content": "<FAKE_PAD_235>",
1754
+ "lstrip": false,
1755
+ "normalized": false,
1756
+ "rstrip": false,
1757
+ "single_word": false
1758
+ },
1759
+ {
1760
+ "content": "<FAKE_PAD_236>",
1761
+ "lstrip": false,
1762
+ "normalized": false,
1763
+ "rstrip": false,
1764
+ "single_word": false
1765
+ },
1766
+ {
1767
+ "content": "<FAKE_PAD_237>",
1768
+ "lstrip": false,
1769
+ "normalized": false,
1770
+ "rstrip": false,
1771
+ "single_word": false
1772
+ },
1773
+ {
1774
+ "content": "<FAKE_PAD_238>",
1775
+ "lstrip": false,
1776
+ "normalized": false,
1777
+ "rstrip": false,
1778
+ "single_word": false
1779
+ },
1780
+ {
1781
+ "content": "<FAKE_PAD_239>",
1782
+ "lstrip": false,
1783
+ "normalized": false,
1784
+ "rstrip": false,
1785
+ "single_word": false
1786
+ },
1787
+ {
1788
+ "content": "<FAKE_PAD_240>",
1789
+ "lstrip": false,
1790
+ "normalized": false,
1791
+ "rstrip": false,
1792
+ "single_word": false
1793
+ },
1794
+ {
1795
+ "content": "<FAKE_PAD_241>",
1796
+ "lstrip": false,
1797
+ "normalized": false,
1798
+ "rstrip": false,
1799
+ "single_word": false
1800
+ },
1801
+ {
1802
+ "content": "<FAKE_PAD_242>",
1803
+ "lstrip": false,
1804
+ "normalized": false,
1805
+ "rstrip": false,
1806
+ "single_word": false
1807
+ },
1808
+ {
1809
+ "content": "<FAKE_PAD_243>",
1810
+ "lstrip": false,
1811
+ "normalized": false,
1812
+ "rstrip": false,
1813
+ "single_word": false
1814
+ },
1815
+ {
1816
+ "content": "<FAKE_PAD_244>",
1817
+ "lstrip": false,
1818
+ "normalized": false,
1819
+ "rstrip": false,
1820
+ "single_word": false
1821
+ },
1822
+ {
1823
+ "content": "<FAKE_PAD_245>",
1824
+ "lstrip": false,
1825
+ "normalized": false,
1826
+ "rstrip": false,
1827
+ "single_word": false
1828
+ },
1829
+ {
1830
+ "content": "<FAKE_PAD_246>",
1831
+ "lstrip": false,
1832
+ "normalized": false,
1833
+ "rstrip": false,
1834
+ "single_word": false
1835
+ },
1836
+ {
1837
+ "content": "<FAKE_PAD_247>",
1838
+ "lstrip": false,
1839
+ "normalized": false,
1840
+ "rstrip": false,
1841
+ "single_word": false
1842
+ },
1843
+ {
1844
+ "content": "<FAKE_PAD_248>",
1845
+ "lstrip": false,
1846
+ "normalized": false,
1847
+ "rstrip": false,
1848
+ "single_word": false
1849
+ },
1850
+ {
1851
+ "content": "<FAKE_PAD_249>",
1852
+ "lstrip": false,
1853
+ "normalized": false,
1854
+ "rstrip": false,
1855
+ "single_word": false
1856
+ },
1857
+ {
1858
+ "content": "<FAKE_PAD_250>",
1859
+ "lstrip": false,
1860
+ "normalized": false,
1861
+ "rstrip": false,
1862
+ "single_word": false
1863
+ },
1864
+ {
1865
+ "content": "<FAKE_PAD_251>",
1866
+ "lstrip": false,
1867
+ "normalized": false,
1868
+ "rstrip": false,
1869
+ "single_word": false
1870
+ },
1871
+ {
1872
+ "content": "<FAKE_PAD_252>",
1873
+ "lstrip": false,
1874
+ "normalized": false,
1875
+ "rstrip": false,
1876
+ "single_word": false
1877
+ },
1878
+ {
1879
+ "content": "<FAKE_PAD_253>",
1880
+ "lstrip": false,
1881
+ "normalized": false,
1882
+ "rstrip": false,
1883
+ "single_word": false
1884
+ }
1885
+ ],
1886
+ "eos_token": {
1887
+ "content": "<|im_end|>",
1888
+ "lstrip": false,
1889
+ "normalized": false,
1890
+ "rstrip": false,
1891
+ "single_word": false
1892
+ },
1893
+ "pad_token": {
1894
+ "content": "<|endoftext|>",
1895
+ "lstrip": false,
1896
+ "normalized": false,
1897
+ "rstrip": false,
1898
+ "single_word": false
1899
+ }
1900
+ }
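The map above pins "<|im_end|>" as the end-of-sequence token and "<|endoftext|>" as the padding token, with the <FAKE_PAD_*> entries registered as additional special tokens. Below is a minimal sketch of how these settings surface once the uploaded folder is loaded with transformers; the local path is hypothetical, and the id arithmetic follows the added_tokens_decoder mapping in the tokenizer_config.json diff that follows.

from transformers import AutoTokenizer

# Hypothetical path: point this at the folder uploaded in this commit.
tok = AutoTokenizer.from_pretrained("./uploaded_folder")

# eos/pad are taken from special_tokens_map.json.
print(tok.eos_token)  # <|im_end|>
print(tok.pad_token)  # <|endoftext|>

# The FAKE_PAD placeholders occupy a contiguous id block: <FAKE_PAD_0> maps
# to 151682 and, following the same sequential pattern, <FAKE_PAD_253> maps
# to 151935 (assumption: these look like reserved slots that round the
# vocabulary up to 151936 ids, so later tokens can reuse them without
# resizing the embedding matrix).
print(tok.convert_tokens_to_ids("<FAKE_PAD_0>"))    # 151682
print(tok.convert_tokens_to_ids("<FAKE_PAD_253>"))  # 151935

One plausible motivation for the placeholder block, stated as a guess rather than anything this commit documents: 151,936 is a multiple of 128, a common alignment for sharding an embedding table efficiently across devices.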
tokenizer_config.json ADDED
@@ -0,0 +1,2643 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_eos_token": false,
4
+ "add_prefix_space": false,
5
+ "added_tokens_decoder": {
6
+ "151643": {
7
+ "content": "<|endoftext|>",
8
+ "lstrip": false,
9
+ "normalized": false,
10
+ "rstrip": false,
11
+ "single_word": false,
12
+ "special": true
13
+ },
14
+ "151644": {
15
+ "content": "<|im_start|>",
16
+ "lstrip": false,
17
+ "normalized": false,
18
+ "rstrip": false,
19
+ "single_word": false,
20
+ "special": true
21
+ },
22
+ "151645": {
23
+ "content": "<|im_end|>",
24
+ "lstrip": false,
25
+ "normalized": false,
26
+ "rstrip": false,
27
+ "single_word": false,
28
+ "special": true
29
+ },
30
+ "151646": {
31
+ "content": "<|object_ref_start|>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false,
36
+ "special": true
37
+ },
38
+ "151647": {
39
+ "content": "<|object_ref_end|>",
40
+ "lstrip": false,
41
+ "normalized": false,
42
+ "rstrip": false,
43
+ "single_word": false,
44
+ "special": true
45
+ },
46
+ "151648": {
47
+ "content": "<|box_start|>",
48
+ "lstrip": false,
49
+ "normalized": false,
50
+ "rstrip": false,
51
+ "single_word": false,
52
+ "special": true
53
+ },
54
+ "151649": {
55
+ "content": "<|box_end|>",
56
+ "lstrip": false,
57
+ "normalized": false,
58
+ "rstrip": false,
59
+ "single_word": false,
60
+ "special": true
61
+ },
62
+ "151650": {
63
+ "content": "<|quad_start|>",
64
+ "lstrip": false,
65
+ "normalized": false,
66
+ "rstrip": false,
67
+ "single_word": false,
68
+ "special": true
69
+ },
70
+ "151651": {
71
+ "content": "<|quad_end|>",
72
+ "lstrip": false,
73
+ "normalized": false,
74
+ "rstrip": false,
75
+ "single_word": false,
76
+ "special": true
77
+ },
78
+ "151652": {
79
+ "content": "<|vision_start|>",
80
+ "lstrip": false,
81
+ "normalized": false,
82
+ "rstrip": false,
83
+ "single_word": false,
84
+ "special": true
85
+ },
86
+ "151653": {
87
+ "content": "<|vision_end|>",
88
+ "lstrip": false,
89
+ "normalized": false,
90
+ "rstrip": false,
91
+ "single_word": false,
92
+ "special": true
93
+ },
94
+ "151654": {
95
+ "content": "<|vision_pad|>",
96
+ "lstrip": false,
97
+ "normalized": false,
98
+ "rstrip": false,
99
+ "single_word": false,
100
+ "special": true
101
+ },
102
+ "151655": {
103
+ "content": "<|image_pad|>",
104
+ "lstrip": false,
105
+ "normalized": false,
106
+ "rstrip": false,
107
+ "single_word": false,
108
+ "special": true
109
+ },
110
+ "151656": {
111
+ "content": "<|video_pad|>",
112
+ "lstrip": false,
113
+ "normalized": false,
114
+ "rstrip": false,
115
+ "single_word": false,
116
+ "special": true
117
+ },
118
+ "151657": {
119
+ "content": "<tool_call>",
120
+ "lstrip": false,
121
+ "normalized": false,
122
+ "rstrip": false,
123
+ "single_word": false,
124
+ "special": false
125
+ },
126
+ "151658": {
127
+ "content": "</tool_call>",
128
+ "lstrip": false,
129
+ "normalized": false,
130
+ "rstrip": false,
131
+ "single_word": false,
132
+ "special": false
133
+ },
134
+ "151659": {
135
+ "content": "<|fim_prefix|>",
136
+ "lstrip": false,
137
+ "normalized": false,
138
+ "rstrip": false,
139
+ "single_word": false,
140
+ "special": false
141
+ },
142
+ "151660": {
143
+ "content": "<|fim_middle|>",
144
+ "lstrip": false,
145
+ "normalized": false,
146
+ "rstrip": false,
147
+ "single_word": false,
148
+ "special": false
149
+ },
150
+ "151661": {
151
+ "content": "<|fim_suffix|>",
152
+ "lstrip": false,
153
+ "normalized": false,
154
+ "rstrip": false,
155
+ "single_word": false,
156
+ "special": false
157
+ },
158
+ "151662": {
159
+ "content": "<|fim_pad|>",
160
+ "lstrip": false,
161
+ "normalized": false,
162
+ "rstrip": false,
163
+ "single_word": false,
164
+ "special": false
165
+ },
166
+ "151663": {
167
+ "content": "<|repo_name|>",
168
+ "lstrip": false,
169
+ "normalized": false,
170
+ "rstrip": false,
171
+ "single_word": false,
172
+ "special": false
173
+ },
174
+ "151664": {
175
+ "content": "<|file_sep|>",
176
+ "lstrip": false,
177
+ "normalized": false,
178
+ "rstrip": false,
179
+ "single_word": false,
180
+ "special": false
181
+ },
182
+ "151665": {
183
+ "content": "<tool_response>",
184
+ "lstrip": false,
185
+ "normalized": false,
186
+ "rstrip": false,
187
+ "single_word": false,
188
+ "special": false
189
+ },
190
+ "151666": {
191
+ "content": "</tool_response>",
192
+ "lstrip": false,
193
+ "normalized": false,
194
+ "rstrip": false,
195
+ "single_word": false,
196
+ "special": false
197
+ },
198
+ "151667": {
199
+ "content": "<think>",
200
+ "lstrip": false,
201
+ "normalized": false,
202
+ "rstrip": false,
203
+ "single_word": false,
204
+ "special": false
205
+ },
206
+ "151668": {
207
+ "content": "</think>",
208
+ "lstrip": false,
209
+ "normalized": false,
210
+ "rstrip": false,
211
+ "single_word": false,
212
+ "special": false
213
+ },
214
+ "151669": {
215
+ "content": "<IMG_CONTEXT>",
216
+ "lstrip": false,
217
+ "normalized": false,
218
+ "rstrip": false,
219
+ "single_word": false,
220
+ "special": true
221
+ },
222
+ "151670": {
223
+ "content": "<img>",
224
+ "lstrip": false,
225
+ "normalized": false,
226
+ "rstrip": false,
227
+ "single_word": false,
228
+ "special": true
229
+ },
230
+ "151671": {
231
+ "content": "</img>",
232
+ "lstrip": false,
233
+ "normalized": false,
234
+ "rstrip": false,
235
+ "single_word": false,
236
+ "special": true
237
+ },
238
+ "151672": {
239
+ "content": "<quad>",
240
+ "lstrip": false,
241
+ "normalized": false,
242
+ "rstrip": false,
243
+ "single_word": false,
244
+ "special": true
245
+ },
246
+ "151673": {
247
+ "content": "</quad>",
248
+ "lstrip": false,
249
+ "normalized": false,
250
+ "rstrip": false,
251
+ "single_word": false,
252
+ "special": true
253
+ },
254
+ "151674": {
255
+ "content": "<ref>",
256
+ "lstrip": false,
257
+ "normalized": false,
258
+ "rstrip": false,
259
+ "single_word": false,
260
+ "special": true
261
+ },
262
+ "151675": {
263
+ "content": "</ref>",
264
+ "lstrip": false,
265
+ "normalized": false,
266
+ "rstrip": false,
267
+ "single_word": false,
268
+ "special": true
269
+ },
270
+ "151676": {
271
+ "content": "<box>",
272
+ "lstrip": false,
273
+ "normalized": false,
274
+ "rstrip": false,
275
+ "single_word": false,
276
+ "special": true
277
+ },
278
+ "151677": {
279
+ "content": "</box>",
280
+ "lstrip": false,
281
+ "normalized": false,
282
+ "rstrip": false,
283
+ "single_word": false,
284
+ "special": true
285
+ },
286
+ "151678": {
287
+ "content": "<|action_start|>",
288
+ "lstrip": false,
289
+ "normalized": false,
290
+ "rstrip": false,
291
+ "single_word": false,
292
+ "special": true
293
+ },
294
+ "151679": {
295
+ "content": "<|action_end|>",
296
+ "lstrip": false,
297
+ "normalized": false,
298
+ "rstrip": false,
299
+ "single_word": false,
300
+ "special": true
301
+ },
302
+ "151680": {
303
+ "content": "<|plugin|>",
304
+ "lstrip": false,
305
+ "normalized": false,
306
+ "rstrip": false,
307
+ "single_word": false,
308
+ "special": true
309
+ },
310
+ "151681": {
311
+ "content": "<|interpreter|>",
312
+ "lstrip": false,
313
+ "normalized": false,
314
+ "rstrip": false,
315
+ "single_word": false,
316
+ "special": true
317
+ },
318
+ "151682": {
319
+ "content": "<FAKE_PAD_0>",
320
+ "lstrip": false,
321
+ "normalized": false,
322
+ "rstrip": false,
323
+ "single_word": false,
324
+ "special": true
325
+ },
326
+ "151683": {
327
+ "content": "<FAKE_PAD_1>",
328
+ "lstrip": false,
329
+ "normalized": false,
330
+ "rstrip": false,
331
+ "single_word": false,
332
+ "special": true
333
+ },
334
+ "151684": {
335
+ "content": "<FAKE_PAD_2>",
336
+ "lstrip": false,
337
+ "normalized": false,
338
+ "rstrip": false,
339
+ "single_word": false,
340
+ "special": true
341
+ },
342
+ "151685": {
343
+ "content": "<FAKE_PAD_3>",
344
+ "lstrip": false,
345
+ "normalized": false,
346
+ "rstrip": false,
347
+ "single_word": false,
348
+ "special": true
349
+ },
350
+ "151686": {
351
+ "content": "<FAKE_PAD_4>",
352
+ "lstrip": false,
353
+ "normalized": false,
354
+ "rstrip": false,
355
+ "single_word": false,
356
+ "special": true
357
+ },
358
+ "151687": {
359
+ "content": "<FAKE_PAD_5>",
360
+ "lstrip": false,
361
+ "normalized": false,
362
+ "rstrip": false,
363
+ "single_word": false,
364
+ "special": true
365
+ },
366
+ "151688": {
367
+ "content": "<FAKE_PAD_6>",
368
+ "lstrip": false,
369
+ "normalized": false,
370
+ "rstrip": false,
371
+ "single_word": false,
372
+ "special": true
373
+ },
374
+ "151689": {
375
+ "content": "<FAKE_PAD_7>",
376
+ "lstrip": false,
377
+ "normalized": false,
378
+ "rstrip": false,
379
+ "single_word": false,
380
+ "special": true
381
+ },
382
+ "151690": {
383
+ "content": "<FAKE_PAD_8>",
384
+ "lstrip": false,
385
+ "normalized": false,
386
+ "rstrip": false,
387
+ "single_word": false,
388
+ "special": true
389
+ },
390
+ "151691": {
391
+ "content": "<FAKE_PAD_9>",
392
+ "lstrip": false,
393
+ "normalized": false,
394
+ "rstrip": false,
395
+ "single_word": false,
396
+ "special": true
397
+ },
398
+ "151692": {
399
+ "content": "<FAKE_PAD_10>",
400
+ "lstrip": false,
401
+ "normalized": false,
402
+ "rstrip": false,
403
+ "single_word": false,
404
+ "special": true
405
+ },
406
+ "151693": {
407
+ "content": "<FAKE_PAD_11>",
408
+ "lstrip": false,
409
+ "normalized": false,
410
+ "rstrip": false,
411
+ "single_word": false,
412
+ "special": true
413
+ },
414
+ "151694": {
415
+ "content": "<FAKE_PAD_12>",
416
+ "lstrip": false,
417
+ "normalized": false,
418
+ "rstrip": false,
419
+ "single_word": false,
420
+ "special": true
421
+ },
422
+ "151695": {
423
+ "content": "<FAKE_PAD_13>",
424
+ "lstrip": false,
425
+ "normalized": false,
426
+ "rstrip": false,
427
+ "single_word": false,
428
+ "special": true
429
+ },
430
+ "151696": {
431
+ "content": "<FAKE_PAD_14>",
432
+ "lstrip": false,
433
+ "normalized": false,
434
+ "rstrip": false,
435
+ "single_word": false,
436
+ "special": true
437
+ },
438
+ "151697": {
439
+ "content": "<FAKE_PAD_15>",
440
+ "lstrip": false,
441
+ "normalized": false,
442
+ "rstrip": false,
443
+ "single_word": false,
444
+ "special": true
445
+ },
446
+ "151698": {
447
+ "content": "<FAKE_PAD_16>",
448
+ "lstrip": false,
449
+ "normalized": false,
450
+ "rstrip": false,
451
+ "single_word": false,
452
+ "special": true
453
+ },
454
+ "151699": {
455
+ "content": "<FAKE_PAD_17>",
456
+ "lstrip": false,
457
+ "normalized": false,
458
+ "rstrip": false,
459
+ "single_word": false,
460
+ "special": true
461
+ },
462
+ "151700": {
463
+ "content": "<FAKE_PAD_18>",
464
+ "lstrip": false,
465
+ "normalized": false,
466
+ "rstrip": false,
467
+ "single_word": false,
468
+ "special": true
469
+ },
470
+ "151701": {
471
+ "content": "<FAKE_PAD_19>",
472
+ "lstrip": false,
473
+ "normalized": false,
474
+ "rstrip": false,
475
+ "single_word": false,
476
+ "special": true
477
+ },
478
+ "151702": {
479
+ "content": "<FAKE_PAD_20>",
480
+ "lstrip": false,
481
+ "normalized": false,
482
+ "rstrip": false,
483
+ "single_word": false,
484
+ "special": true
485
+ },
486
+ "151703": {
487
+ "content": "<FAKE_PAD_21>",
488
+ "lstrip": false,
489
+ "normalized": false,
490
+ "rstrip": false,
491
+ "single_word": false,
492
+ "special": true
493
+ },
494
+ "151704": {
495
+ "content": "<FAKE_PAD_22>",
496
+ "lstrip": false,
497
+ "normalized": false,
498
+ "rstrip": false,
499
+ "single_word": false,
500
+ "special": true
501
+ },
502
+ "151705": {
503
+ "content": "<FAKE_PAD_23>",
504
+ "lstrip": false,
505
+ "normalized": false,
506
+ "rstrip": false,
507
+ "single_word": false,
508
+ "special": true
509
+ },
510
+ "151706": {
511
+ "content": "<FAKE_PAD_24>",
512
+ "lstrip": false,
513
+ "normalized": false,
514
+ "rstrip": false,
515
+ "single_word": false,
516
+ "special": true
517
+ },
518
+ "151707": {
519
+ "content": "<FAKE_PAD_25>",
520
+ "lstrip": false,
521
+ "normalized": false,
522
+ "rstrip": false,
523
+ "single_word": false,
524
+ "special": true
525
+ },
526
+ "151708": {
527
+ "content": "<FAKE_PAD_26>",
528
+ "lstrip": false,
529
+ "normalized": false,
530
+ "rstrip": false,
531
+ "single_word": false,
532
+ "special": true
533
+ },
534
+ "151709": {
535
+ "content": "<FAKE_PAD_27>",
536
+ "lstrip": false,
537
+ "normalized": false,
538
+ "rstrip": false,
539
+ "single_word": false,
540
+ "special": true
541
+ },
542
+ "151710": {
543
+ "content": "<FAKE_PAD_28>",
544
+ "lstrip": false,
545
+ "normalized": false,
546
+ "rstrip": false,
547
+ "single_word": false,
548
+ "special": true
549
+ },
550
+ "151711": {
551
+ "content": "<FAKE_PAD_29>",
552
+ "lstrip": false,
553
+ "normalized": false,
554
+ "rstrip": false,
555
+ "single_word": false,
556
+ "special": true
557
+ },
558
+ "151712": {
559
+ "content": "<FAKE_PAD_30>",
560
+ "lstrip": false,
561
+ "normalized": false,
562
+ "rstrip": false,
563
+ "single_word": false,
564
+ "special": true
565
+ },
566
+ "151713": {
567
+ "content": "<FAKE_PAD_31>",
568
+ "lstrip": false,
569
+ "normalized": false,
570
+ "rstrip": false,
571
+ "single_word": false,
572
+ "special": true
573
+ },
574
+ "151714": {
575
+ "content": "<FAKE_PAD_32>",
576
+ "lstrip": false,
577
+ "normalized": false,
578
+ "rstrip": false,
579
+ "single_word": false,
580
+ "special": true
581
+ },
582
+ "151715": {
583
+ "content": "<FAKE_PAD_33>",
584
+ "lstrip": false,
585
+ "normalized": false,
586
+ "rstrip": false,
587
+ "single_word": false,
588
+ "special": true
589
+ },
590
+ "151716": {
591
+ "content": "<FAKE_PAD_34>",
592
+ "lstrip": false,
593
+ "normalized": false,
594
+ "rstrip": false,
595
+ "single_word": false,
596
+ "special": true
597
+ },
598
+ "151717": {
599
+ "content": "<FAKE_PAD_35>",
600
+ "lstrip": false,
601
+ "normalized": false,
602
+ "rstrip": false,
603
+ "single_word": false,
604
+ "special": true
605
+ },
606
+ "151718": {
607
+ "content": "<FAKE_PAD_36>",
608
+ "lstrip": false,
609
+ "normalized": false,
610
+ "rstrip": false,
611
+ "single_word": false,
612
+ "special": true
613
+ },
614
+ "151719": {
615
+ "content": "<FAKE_PAD_37>",
616
+ "lstrip": false,
617
+ "normalized": false,
618
+ "rstrip": false,
619
+ "single_word": false,
620
+ "special": true
621
+ },
622
+ "151720": {
623
+ "content": "<FAKE_PAD_38>",
624
+ "lstrip": false,
625
+ "normalized": false,
626
+ "rstrip": false,
627
+ "single_word": false,
628
+ "special": true
629
+ },
630
+ "151721": {
631
+ "content": "<FAKE_PAD_39>",
632
+ "lstrip": false,
633
+ "normalized": false,
634
+ "rstrip": false,
635
+ "single_word": false,
636
+ "special": true
637
+ },
638
+ "151722": {
639
+ "content": "<FAKE_PAD_40>",
640
+ "lstrip": false,
641
+ "normalized": false,
642
+ "rstrip": false,
643
+ "single_word": false,
644
+ "special": true
645
+ },
646
+ "151723": {
647
+ "content": "<FAKE_PAD_41>",
648
+ "lstrip": false,
649
+ "normalized": false,
650
+ "rstrip": false,
651
+ "single_word": false,
652
+ "special": true
653
+ },
654
+ "151724": {
655
+ "content": "<FAKE_PAD_42>",
656
+ "lstrip": false,
657
+ "normalized": false,
658
+ "rstrip": false,
659
+ "single_word": false,
660
+ "special": true
661
+ },
662
+ "151725": {
663
+ "content": "<FAKE_PAD_43>",
664
+ "lstrip": false,
665
+ "normalized": false,
666
+ "rstrip": false,
667
+ "single_word": false,
668
+ "special": true
669
+ },
670
+ "151726": {
671
+ "content": "<FAKE_PAD_44>",
672
+ "lstrip": false,
673
+ "normalized": false,
674
+ "rstrip": false,
675
+ "single_word": false,
676
+ "special": true
677
+ },
678
+ "151727": {
679
+ "content": "<FAKE_PAD_45>",
680
+ "lstrip": false,
681
+ "normalized": false,
682
+ "rstrip": false,
683
+ "single_word": false,
684
+ "special": true
685
+ },
686
+ "151728": {
687
+ "content": "<FAKE_PAD_46>",
688
+ "lstrip": false,
689
+ "normalized": false,
690
+ "rstrip": false,
691
+ "single_word": false,
692
+ "special": true
693
+ },
694
+ "151729": {
695
+ "content": "<FAKE_PAD_47>",
696
+ "lstrip": false,
697
+ "normalized": false,
698
+ "rstrip": false,
699
+ "single_word": false,
700
+ "special": true
701
+ },
702
+ "151730": {
703
+ "content": "<FAKE_PAD_48>",
704
+ "lstrip": false,
705
+ "normalized": false,
706
+ "rstrip": false,
707
+ "single_word": false,
708
+ "special": true
709
+ },
710
+ "151731": {
711
+ "content": "<FAKE_PAD_49>",
712
+ "lstrip": false,
713
+ "normalized": false,
714
+ "rstrip": false,
715
+ "single_word": false,
716
+ "special": true
717
+ },
718
+ "151732": {
719
+ "content": "<FAKE_PAD_50>",
720
+ "lstrip": false,
721
+ "normalized": false,
722
+ "rstrip": false,
723
+ "single_word": false,
724
+ "special": true
725
+ },
726
+ "151733": {
727
+ "content": "<FAKE_PAD_51>",
728
+ "lstrip": false,
729
+ "normalized": false,
730
+ "rstrip": false,
731
+ "single_word": false,
732
+ "special": true
733
+ },
734
+ "151734": {
735
+ "content": "<FAKE_PAD_52>",
736
+ "lstrip": false,
737
+ "normalized": false,
738
+ "rstrip": false,
739
+ "single_word": false,
740
+ "special": true
741
+ },
742
+ "151735": {
743
+ "content": "<FAKE_PAD_53>",
744
+ "lstrip": false,
745
+ "normalized": false,
746
+ "rstrip": false,
747
+ "single_word": false,
748
+ "special": true
749
+ },
750
+ "151736": {
751
+ "content": "<FAKE_PAD_54>",
752
+ "lstrip": false,
753
+ "normalized": false,
754
+ "rstrip": false,
755
+ "single_word": false,
756
+ "special": true
757
+ },
758
+ "151737": {
759
+ "content": "<FAKE_PAD_55>",
760
+ "lstrip": false,
761
+ "normalized": false,
762
+ "rstrip": false,
763
+ "single_word": false,
764
+ "special": true
765
+ },
766
+ "151738": {
767
+ "content": "<FAKE_PAD_56>",
768
+ "lstrip": false,
769
+ "normalized": false,
770
+ "rstrip": false,
771
+ "single_word": false,
772
+ "special": true
773
+ },
774
+ "151739": {
775
+ "content": "<FAKE_PAD_57>",
776
+ "lstrip": false,
777
+ "normalized": false,
778
+ "rstrip": false,
779
+ "single_word": false,
780
+ "special": true
781
+ },
782
+ "151740": {
783
+ "content": "<FAKE_PAD_58>",
784
+ "lstrip": false,
785
+ "normalized": false,
786
+ "rstrip": false,
787
+ "single_word": false,
788
+ "special": true
789
+ },
790
+ "151741": {
791
+ "content": "<FAKE_PAD_59>",
792
+ "lstrip": false,
793
+ "normalized": false,
794
+ "rstrip": false,
795
+ "single_word": false,
796
+ "special": true
797
+ },
798
+ "151742": {
799
+ "content": "<FAKE_PAD_60>",
800
+ "lstrip": false,
801
+ "normalized": false,
802
+ "rstrip": false,
803
+ "single_word": false,
804
+ "special": true
805
+ },
806
+ "151743": {
807
+ "content": "<FAKE_PAD_61>",
808
+ "lstrip": false,
809
+ "normalized": false,
810
+ "rstrip": false,
811
+ "single_word": false,
812
+ "special": true
813
+ },
814
+ "151744": {
815
+ "content": "<FAKE_PAD_62>",
816
+ "lstrip": false,
817
+ "normalized": false,
818
+ "rstrip": false,
819
+ "single_word": false,
820
+ "special": true
821
+ },
822
+ "151745": {
823
+ "content": "<FAKE_PAD_63>",
824
+ "lstrip": false,
825
+ "normalized": false,
826
+ "rstrip": false,
827
+ "single_word": false,
828
+ "special": true
829
+ },
830
+ "151746": {
831
+ "content": "<FAKE_PAD_64>",
832
+ "lstrip": false,
833
+ "normalized": false,
834
+ "rstrip": false,
835
+ "single_word": false,
836
+ "special": true
837
+ },
838
+ "151747": {
839
+ "content": "<FAKE_PAD_65>",
840
+ "lstrip": false,
841
+ "normalized": false,
842
+ "rstrip": false,
843
+ "single_word": false,
844
+ "special": true
845
+ },
846
+ "151748": {
847
+ "content": "<FAKE_PAD_66>",
848
+ "lstrip": false,
849
+ "normalized": false,
850
+ "rstrip": false,
851
+ "single_word": false,
852
+ "special": true
853
+ },
854
+ "151749": {
855
+ "content": "<FAKE_PAD_67>",
856
+ "lstrip": false,
857
+ "normalized": false,
858
+ "rstrip": false,
859
+ "single_word": false,
860
+ "special": true
861
+ },
862
+ "151750": {
863
+ "content": "<FAKE_PAD_68>",
864
+ "lstrip": false,
865
+ "normalized": false,
866
+ "rstrip": false,
867
+ "single_word": false,
868
+ "special": true
869
+ },
870
+ "151751": {
871
+ "content": "<FAKE_PAD_69>",
872
+ "lstrip": false,
873
+ "normalized": false,
874
+ "rstrip": false,
875
+ "single_word": false,
876
+ "special": true
877
+ },
878
+ "151752": {
879
+ "content": "<FAKE_PAD_70>",
880
+ "lstrip": false,
881
+ "normalized": false,
882
+ "rstrip": false,
883
+ "single_word": false,
884
+ "special": true
885
+ },
886
+ "151753": {
887
+ "content": "<FAKE_PAD_71>",
888
+ "lstrip": false,
889
+ "normalized": false,
890
+ "rstrip": false,
891
+ "single_word": false,
892
+ "special": true
893
+ },
894
+ "151754": {
895
+ "content": "<FAKE_PAD_72>",
896
+ "lstrip": false,
897
+ "normalized": false,
898
+ "rstrip": false,
899
+ "single_word": false,
900
+ "special": true
901
+ },
902
+ "151755": {
903
+ "content": "<FAKE_PAD_73>",
904
+ "lstrip": false,
905
+ "normalized": false,
906
+ "rstrip": false,
907
+ "single_word": false,
908
+ "special": true
909
+ },
910
+ "151756": {
911
+ "content": "<FAKE_PAD_74>",
912
+ "lstrip": false,
913
+ "normalized": false,
914
+ "rstrip": false,
915
+ "single_word": false,
916
+ "special": true
917
+ },
918
+ "151757": {
919
+ "content": "<FAKE_PAD_75>",
920
+ "lstrip": false,
921
+ "normalized": false,
922
+ "rstrip": false,
923
+ "single_word": false,
924
+ "special": true
925
+ },
926
+ "151758": {
927
+ "content": "<FAKE_PAD_76>",
928
+ "lstrip": false,
929
+ "normalized": false,
930
+ "rstrip": false,
931
+ "single_word": false,
932
+ "special": true
933
+ },
934
+ "151759": {
935
+ "content": "<FAKE_PAD_77>",
936
+ "lstrip": false,
937
+ "normalized": false,
938
+ "rstrip": false,
939
+ "single_word": false,
940
+ "special": true
941
+ },
942
+ "151760": {
943
+ "content": "<FAKE_PAD_78>",
944
+ "lstrip": false,
945
+ "normalized": false,
946
+ "rstrip": false,
947
+ "single_word": false,
948
+ "special": true
949
+ },
950
+ "151761": {
951
+ "content": "<FAKE_PAD_79>",
952
+ "lstrip": false,
953
+ "normalized": false,
954
+ "rstrip": false,
955
+ "single_word": false,
956
+ "special": true
957
+ },
958
+ "151762": {
959
+ "content": "<FAKE_PAD_80>",
960
+ "lstrip": false,
961
+ "normalized": false,
962
+ "rstrip": false,
963
+ "single_word": false,
964
+ "special": true
965
+ },
966
+ "151763": {
967
+ "content": "<FAKE_PAD_81>",
968
+ "lstrip": false,
969
+ "normalized": false,
970
+ "rstrip": false,
971
+ "single_word": false,
972
+ "special": true
973
+ },
974
+ "151764": {
975
+ "content": "<FAKE_PAD_82>",
976
+ "lstrip": false,
977
+ "normalized": false,
978
+ "rstrip": false,
979
+ "single_word": false,
980
+ "special": true
981
+ },
982
+ "151765": {
983
+ "content": "<FAKE_PAD_83>",
984
+ "lstrip": false,
985
+ "normalized": false,
986
+ "rstrip": false,
987
+ "single_word": false,
988
+ "special": true
989
+ },
990
+ "151766": {
991
+ "content": "<FAKE_PAD_84>",
992
+ "lstrip": false,
993
+ "normalized": false,
994
+ "rstrip": false,
995
+ "single_word": false,
996
+ "special": true
997
+ },
998
+ "151767": {
999
+ "content": "<FAKE_PAD_85>",
1000
+ "lstrip": false,
1001
+ "normalized": false,
1002
+ "rstrip": false,
1003
+ "single_word": false,
1004
+ "special": true
1005
+ },
1006
+ "151768": {
1007
+ "content": "<FAKE_PAD_86>",
1008
+ "lstrip": false,
1009
+ "normalized": false,
1010
+ "rstrip": false,
1011
+ "single_word": false,
1012
+ "special": true
1013
+ },
1014
+ "151769": {
1015
+ "content": "<FAKE_PAD_87>",
1016
+ "lstrip": false,
1017
+ "normalized": false,
1018
+ "rstrip": false,
1019
+ "single_word": false,
1020
+ "special": true
1021
+ },
1022
+ "151770": {
1023
+ "content": "<FAKE_PAD_88>",
1024
+ "lstrip": false,
1025
+ "normalized": false,
1026
+ "rstrip": false,
1027
+ "single_word": false,
1028
+ "special": true
1029
+ },
1030
+ "151771": {
1031
+ "content": "<FAKE_PAD_89>",
1032
+ "lstrip": false,
1033
+ "normalized": false,
1034
+ "rstrip": false,
1035
+ "single_word": false,
1036
+ "special": true
1037
+ },
1038
+ "151772": {
1039
+ "content": "<FAKE_PAD_90>",
1040
+ "lstrip": false,
1041
+ "normalized": false,
1042
+ "rstrip": false,
1043
+ "single_word": false,
1044
+ "special": true
1045
+ },
1046
+ "151773": {
1047
+ "content": "<FAKE_PAD_91>",
1048
+ "lstrip": false,
1049
+ "normalized": false,
1050
+ "rstrip": false,
1051
+ "single_word": false,
1052
+ "special": true
1053
+ },
1054
+ "151774": {
1055
+ "content": "<FAKE_PAD_92>",
1056
+ "lstrip": false,
1057
+ "normalized": false,
1058
+ "rstrip": false,
1059
+ "single_word": false,
1060
+ "special": true
1061
+ },
1062
+ "151775": {
1063
+ "content": "<FAKE_PAD_93>",
1064
+ "lstrip": false,
1065
+ "normalized": false,
1066
+ "rstrip": false,
1067
+ "single_word": false,
1068
+ "special": true
1069
+ },
1070
+ "151776": {
1071
+ "content": "<FAKE_PAD_94>",
1072
+ "lstrip": false,
1073
+ "normalized": false,
1074
+ "rstrip": false,
1075
+ "single_word": false,
1076
+ "special": true
1077
+ },
1078
+ "151777": {
1079
+ "content": "<FAKE_PAD_95>",
1080
+ "lstrip": false,
1081
+ "normalized": false,
1082
+ "rstrip": false,
1083
+ "single_word": false,
1084
+ "special": true
1085
+ },
1086
+ "151778": {
1087
+ "content": "<FAKE_PAD_96>",
1088
+ "lstrip": false,
1089
+ "normalized": false,
1090
+ "rstrip": false,
1091
+ "single_word": false,
1092
+ "special": true
1093
+ },
1094
+ "151779": {
1095
+ "content": "<FAKE_PAD_97>",
1096
+ "lstrip": false,
1097
+ "normalized": false,
1098
+ "rstrip": false,
1099
+ "single_word": false,
1100
+ "special": true
1101
+ },
1102
+ "151780": {
1103
+ "content": "<FAKE_PAD_98>",
1104
+ "lstrip": false,
1105
+ "normalized": false,
1106
+ "rstrip": false,
1107
+ "single_word": false,
1108
+ "special": true
1109
+ },
1110
+ "151781": {
1111
+ "content": "<FAKE_PAD_99>",
1112
+ "lstrip": false,
1113
+ "normalized": false,
1114
+ "rstrip": false,
1115
+ "single_word": false,
1116
+ "special": true
1117
+ },
1118
+ "151782": {
1119
+ "content": "<FAKE_PAD_100>",
1120
+ "lstrip": false,
1121
+ "normalized": false,
1122
+ "rstrip": false,
1123
+ "single_word": false,
1124
+ "special": true
1125
+ },
1126
+ "151783": {
1127
+ "content": "<FAKE_PAD_101>",
1128
+ "lstrip": false,
1129
+ "normalized": false,
1130
+ "rstrip": false,
1131
+ "single_word": false,
1132
+ "special": true
1133
+ },
1134
+ "151784": {
1135
+ "content": "<FAKE_PAD_102>",
1136
+ "lstrip": false,
1137
+ "normalized": false,
1138
+ "rstrip": false,
1139
+ "single_word": false,
1140
+ "special": true
1141
+ },
1142
+ "151785": {
1143
+ "content": "<FAKE_PAD_103>",
1144
+ "lstrip": false,
1145
+ "normalized": false,
1146
+ "rstrip": false,
1147
+ "single_word": false,
1148
+ "special": true
1149
+ },
1150
+ "151786": {
1151
+ "content": "<FAKE_PAD_104>",
1152
+ "lstrip": false,
1153
+ "normalized": false,
1154
+ "rstrip": false,
1155
+ "single_word": false,
1156
+ "special": true
1157
+ },
1158
+ "151787": {
1159
+ "content": "<FAKE_PAD_105>",
1160
+ "lstrip": false,
1161
+ "normalized": false,
1162
+ "rstrip": false,
1163
+ "single_word": false,
1164
+ "special": true
1165
+ },
1166
+ "151788": {
1167
+ "content": "<FAKE_PAD_106>",
1168
+ "lstrip": false,
1169
+ "normalized": false,
1170
+ "rstrip": false,
1171
+ "single_word": false,
1172
+ "special": true
1173
+ },
1174
+ "151789": {
1175
+ "content": "<FAKE_PAD_107>",
1176
+ "lstrip": false,
1177
+ "normalized": false,
1178
+ "rstrip": false,
1179
+ "single_word": false,
1180
+ "special": true
1181
+ },
1182
+ "151790": {
1183
+ "content": "<FAKE_PAD_108>",
1184
+ "lstrip": false,
1185
+ "normalized": false,
1186
+ "rstrip": false,
1187
+ "single_word": false,
1188
+ "special": true
1189
+ },
1190
+ "151791": {
1191
+ "content": "<FAKE_PAD_109>",
1192
+ "lstrip": false,
1193
+ "normalized": false,
1194
+ "rstrip": false,
1195
+ "single_word": false,
1196
+ "special": true
1197
+ },
1198
+ "151792": {
1199
+ "content": "<FAKE_PAD_110>",
1200
+ "lstrip": false,
1201
+ "normalized": false,
1202
+ "rstrip": false,
1203
+ "single_word": false,
1204
+ "special": true
1205
+ },
1206
+ "151793": {
1207
+ "content": "<FAKE_PAD_111>",
1208
+ "lstrip": false,
1209
+ "normalized": false,
1210
+ "rstrip": false,
1211
+ "single_word": false,
1212
+ "special": true
1213
+ },
1214
+ "151794": {
1215
+ "content": "<FAKE_PAD_112>",
1216
+ "lstrip": false,
1217
+ "normalized": false,
1218
+ "rstrip": false,
1219
+ "single_word": false,
1220
+ "special": true
1221
+ },
1222
+ "151795": {
1223
+ "content": "<FAKE_PAD_113>",
1224
+ "lstrip": false,
1225
+ "normalized": false,
1226
+ "rstrip": false,
1227
+ "single_word": false,
1228
+ "special": true
1229
+ },
1230
+ "151796": {
1231
+ "content": "<FAKE_PAD_114>",
1232
+ "lstrip": false,
1233
+ "normalized": false,
1234
+ "rstrip": false,
1235
+ "single_word": false,
1236
+ "special": true
1237
+ },
1238
+ "151797": {
1239
+ "content": "<FAKE_PAD_115>",
1240
+ "lstrip": false,
1241
+ "normalized": false,
1242
+ "rstrip": false,
1243
+ "single_word": false,
1244
+ "special": true
1245
+ },
1246
+ "151798": {
1247
+ "content": "<FAKE_PAD_116>",
1248
+ "lstrip": false,
1249
+ "normalized": false,
1250
+ "rstrip": false,
1251
+ "single_word": false,
1252
+ "special": true
1253
+ },
1254
+ "151799": {
1255
+ "content": "<FAKE_PAD_117>",
1256
+ "lstrip": false,
1257
+ "normalized": false,
1258
+ "rstrip": false,
1259
+ "single_word": false,
1260
+ "special": true
1261
+ },
1262
+ "151800": {
1263
+ "content": "<FAKE_PAD_118>",
1264
+ "lstrip": false,
1265
+ "normalized": false,
1266
+ "rstrip": false,
1267
+ "single_word": false,
1268
+ "special": true
1269
+ },
1270
+ "151801": {
1271
+ "content": "<FAKE_PAD_119>",
1272
+ "lstrip": false,
1273
+ "normalized": false,
1274
+ "rstrip": false,
1275
+ "single_word": false,
1276
+ "special": true
1277
+ },
1278
+ "151802": {
1279
+ "content": "<FAKE_PAD_120>",
1280
+ "lstrip": false,
1281
+ "normalized": false,
1282
+ "rstrip": false,
1283
+ "single_word": false,
1284
+ "special": true
1285
+ },
1286
+ "151803": {
1287
+ "content": "<FAKE_PAD_121>",
1288
+ "lstrip": false,
1289
+ "normalized": false,
1290
+ "rstrip": false,
1291
+ "single_word": false,
1292
+ "special": true
1293
+ },
1294
+ "151804": {
1295
+ "content": "<FAKE_PAD_122>",
1296
+ "lstrip": false,
1297
+ "normalized": false,
1298
+ "rstrip": false,
1299
+ "single_word": false,
1300
+ "special": true
1301
+ },
1302
+ "151805": {
1303
+ "content": "<FAKE_PAD_123>",
1304
+ "lstrip": false,
1305
+ "normalized": false,
1306
+ "rstrip": false,
1307
+ "single_word": false,
1308
+ "special": true
1309
+ },
1310
+ "151806": {
1311
+ "content": "<FAKE_PAD_124>",
1312
+ "lstrip": false,
1313
+ "normalized": false,
1314
+ "rstrip": false,
1315
+ "single_word": false,
1316
+ "special": true
1317
+ },
1318
+ "151807": {
1319
+ "content": "<FAKE_PAD_125>",
1320
+ "lstrip": false,
1321
+ "normalized": false,
1322
+ "rstrip": false,
1323
+ "single_word": false,
1324
+ "special": true
1325
+ },
1326
+ "151808": {
1327
+ "content": "<FAKE_PAD_126>",
1328
+ "lstrip": false,
1329
+ "normalized": false,
1330
+ "rstrip": false,
1331
+ "single_word": false,
1332
+ "special": true
1333
+ },
1334
+ "151809": {
1335
+ "content": "<FAKE_PAD_127>",
1336
+ "lstrip": false,
1337
+ "normalized": false,
1338
+ "rstrip": false,
1339
+ "single_word": false,
1340
+ "special": true
1341
+ },
1342
+ "151810": {
1343
+ "content": "<FAKE_PAD_128>",
1344
+ "lstrip": false,
1345
+ "normalized": false,
1346
+ "rstrip": false,
1347
+ "single_word": false,
1348
+ "special": true
1349
+ },
1350
+ "151811": {
1351
+ "content": "<FAKE_PAD_129>",
1352
+ "lstrip": false,
1353
+ "normalized": false,
1354
+ "rstrip": false,
1355
+ "single_word": false,
1356
+ "special": true
1357
+ },
1358
+ "151812": {
1359
+ "content": "<FAKE_PAD_130>",
1360
+ "lstrip": false,
1361
+ "normalized": false,
1362
+ "rstrip": false,
1363
+ "single_word": false,
1364
+ "special": true
1365
+ },
1366
+ "151813": {
1367
+ "content": "<FAKE_PAD_131>",
1368
+ "lstrip": false,
1369
+ "normalized": false,
1370
+ "rstrip": false,
1371
+ "single_word": false,
1372
+ "special": true
1373
+ },
1374
+ "151814": {
1375
+ "content": "<FAKE_PAD_132>",
1376
+ "lstrip": false,
1377
+ "normalized": false,
1378
+ "rstrip": false,
1379
+ "single_word": false,
1380
+ "special": true
1381
+ },
1382
+ "151815": {
1383
+ "content": "<FAKE_PAD_133>",
1384
+ "lstrip": false,
1385
+ "normalized": false,
1386
+ "rstrip": false,
1387
+ "single_word": false,
1388
+ "special": true
1389
+ },
1390
+ "151816": {
1391
+ "content": "<FAKE_PAD_134>",
1392
+ "lstrip": false,
1393
+ "normalized": false,
1394
+ "rstrip": false,
1395
+ "single_word": false,
1396
+ "special": true
1397
+ },
1398
+ "151817": {
1399
+ "content": "<FAKE_PAD_135>",
1400
+ "lstrip": false,
1401
+ "normalized": false,
1402
+ "rstrip": false,
1403
+ "single_word": false,
1404
+ "special": true
1405
+ },
1406
+ "151818": {
1407
+ "content": "<FAKE_PAD_136>",
1408
+ "lstrip": false,
1409
+ "normalized": false,
1410
+ "rstrip": false,
1411
+ "single_word": false,
1412
+ "special": true
1413
+ },
1414
+ "151819": {
1415
+ "content": "<FAKE_PAD_137>",
1416
+ "lstrip": false,
1417
+ "normalized": false,
1418
+ "rstrip": false,
1419
+ "single_word": false,
1420
+ "special": true
1421
+ },
1422
+ "151820": {
1423
+ "content": "<FAKE_PAD_138>",
1424
+ "lstrip": false,
1425
+ "normalized": false,
1426
+ "rstrip": false,
1427
+ "single_word": false,
1428
+ "special": true
1429
+ },
1430
+ "151821": {
1431
+ "content": "<FAKE_PAD_139>",
1432
+ "lstrip": false,
1433
+ "normalized": false,
1434
+ "rstrip": false,
1435
+ "single_word": false,
1436
+ "special": true
1437
+ },
1438
+ "151822": {
1439
+ "content": "<FAKE_PAD_140>",
1440
+ "lstrip": false,
1441
+ "normalized": false,
1442
+ "rstrip": false,
1443
+ "single_word": false,
1444
+ "special": true
1445
+ },
1446
+ "151823": {
1447
+ "content": "<FAKE_PAD_141>",
1448
+ "lstrip": false,
1449
+ "normalized": false,
1450
+ "rstrip": false,
1451
+ "single_word": false,
1452
+ "special": true
1453
+ },
1454
+ "151824": {
1455
+ "content": "<FAKE_PAD_142>",
1456
+ "lstrip": false,
1457
+ "normalized": false,
1458
+ "rstrip": false,
1459
+ "single_word": false,
1460
+ "special": true
1461
+ },
1462
+ "151825": {
1463
+ "content": "<FAKE_PAD_143>",
1464
+ "lstrip": false,
1465
+ "normalized": false,
1466
+ "rstrip": false,
1467
+ "single_word": false,
1468
+ "special": true
1469
+ },
1470
+ "151826": {
1471
+ "content": "<FAKE_PAD_144>",
1472
+ "lstrip": false,
1473
+ "normalized": false,
1474
+ "rstrip": false,
1475
+ "single_word": false,
1476
+ "special": true
1477
+ },
1478
+ "151827": {
1479
+ "content": "<FAKE_PAD_145>",
1480
+ "lstrip": false,
1481
+ "normalized": false,
1482
+ "rstrip": false,
1483
+ "single_word": false,
1484
+ "special": true
1485
+ },
1486
+ "151828": {
1487
+ "content": "<FAKE_PAD_146>",
1488
+ "lstrip": false,
1489
+ "normalized": false,
1490
+ "rstrip": false,
1491
+ "single_word": false,
1492
+ "special": true
1493
+ },
1494
+ "151829": {
1495
+ "content": "<FAKE_PAD_147>",
1496
+ "lstrip": false,
1497
+ "normalized": false,
1498
+ "rstrip": false,
1499
+ "single_word": false,
1500
+ "special": true
1501
+ },
1502
+ "151830": {
1503
+ "content": "<FAKE_PAD_148>",
1504
+ "lstrip": false,
1505
+ "normalized": false,
1506
+ "rstrip": false,
1507
+ "single_word": false,
1508
+ "special": true
1509
+ },
1510
+ "151831": {
1511
+ "content": "<FAKE_PAD_149>",
1512
+ "lstrip": false,
1513
+ "normalized": false,
1514
+ "rstrip": false,
1515
+ "single_word": false,
1516
+ "special": true
1517
+ },
1518
+ "151832": {
1519
+ "content": "<FAKE_PAD_150>",
1520
+ "lstrip": false,
1521
+ "normalized": false,
1522
+ "rstrip": false,
1523
+ "single_word": false,
1524
+ "special": true
1525
+ },
1526
+ "151833": {
1527
+ "content": "<FAKE_PAD_151>",
1528
+ "lstrip": false,
1529
+ "normalized": false,
1530
+ "rstrip": false,
1531
+ "single_word": false,
1532
+ "special": true
1533
+ },
1534
+ "151834": {
1535
+ "content": "<FAKE_PAD_152>",
1536
+ "lstrip": false,
1537
+ "normalized": false,
1538
+ "rstrip": false,
1539
+ "single_word": false,
1540
+ "special": true
1541
+ },
1542
+ "151835": {
1543
+ "content": "<FAKE_PAD_153>",
1544
+ "lstrip": false,
1545
+ "normalized": false,
1546
+ "rstrip": false,
1547
+ "single_word": false,
1548
+ "special": true
1549
+ },
1550
+ "151836": {
1551
+ "content": "<FAKE_PAD_154>",
1552
+ "lstrip": false,
1553
+ "normalized": false,
1554
+ "rstrip": false,
1555
+ "single_word": false,
1556
+ "special": true
1557
+ },
1558
+ "151837": {
1559
+ "content": "<FAKE_PAD_155>",
1560
+ "lstrip": false,
1561
+ "normalized": false,
1562
+ "rstrip": false,
1563
+ "single_word": false,
1564
+ "special": true
1565
+ },
1566
+ "151838": {
1567
+ "content": "<FAKE_PAD_156>",
1568
+ "lstrip": false,
1569
+ "normalized": false,
1570
+ "rstrip": false,
1571
+ "single_word": false,
1572
+ "special": true
1573
+ },
1574
+ "151839": {
1575
+ "content": "<FAKE_PAD_157>",
1576
+ "lstrip": false,
1577
+ "normalized": false,
1578
+ "rstrip": false,
1579
+ "single_word": false,
1580
+ "special": true
1581
+ },
1582
+ "151840": {
1583
+ "content": "<FAKE_PAD_158>",
1584
+ "lstrip": false,
1585
+ "normalized": false,
1586
+ "rstrip": false,
1587
+ "single_word": false,
1588
+ "special": true
1589
+ },
1590
+ "151841": {
1591
+ "content": "<FAKE_PAD_159>",
1592
+ "lstrip": false,
1593
+ "normalized": false,
1594
+ "rstrip": false,
1595
+ "single_word": false,
1596
+ "special": true
1597
+ },
1598
+ "151842": {
1599
+ "content": "<FAKE_PAD_160>",
1600
+ "lstrip": false,
1601
+ "normalized": false,
1602
+ "rstrip": false,
1603
+ "single_word": false,
1604
+ "special": true
1605
+ },
1606
+ "151843": {
1607
+ "content": "<FAKE_PAD_161>",
1608
+ "lstrip": false,
1609
+ "normalized": false,
1610
+ "rstrip": false,
1611
+ "single_word": false,
1612
+ "special": true
1613
+ },
1614
+ "151844": {
1615
+ "content": "<FAKE_PAD_162>",
1616
+ "lstrip": false,
1617
+ "normalized": false,
1618
+ "rstrip": false,
1619
+ "single_word": false,
1620
+ "special": true
1621
+ },
1622
+ "151845": {
1623
+ "content": "<FAKE_PAD_163>",
1624
+ "lstrip": false,
1625
+ "normalized": false,
1626
+ "rstrip": false,
1627
+ "single_word": false,
1628
+ "special": true
1629
+ },
1630
+ "151846": {
1631
+ "content": "<FAKE_PAD_164>",
1632
+ "lstrip": false,
1633
+ "normalized": false,
1634
+ "rstrip": false,
1635
+ "single_word": false,
1636
+ "special": true
1637
+ },
1638
+ "151847": {
1639
+ "content": "<FAKE_PAD_165>",
1640
+ "lstrip": false,
1641
+ "normalized": false,
1642
+ "rstrip": false,
1643
+ "single_word": false,
1644
+ "special": true
1645
+ },
1646
+ "151848": {
1647
+ "content": "<FAKE_PAD_166>",
1648
+ "lstrip": false,
1649
+ "normalized": false,
1650
+ "rstrip": false,
1651
+ "single_word": false,
1652
+ "special": true
1653
+ },
1654
+ "151849": {
1655
+ "content": "<FAKE_PAD_167>",
1656
+ "lstrip": false,
1657
+ "normalized": false,
1658
+ "rstrip": false,
1659
+ "single_word": false,
1660
+ "special": true
1661
+ },
1662
+ "151850": {
1663
+ "content": "<FAKE_PAD_168>",
1664
+ "lstrip": false,
1665
+ "normalized": false,
1666
+ "rstrip": false,
1667
+ "single_word": false,
1668
+ "special": true
1669
+ },
1670
+ "151851": {
1671
+ "content": "<FAKE_PAD_169>",
1672
+ "lstrip": false,
1673
+ "normalized": false,
1674
+ "rstrip": false,
1675
+ "single_word": false,
1676
+ "special": true
1677
+ },
1678
+ "151852": {
1679
+ "content": "<FAKE_PAD_170>",
1680
+ "lstrip": false,
1681
+ "normalized": false,
1682
+ "rstrip": false,
1683
+ "single_word": false,
1684
+ "special": true
1685
+ },
1686
+ "151853": {
1687
+ "content": "<FAKE_PAD_171>",
1688
+ "lstrip": false,
1689
+ "normalized": false,
1690
+ "rstrip": false,
1691
+ "single_word": false,
1692
+ "special": true
1693
+ },
1694
+ "151854": {
1695
+ "content": "<FAKE_PAD_172>",
1696
+ "lstrip": false,
1697
+ "normalized": false,
1698
+ "rstrip": false,
1699
+ "single_word": false,
1700
+ "special": true
1701
+ },
1702
+ "151855": {
1703
+ "content": "<FAKE_PAD_173>",
1704
+ "lstrip": false,
1705
+ "normalized": false,
1706
+ "rstrip": false,
1707
+ "single_word": false,
1708
+ "special": true
1709
+ },
1710
+ "151856": {
1711
+ "content": "<FAKE_PAD_174>",
1712
+ "lstrip": false,
1713
+ "normalized": false,
1714
+ "rstrip": false,
1715
+ "single_word": false,
1716
+ "special": true
1717
+ },
1718
+ "151857": {
1719
+ "content": "<FAKE_PAD_175>",
1720
+ "lstrip": false,
1721
+ "normalized": false,
1722
+ "rstrip": false,
1723
+ "single_word": false,
1724
+ "special": true
1725
+ },
1726
+ "151858": {
1727
+ "content": "<FAKE_PAD_176>",
1728
+ "lstrip": false,
1729
+ "normalized": false,
1730
+ "rstrip": false,
1731
+ "single_word": false,
1732
+ "special": true
1733
+ },
1734
+ "151859": {
1735
+ "content": "<FAKE_PAD_177>",
1736
+ "lstrip": false,
1737
+ "normalized": false,
1738
+ "rstrip": false,
1739
+ "single_word": false,
1740
+ "special": true
1741
+ },
1742
+ "151860": {
1743
+ "content": "<FAKE_PAD_178>",
1744
+ "lstrip": false,
1745
+ "normalized": false,
1746
+ "rstrip": false,
1747
+ "single_word": false,
1748
+ "special": true
1749
+ },
1750
+ "151861": {
1751
+ "content": "<FAKE_PAD_179>",
1752
+ "lstrip": false,
1753
+ "normalized": false,
1754
+ "rstrip": false,
1755
+ "single_word": false,
1756
+ "special": true
1757
+ },
1758
+ "151862": {
1759
+ "content": "<FAKE_PAD_180>",
1760
+ "lstrip": false,
1761
+ "normalized": false,
1762
+ "rstrip": false,
1763
+ "single_word": false,
1764
+ "special": true
1765
+ },
1766
+ "151863": {
1767
+ "content": "<FAKE_PAD_181>",
1768
+ "lstrip": false,
1769
+ "normalized": false,
1770
+ "rstrip": false,
1771
+ "single_word": false,
1772
+ "special": true
1773
+ },
1774
+ "151864": {
1775
+ "content": "<FAKE_PAD_182>",
1776
+ "lstrip": false,
1777
+ "normalized": false,
1778
+ "rstrip": false,
1779
+ "single_word": false,
1780
+ "special": true
1781
+ },
1782
+ "151865": {
1783
+ "content": "<FAKE_PAD_183>",
1784
+ "lstrip": false,
1785
+ "normalized": false,
1786
+ "rstrip": false,
1787
+ "single_word": false,
1788
+ "special": true
1789
+ },
1790
+ "151866": {
1791
+ "content": "<FAKE_PAD_184>",
1792
+ "lstrip": false,
1793
+ "normalized": false,
1794
+ "rstrip": false,
1795
+ "single_word": false,
1796
+ "special": true
1797
+ },
1798
+ "151867": {
1799
+ "content": "<FAKE_PAD_185>",
1800
+ "lstrip": false,
1801
+ "normalized": false,
1802
+ "rstrip": false,
1803
+ "single_word": false,
1804
+ "special": true
1805
+ },
1806
+ "151868": {
1807
+ "content": "<FAKE_PAD_186>",
1808
+ "lstrip": false,
1809
+ "normalized": false,
1810
+ "rstrip": false,
1811
+ "single_word": false,
1812
+ "special": true
1813
+ },
1814
+ "151869": {
1815
+ "content": "<FAKE_PAD_187>",
1816
+ "lstrip": false,
1817
+ "normalized": false,
1818
+ "rstrip": false,
1819
+ "single_word": false,
1820
+ "special": true
1821
+ },
1822
+ "151870": {
1823
+ "content": "<FAKE_PAD_188>",
1824
+ "lstrip": false,
1825
+ "normalized": false,
1826
+ "rstrip": false,
1827
+ "single_word": false,
1828
+ "special": true
1829
+ },
1830
+ "151871": {
1831
+ "content": "<FAKE_PAD_189>",
1832
+ "lstrip": false,
1833
+ "normalized": false,
1834
+ "rstrip": false,
1835
+ "single_word": false,
1836
+ "special": true
1837
+ },
1838
+ "151872": {
1839
+ "content": "<FAKE_PAD_190>",
1840
+ "lstrip": false,
1841
+ "normalized": false,
1842
+ "rstrip": false,
1843
+ "single_word": false,
1844
+ "special": true
1845
+ },
1846
+ "151873": {
1847
+ "content": "<FAKE_PAD_191>",
1848
+ "lstrip": false,
1849
+ "normalized": false,
1850
+ "rstrip": false,
1851
+ "single_word": false,
1852
+ "special": true
1853
+ },
1854
+ "151874": {
1855
+ "content": "<FAKE_PAD_192>",
1856
+ "lstrip": false,
1857
+ "normalized": false,
1858
+ "rstrip": false,
1859
+ "single_word": false,
1860
+ "special": true
1861
+ },
1862
+ "151875": {
1863
+ "content": "<FAKE_PAD_193>",
1864
+ "lstrip": false,
1865
+ "normalized": false,
1866
+ "rstrip": false,
1867
+ "single_word": false,
1868
+ "special": true
1869
+ },
1870
+ "151876": {
1871
+ "content": "<FAKE_PAD_194>",
1872
+ "lstrip": false,
1873
+ "normalized": false,
1874
+ "rstrip": false,
1875
+ "single_word": false,
1876
+ "special": true
1877
+ },
1878
+ "151877": {
1879
+ "content": "<FAKE_PAD_195>",
1880
+ "lstrip": false,
1881
+ "normalized": false,
1882
+ "rstrip": false,
1883
+ "single_word": false,
1884
+ "special": true
1885
+ },
1886
+ "151878": {
1887
+ "content": "<FAKE_PAD_196>",
1888
+ "lstrip": false,
1889
+ "normalized": false,
1890
+ "rstrip": false,
1891
+ "single_word": false,
1892
+ "special": true
1893
+ },
1894
+ "151879": {
1895
+ "content": "<FAKE_PAD_197>",
1896
+ "lstrip": false,
1897
+ "normalized": false,
1898
+ "rstrip": false,
1899
+ "single_word": false,
1900
+ "special": true
1901
+ },
1902
+ "151880": {
1903
+ "content": "<FAKE_PAD_198>",
1904
+ "lstrip": false,
1905
+ "normalized": false,
1906
+ "rstrip": false,
1907
+ "single_word": false,
1908
+ "special": true
1909
+ },
1910
+ "151881": {
1911
+ "content": "<FAKE_PAD_199>",
1912
+ "lstrip": false,
1913
+ "normalized": false,
1914
+ "rstrip": false,
1915
+ "single_word": false,
1916
+ "special": true
1917
+ },
1918
+ "151882": {
1919
+ "content": "<FAKE_PAD_200>",
1920
+ "lstrip": false,
1921
+ "normalized": false,
1922
+ "rstrip": false,
1923
+ "single_word": false,
1924
+ "special": true
1925
+ },
1926
+ "151883": {
1927
+ "content": "<FAKE_PAD_201>",
1928
+ "lstrip": false,
1929
+ "normalized": false,
1930
+ "rstrip": false,
1931
+ "single_word": false,
1932
+ "special": true
1933
+ },
1934
+ "151884": {
1935
+ "content": "<FAKE_PAD_202>",
1936
+ "lstrip": false,
1937
+ "normalized": false,
1938
+ "rstrip": false,
1939
+ "single_word": false,
1940
+ "special": true
1941
+ },
1942
+ "151885": {
1943
+ "content": "<FAKE_PAD_203>",
1944
+ "lstrip": false,
1945
+ "normalized": false,
1946
+ "rstrip": false,
1947
+ "single_word": false,
1948
+ "special": true
1949
+ },
1950
+ "151886": {
1951
+ "content": "<FAKE_PAD_204>",
1952
+ "lstrip": false,
1953
+ "normalized": false,
1954
+ "rstrip": false,
1955
+ "single_word": false,
1956
+ "special": true
1957
+ },
1958
+ "151887": {
1959
+ "content": "<FAKE_PAD_205>",
1960
+ "lstrip": false,
1961
+ "normalized": false,
1962
+ "rstrip": false,
1963
+ "single_word": false,
1964
+ "special": true
1965
+ },
1966
+ "151888": {
1967
+ "content": "<FAKE_PAD_206>",
1968
+ "lstrip": false,
1969
+ "normalized": false,
1970
+ "rstrip": false,
1971
+ "single_word": false,
1972
+ "special": true
1973
+ },
1974
+ "151889": {
1975
+ "content": "<FAKE_PAD_207>",
1976
+ "lstrip": false,
1977
+ "normalized": false,
1978
+ "rstrip": false,
1979
+ "single_word": false,
1980
+ "special": true
1981
+ },
1982
+ "151890": {
1983
+ "content": "<FAKE_PAD_208>",
1984
+ "lstrip": false,
1985
+ "normalized": false,
1986
+ "rstrip": false,
1987
+ "single_word": false,
1988
+ "special": true
1989
+ },
1990
+ "151891": {
1991
+ "content": "<FAKE_PAD_209>",
1992
+ "lstrip": false,
1993
+ "normalized": false,
1994
+ "rstrip": false,
1995
+ "single_word": false,
1996
+ "special": true
1997
+ },
1998
+ "151892": {
1999
+ "content": "<FAKE_PAD_210>",
2000
+ "lstrip": false,
2001
+ "normalized": false,
2002
+ "rstrip": false,
2003
+ "single_word": false,
2004
+ "special": true
2005
+ },
2006
+ "151893": {
2007
+ "content": "<FAKE_PAD_211>",
2008
+ "lstrip": false,
2009
+ "normalized": false,
2010
+ "rstrip": false,
2011
+ "single_word": false,
2012
+ "special": true
2013
+ },
2014
+ "151894": {
2015
+ "content": "<FAKE_PAD_212>",
2016
+ "lstrip": false,
2017
+ "normalized": false,
2018
+ "rstrip": false,
2019
+ "single_word": false,
2020
+ "special": true
2021
+ },
2022
+ "151895": {
2023
+ "content": "<FAKE_PAD_213>",
2024
+ "lstrip": false,
2025
+ "normalized": false,
2026
+ "rstrip": false,
2027
+ "single_word": false,
2028
+ "special": true
2029
+ },
2030
+ "151896": {
2031
+ "content": "<FAKE_PAD_214>",
2032
+ "lstrip": false,
2033
+ "normalized": false,
2034
+ "rstrip": false,
2035
+ "single_word": false,
2036
+ "special": true
2037
+ },
2038
+ "151897": {
2039
+ "content": "<FAKE_PAD_215>",
2040
+ "lstrip": false,
2041
+ "normalized": false,
2042
+ "rstrip": false,
2043
+ "single_word": false,
2044
+ "special": true
2045
+ },
2046
+ "151898": {
2047
+ "content": "<FAKE_PAD_216>",
2048
+ "lstrip": false,
2049
+ "normalized": false,
2050
+ "rstrip": false,
2051
+ "single_word": false,
2052
+ "special": true
2053
+ },
2054
+ "151899": {
2055
+ "content": "<FAKE_PAD_217>",
2056
+ "lstrip": false,
2057
+ "normalized": false,
2058
+ "rstrip": false,
2059
+ "single_word": false,
2060
+ "special": true
2061
+ },
2062
+ "151900": {
2063
+ "content": "<FAKE_PAD_218>",
2064
+ "lstrip": false,
2065
+ "normalized": false,
2066
+ "rstrip": false,
2067
+ "single_word": false,
2068
+ "special": true
2069
+ },
2070
+ "151901": {
2071
+ "content": "<FAKE_PAD_219>",
2072
+ "lstrip": false,
2073
+ "normalized": false,
2074
+ "rstrip": false,
2075
+ "single_word": false,
2076
+ "special": true
2077
+ },
2078
+ "151902": {
2079
+ "content": "<FAKE_PAD_220>",
2080
+ "lstrip": false,
2081
+ "normalized": false,
2082
+ "rstrip": false,
2083
+ "single_word": false,
2084
+ "special": true
2085
+ },
2086
+ "151903": {
2087
+ "content": "<FAKE_PAD_221>",
2088
+ "lstrip": false,
2089
+ "normalized": false,
2090
+ "rstrip": false,
2091
+ "single_word": false,
2092
+ "special": true
2093
+ },
2094
+ "151904": {
2095
+ "content": "<FAKE_PAD_222>",
2096
+ "lstrip": false,
2097
+ "normalized": false,
2098
+ "rstrip": false,
2099
+ "single_word": false,
2100
+ "special": true
2101
+ },
2102
+ "151905": {
2103
+ "content": "<FAKE_PAD_223>",
2104
+ "lstrip": false,
2105
+ "normalized": false,
2106
+ "rstrip": false,
2107
+ "single_word": false,
2108
+ "special": true
2109
+ },
2110
+ "151906": {
2111
+ "content": "<FAKE_PAD_224>",
2112
+ "lstrip": false,
2113
+ "normalized": false,
2114
+ "rstrip": false,
2115
+ "single_word": false,
2116
+ "special": true
2117
+ },
2118
+ "151907": {
2119
+ "content": "<FAKE_PAD_225>",
2120
+ "lstrip": false,
2121
+ "normalized": false,
2122
+ "rstrip": false,
2123
+ "single_word": false,
2124
+ "special": true
2125
+ },
2126
+ "151908": {
2127
+ "content": "<FAKE_PAD_226>",
2128
+ "lstrip": false,
2129
+ "normalized": false,
2130
+ "rstrip": false,
2131
+ "single_word": false,
2132
+ "special": true
2133
+ },
2134
+ "151909": {
2135
+ "content": "<FAKE_PAD_227>",
2136
+ "lstrip": false,
2137
+ "normalized": false,
2138
+ "rstrip": false,
2139
+ "single_word": false,
2140
+ "special": true
2141
+ },
2142
+ "151910": {
2143
+ "content": "<FAKE_PAD_228>",
2144
+ "lstrip": false,
2145
+ "normalized": false,
2146
+ "rstrip": false,
2147
+ "single_word": false,
2148
+ "special": true
2149
+ },
2150
+ "151911": {
2151
+ "content": "<FAKE_PAD_229>",
2152
+ "lstrip": false,
2153
+ "normalized": false,
2154
+ "rstrip": false,
2155
+ "single_word": false,
2156
+ "special": true
2157
+ },
2158
+ "151912": {
2159
+ "content": "<FAKE_PAD_230>",
2160
+ "lstrip": false,
2161
+ "normalized": false,
2162
+ "rstrip": false,
2163
+ "single_word": false,
2164
+ "special": true
2165
+ },
2166
+ "151913": {
2167
+ "content": "<FAKE_PAD_231>",
2168
+ "lstrip": false,
2169
+ "normalized": false,
2170
+ "rstrip": false,
2171
+ "single_word": false,
2172
+ "special": true
2173
+ },
2174
+ "151914": {
2175
+ "content": "<FAKE_PAD_232>",
2176
+ "lstrip": false,
2177
+ "normalized": false,
2178
+ "rstrip": false,
2179
+ "single_word": false,
2180
+ "special": true
2181
+ },
2182
+ "151915": {
2183
+ "content": "<FAKE_PAD_233>",
2184
+ "lstrip": false,
2185
+ "normalized": false,
2186
+ "rstrip": false,
2187
+ "single_word": false,
2188
+ "special": true
2189
+ },
2190
+ "151916": {
2191
+ "content": "<FAKE_PAD_234>",
2192
+ "lstrip": false,
2193
+ "normalized": false,
2194
+ "rstrip": false,
2195
+ "single_word": false,
2196
+ "special": true
2197
+ },
2198
+ "151917": {
2199
+ "content": "<FAKE_PAD_235>",
2200
+ "lstrip": false,
2201
+ "normalized": false,
2202
+ "rstrip": false,
2203
+ "single_word": false,
2204
+ "special": true
2205
+ },
2206
+ "151918": {
2207
+ "content": "<FAKE_PAD_236>",
2208
+ "lstrip": false,
2209
+ "normalized": false,
2210
+ "rstrip": false,
2211
+ "single_word": false,
2212
+ "special": true
2213
+ },
2214
+ "151919": {
2215
+ "content": "<FAKE_PAD_237>",
2216
+ "lstrip": false,
2217
+ "normalized": false,
2218
+ "rstrip": false,
2219
+ "single_word": false,
2220
+ "special": true
2221
+ },
2222
+ "151920": {
2223
+ "content": "<FAKE_PAD_238>",
2224
+ "lstrip": false,
2225
+ "normalized": false,
2226
+ "rstrip": false,
2227
+ "single_word": false,
2228
+ "special": true
2229
+ },
2230
+ "151921": {
2231
+ "content": "<FAKE_PAD_239>",
2232
+ "lstrip": false,
2233
+ "normalized": false,
2234
+ "rstrip": false,
2235
+ "single_word": false,
2236
+ "special": true
2237
+ },
2238
+ "151922": {
2239
+ "content": "<FAKE_PAD_240>",
2240
+ "lstrip": false,
2241
+ "normalized": false,
2242
+ "rstrip": false,
2243
+ "single_word": false,
2244
+ "special": true
2245
+ },
2246
+ "151923": {
2247
+ "content": "<FAKE_PAD_241>",
2248
+ "lstrip": false,
2249
+ "normalized": false,
2250
+ "rstrip": false,
2251
+ "single_word": false,
2252
+ "special": true
2253
+ },
2254
+ "151924": {
2255
+ "content": "<FAKE_PAD_242>",
2256
+ "lstrip": false,
2257
+ "normalized": false,
2258
+ "rstrip": false,
2259
+ "single_word": false,
2260
+ "special": true
2261
+ },
2262
+ "151925": {
2263
+ "content": "<FAKE_PAD_243>",
2264
+ "lstrip": false,
2265
+ "normalized": false,
2266
+ "rstrip": false,
2267
+ "single_word": false,
2268
+ "special": true
2269
+ },
2270
+ "151926": {
2271
+ "content": "<FAKE_PAD_244>",
2272
+ "lstrip": false,
2273
+ "normalized": false,
2274
+ "rstrip": false,
2275
+ "single_word": false,
2276
+ "special": true
2277
+ },
2278
+ "151927": {
2279
+ "content": "<FAKE_PAD_245>",
2280
+ "lstrip": false,
2281
+ "normalized": false,
2282
+ "rstrip": false,
2283
+ "single_word": false,
2284
+ "special": true
2285
+ },
2286
+ "151928": {
2287
+ "content": "<FAKE_PAD_246>",
2288
+ "lstrip": false,
2289
+ "normalized": false,
2290
+ "rstrip": false,
2291
+ "single_word": false,
2292
+ "special": true
2293
+ },
2294
+ "151929": {
2295
+ "content": "<FAKE_PAD_247>",
2296
+ "lstrip": false,
2297
+ "normalized": false,
2298
+ "rstrip": false,
2299
+ "single_word": false,
2300
+ "special": true
2301
+ },
2302
+ "151930": {
2303
+ "content": "<FAKE_PAD_248>",
2304
+ "lstrip": false,
2305
+ "normalized": false,
2306
+ "rstrip": false,
2307
+ "single_word": false,
2308
+ "special": true
2309
+ },
2310
+ "151931": {
2311
+ "content": "<FAKE_PAD_249>",
2312
+ "lstrip": false,
2313
+ "normalized": false,
2314
+ "rstrip": false,
2315
+ "single_word": false,
2316
+ "special": true
2317
+ },
2318
+ "151932": {
2319
+ "content": "<FAKE_PAD_250>",
2320
+ "lstrip": false,
2321
+ "normalized": false,
2322
+ "rstrip": false,
2323
+ "single_word": false,
2324
+ "special": true
2325
+ },
2326
+ "151933": {
2327
+ "content": "<FAKE_PAD_251>",
2328
+ "lstrip": false,
2329
+ "normalized": false,
2330
+ "rstrip": false,
2331
+ "single_word": false,
2332
+ "special": true
2333
+ },
2334
+ "151934": {
2335
+ "content": "<FAKE_PAD_252>",
2336
+ "lstrip": false,
2337
+ "normalized": false,
2338
+ "rstrip": false,
2339
+ "single_word": false,
2340
+ "special": true
2341
+ },
2342
+ "151935": {
2343
+ "content": "<FAKE_PAD_253>",
2344
+ "lstrip": false,
2345
+ "normalized": false,
2346
+ "rstrip": false,
2347
+ "single_word": false,
2348
+ "special": true
2349
+ }
2350
+ },
2351
+ "additional_special_tokens": [
2352
+ "<|im_start|>",
2353
+ "<|im_end|>",
2354
+ "<|object_ref_start|>",
2355
+ "<|object_ref_end|>",
2356
+ "<|box_start|>",
2357
+ "<|box_end|>",
2358
+ "<|quad_start|>",
2359
+ "<|quad_end|>",
2360
+ "<|vision_start|>",
2361
+ "<|vision_end|>",
2362
+ "<|vision_pad|>",
2363
+ "<|image_pad|>",
2364
+ "<|video_pad|>",
2365
+ "<IMG_CONTEXT>",
2366
+ "<img>",
2367
+ "</img>",
2368
+ "<quad>",
2369
+ "</quad>",
2370
+ "<ref>",
2371
+ "</ref>",
2372
+ "<box>",
2373
+ "</box>",
2374
+ "<|action_start|>",
2375
+ "<|action_end|>",
2376
+ "<|plugin|>",
2377
+ "<|interpreter|>",
2378
+ "<FAKE_PAD_0>",
2379
+ "<FAKE_PAD_1>",
2380
+ "<FAKE_PAD_2>",
2381
+ "<FAKE_PAD_3>",
2382
+ "<FAKE_PAD_4>",
2383
+ "<FAKE_PAD_5>",
2384
+ "<FAKE_PAD_6>",
2385
+ "<FAKE_PAD_7>",
2386
+ "<FAKE_PAD_8>",
2387
+ "<FAKE_PAD_9>",
2388
+ "<FAKE_PAD_10>",
2389
+ "<FAKE_PAD_11>",
2390
+ "<FAKE_PAD_12>",
2391
+ "<FAKE_PAD_13>",
2392
+ "<FAKE_PAD_14>",
2393
+ "<FAKE_PAD_15>",
2394
+ "<FAKE_PAD_16>",
2395
+ "<FAKE_PAD_17>",
2396
+ "<FAKE_PAD_18>",
2397
+ "<FAKE_PAD_19>",
2398
+ "<FAKE_PAD_20>",
2399
+ "<FAKE_PAD_21>",
2400
+ "<FAKE_PAD_22>",
2401
+ "<FAKE_PAD_23>",
2402
+ "<FAKE_PAD_24>",
2403
+ "<FAKE_PAD_25>",
2404
+ "<FAKE_PAD_26>",
2405
+ "<FAKE_PAD_27>",
2406
+ "<FAKE_PAD_28>",
2407
+ "<FAKE_PAD_29>",
2408
+ "<FAKE_PAD_30>",
2409
+ "<FAKE_PAD_31>",
2410
+ "<FAKE_PAD_32>",
2411
+ "<FAKE_PAD_33>",
2412
+ "<FAKE_PAD_34>",
2413
+ "<FAKE_PAD_35>",
2414
+ "<FAKE_PAD_36>",
2415
+ "<FAKE_PAD_37>",
2416
+ "<FAKE_PAD_38>",
2417
+ "<FAKE_PAD_39>",
2418
+ "<FAKE_PAD_40>",
2419
+ "<FAKE_PAD_41>",
2420
+ "<FAKE_PAD_42>",
2421
+ "<FAKE_PAD_43>",
2422
+ "<FAKE_PAD_44>",
2423
+ "<FAKE_PAD_45>",
2424
+ "<FAKE_PAD_46>",
2425
+ "<FAKE_PAD_47>",
2426
+ "<FAKE_PAD_48>",
2427
+ "<FAKE_PAD_49>",
2428
+ "<FAKE_PAD_50>",
2429
+ "<FAKE_PAD_51>",
2430
+ "<FAKE_PAD_52>",
2431
+ "<FAKE_PAD_53>",
2432
+ "<FAKE_PAD_54>",
2433
+ "<FAKE_PAD_55>",
2434
+ "<FAKE_PAD_56>",
2435
+ "<FAKE_PAD_57>",
2436
+ "<FAKE_PAD_58>",
2437
+ "<FAKE_PAD_59>",
2438
+ "<FAKE_PAD_60>",
2439
+ "<FAKE_PAD_61>",
2440
+ "<FAKE_PAD_62>",
2441
+ "<FAKE_PAD_63>",
2442
+ "<FAKE_PAD_64>",
2443
+ "<FAKE_PAD_65>",
2444
+ "<FAKE_PAD_66>",
2445
+ "<FAKE_PAD_67>",
2446
+ "<FAKE_PAD_68>",
2447
+ "<FAKE_PAD_69>",
2448
+ "<FAKE_PAD_70>",
2449
+ "<FAKE_PAD_71>",
2450
+ "<FAKE_PAD_72>",
2451
+ "<FAKE_PAD_73>",
2452
+ "<FAKE_PAD_74>",
2453
+ "<FAKE_PAD_75>",
2454
+ "<FAKE_PAD_76>",
2455
+ "<FAKE_PAD_77>",
2456
+ "<FAKE_PAD_78>",
2457
+ "<FAKE_PAD_79>",
2458
+ "<FAKE_PAD_80>",
2459
+ "<FAKE_PAD_81>",
2460
+ "<FAKE_PAD_82>",
2461
+ "<FAKE_PAD_83>",
2462
+ "<FAKE_PAD_84>",
2463
+ "<FAKE_PAD_85>",
2464
+ "<FAKE_PAD_86>",
2465
+ "<FAKE_PAD_87>",
2466
+ "<FAKE_PAD_88>",
2467
+ "<FAKE_PAD_89>",
2468
+ "<FAKE_PAD_90>",
2469
+ "<FAKE_PAD_91>",
2470
+ "<FAKE_PAD_92>",
2471
+ "<FAKE_PAD_93>",
2472
+ "<FAKE_PAD_94>",
2473
+ "<FAKE_PAD_95>",
2474
+ "<FAKE_PAD_96>",
2475
+ "<FAKE_PAD_97>",
2476
+ "<FAKE_PAD_98>",
2477
+ "<FAKE_PAD_99>",
2478
+ "<FAKE_PAD_100>",
2479
+ "<FAKE_PAD_101>",
2480
+ "<FAKE_PAD_102>",
2481
+ "<FAKE_PAD_103>",
2482
+ "<FAKE_PAD_104>",
2483
+ "<FAKE_PAD_105>",
2484
+ "<FAKE_PAD_106>",
2485
+ "<FAKE_PAD_107>",
2486
+ "<FAKE_PAD_108>",
2487
+ "<FAKE_PAD_109>",
2488
+ "<FAKE_PAD_110>",
2489
+ "<FAKE_PAD_111>",
2490
+ "<FAKE_PAD_112>",
2491
+ "<FAKE_PAD_113>",
2492
+ "<FAKE_PAD_114>",
2493
+ "<FAKE_PAD_115>",
2494
+ "<FAKE_PAD_116>",
2495
+ "<FAKE_PAD_117>",
2496
+ "<FAKE_PAD_118>",
2497
+ "<FAKE_PAD_119>",
2498
+ "<FAKE_PAD_120>",
2499
+ "<FAKE_PAD_121>",
2500
+ "<FAKE_PAD_122>",
2501
+ "<FAKE_PAD_123>",
2502
+ "<FAKE_PAD_124>",
2503
+ "<FAKE_PAD_125>",
2504
+ "<FAKE_PAD_126>",
2505
+ "<FAKE_PAD_127>",
2506
+ "<FAKE_PAD_128>",
2507
+ "<FAKE_PAD_129>",
2508
+ "<FAKE_PAD_130>",
2509
+ "<FAKE_PAD_131>",
2510
+ "<FAKE_PAD_132>",
2511
+ "<FAKE_PAD_133>",
2512
+ "<FAKE_PAD_134>",
2513
+ "<FAKE_PAD_135>",
2514
+ "<FAKE_PAD_136>",
2515
+ "<FAKE_PAD_137>",
2516
+ "<FAKE_PAD_138>",
2517
+ "<FAKE_PAD_139>",
2518
+ "<FAKE_PAD_140>",
2519
+ "<FAKE_PAD_141>",
2520
+ "<FAKE_PAD_142>",
2521
+ "<FAKE_PAD_143>",
2522
+ "<FAKE_PAD_144>",
2523
+ "<FAKE_PAD_145>",
2524
+ "<FAKE_PAD_146>",
2525
+ "<FAKE_PAD_147>",
2526
+ "<FAKE_PAD_148>",
2527
+ "<FAKE_PAD_149>",
2528
+ "<FAKE_PAD_150>",
2529
+ "<FAKE_PAD_151>",
2530
+ "<FAKE_PAD_152>",
2531
+ "<FAKE_PAD_153>",
2532
+ "<FAKE_PAD_154>",
2533
+ "<FAKE_PAD_155>",
2534
+ "<FAKE_PAD_156>",
2535
+ "<FAKE_PAD_157>",
2536
+ "<FAKE_PAD_158>",
2537
+ "<FAKE_PAD_159>",
2538
+ "<FAKE_PAD_160>",
2539
+ "<FAKE_PAD_161>",
2540
+ "<FAKE_PAD_162>",
2541
+ "<FAKE_PAD_163>",
2542
+ "<FAKE_PAD_164>",
2543
+ "<FAKE_PAD_165>",
2544
+ "<FAKE_PAD_166>",
2545
+ "<FAKE_PAD_167>",
2546
+ "<FAKE_PAD_168>",
2547
+ "<FAKE_PAD_169>",
2548
+ "<FAKE_PAD_170>",
2549
+ "<FAKE_PAD_171>",
2550
+ "<FAKE_PAD_172>",
2551
+ "<FAKE_PAD_173>",
2552
+ "<FAKE_PAD_174>",
2553
+ "<FAKE_PAD_175>",
2554
+ "<FAKE_PAD_176>",
2555
+ "<FAKE_PAD_177>",
2556
+ "<FAKE_PAD_178>",
2557
+ "<FAKE_PAD_179>",
2558
+ "<FAKE_PAD_180>",
2559
+ "<FAKE_PAD_181>",
2560
+ "<FAKE_PAD_182>",
2561
+ "<FAKE_PAD_183>",
2562
+ "<FAKE_PAD_184>",
2563
+ "<FAKE_PAD_185>",
2564
+ "<FAKE_PAD_186>",
2565
+ "<FAKE_PAD_187>",
2566
+ "<FAKE_PAD_188>",
2567
+ "<FAKE_PAD_189>",
2568
+ "<FAKE_PAD_190>",
2569
+ "<FAKE_PAD_191>",
2570
+ "<FAKE_PAD_192>",
2571
+ "<FAKE_PAD_193>",
2572
+ "<FAKE_PAD_194>",
2573
+ "<FAKE_PAD_195>",
2574
+ "<FAKE_PAD_196>",
2575
+ "<FAKE_PAD_197>",
2576
+ "<FAKE_PAD_198>",
2577
+ "<FAKE_PAD_199>",
2578
+ "<FAKE_PAD_200>",
2579
+ "<FAKE_PAD_201>",
2580
+ "<FAKE_PAD_202>",
2581
+ "<FAKE_PAD_203>",
2582
+ "<FAKE_PAD_204>",
2583
+ "<FAKE_PAD_205>",
2584
+ "<FAKE_PAD_206>",
2585
+ "<FAKE_PAD_207>",
2586
+ "<FAKE_PAD_208>",
2587
+ "<FAKE_PAD_209>",
2588
+ "<FAKE_PAD_210>",
2589
+ "<FAKE_PAD_211>",
2590
+ "<FAKE_PAD_212>",
2591
+ "<FAKE_PAD_213>",
2592
+ "<FAKE_PAD_214>",
2593
+ "<FAKE_PAD_215>",
2594
+ "<FAKE_PAD_216>",
2595
+ "<FAKE_PAD_217>",
2596
+ "<FAKE_PAD_218>",
2597
+ "<FAKE_PAD_219>",
2598
+ "<FAKE_PAD_220>",
2599
+ "<FAKE_PAD_221>",
2600
+ "<FAKE_PAD_222>",
2601
+ "<FAKE_PAD_223>",
2602
+ "<FAKE_PAD_224>",
2603
+ "<FAKE_PAD_225>",
2604
+ "<FAKE_PAD_226>",
2605
+ "<FAKE_PAD_227>",
2606
+ "<FAKE_PAD_228>",
2607
+ "<FAKE_PAD_229>",
2608
+ "<FAKE_PAD_230>",
2609
+ "<FAKE_PAD_231>",
2610
+ "<FAKE_PAD_232>",
2611
+ "<FAKE_PAD_233>",
2612
+ "<FAKE_PAD_234>",
2613
+ "<FAKE_PAD_235>",
2614
+ "<FAKE_PAD_236>",
2615
+ "<FAKE_PAD_237>",
2616
+ "<FAKE_PAD_238>",
2617
+ "<FAKE_PAD_239>",
2618
+ "<FAKE_PAD_240>",
2619
+ "<FAKE_PAD_241>",
2620
+ "<FAKE_PAD_242>",
2621
+ "<FAKE_PAD_243>",
2622
+ "<FAKE_PAD_244>",
2623
+ "<FAKE_PAD_245>",
2624
+ "<FAKE_PAD_246>",
2625
+ "<FAKE_PAD_247>",
2626
+ "<FAKE_PAD_248>",
2627
+ "<FAKE_PAD_249>",
2628
+ "<FAKE_PAD_250>",
2629
+ "<FAKE_PAD_251>",
2630
+ "<FAKE_PAD_252>",
2631
+ "<FAKE_PAD_253>"
2632
+ ],
2633
+ "bos_token": null,
2634
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0].role == 'system' %}\n {{- messages[0].content + '\\n\\n' }}\n {%- endif %}\n {{- \"# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0].role == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0].content + '<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}\n{%- for message in messages[::-1] %}\n {%- set index = (messages|length - 1) - loop.index0 %}\n {%- if ns.multi_step_tool and message.role == \"user\" and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}\n {%- set ns.multi_step_tool = false %}\n {%- set ns.last_query_index = index %}\n {%- endif %}\n{%- endfor %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {%- set content = message.content %}\n {%- set reasoning_content = '' %}\n {%- if message.reasoning_content is defined and message.reasoning_content is not none %}\n {%- set reasoning_content = message.reasoning_content %}\n {%- else %}\n {%- if '</think>' in message.content %}\n {%- set content = message.content.split('</think>')[-1].lstrip('\\n') %}\n {%- set reasoning_content = message.content.split('</think>')[0].rstrip('\\n').split('<think>')[-1].lstrip('\\n') %}\n {%- endif %}\n {%- endif %}\n {%- if loop.index0 > ns.last_query_index %}\n {%- if loop.last or (not loop.last and reasoning_content) %}\n {{- '<|im_start|>' + message.role + '\\n<think>\\n' + reasoning_content.strip('\\n') + '\\n</think>\\n\\n' + content.lstrip('\\n') }}\n {%- else %}\n {{- '<|im_start|>' + message.role + '\\n' + content }}\n {%- endif %}\n {%- else %}\n {{- '<|im_start|>' + message.role + '\\n' + content }}\n {%- endif %}\n {%- if message.tool_calls %}\n {%- for tool_call in message.tool_calls %}\n {%- if (loop.first and content) or (not loop.first) %}\n {{- '\\n' }}\n {%- endif %}\n {%- if tool_call.function %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {%- if tool_call.arguments is string %}\n {{- tool_call.arguments }}\n {%- else %}\n {{- tool_call.arguments | tojson }}\n {%- endif %}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {%- endif %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if loop.first or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n {%- if enable_thinking is defined and 
enable_thinking is false %}\n {{- '<think>\\n\\n</think>\\n\\n' }}\n {%- endif %}\n{%- endif %}",
2635
+ "clean_up_tokenization_spaces": false,
2636
+ "eos_token": "<|im_end|>",
2637
+ "errors": "replace",
2638
+ "model_max_length": 12288,
2639
+ "pad_token": "<|endoftext|>",
2640
+ "split_special_tokens": false,
2641
+ "tokenizer_class": "Qwen2Tokenizer",
2642
+ "unk_token": null
2643
+ }
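
A minimal usage sketch (not part of this commit; the repository path below is a placeholder) showing how the tokenizer config added above could be loaded and exercised with the standard transformers AutoTokenizer API. The expected id for <FAKE_PAD_58> comes from the added_tokens decoder entries in this diff.

from transformers import AutoTokenizer

# Placeholder path: point this at a local checkout of this repository.
tok = AutoTokenizer.from_pretrained("./this_repo", trust_remote_code=True)

# FAKE_PAD_* entries are registered with "special": true, so they resolve to
# their fixed ids and are never split by the BPE merges.
print(tok.convert_tokens_to_ids("<FAKE_PAD_58>"))  # expected: 151740 per the decoder above

# The chat_template serializes messages in the <|im_start|>/<|im_end|> style
# declared above, appending the assistant header when a generation prompt is requested.
messages = [{"role": "user", "content": "Hello!"}]
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
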
vocab.json ADDED
The diff for this file is too large to render. See raw diff