yifanzhang114 committed on
Commit 27a54ba · verified · 1 Parent(s): 35e1d16

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
args.json ADDED
@@ -0,0 +1,381 @@
1
+ {
2
+ "output_dir": "/data_r1v4/data_r1v4/yifan.zhang/models/GLM_kwai_all_general_12_claude_general_high_res_wothink12/v1-20250915-132509",
3
+ "overwrite_output_dir": false,
4
+ "do_train": false,
5
+ "do_eval": false,
6
+ "do_predict": false,
7
+ "eval_strategy": "no",
8
+ "prediction_loss_only": false,
9
+ "per_device_train_batch_size": 1,
10
+ "per_device_eval_batch_size": 1,
11
+ "per_gpu_train_batch_size": null,
12
+ "per_gpu_eval_batch_size": null,
13
+ "gradient_accumulation_steps": 2,
14
+ "eval_accumulation_steps": null,
15
+ "eval_delay": 0,
16
+ "torch_empty_cache_steps": null,
17
+ "learning_rate": 1e-05,
18
+ "weight_decay": 0.1,
19
+ "adam_beta1": 0.9,
20
+ "adam_beta2": 0.95,
21
+ "adam_epsilon": 1e-08,
22
+ "max_grad_norm": 1.0,
23
+ "num_train_epochs": 5.0,
24
+ "max_steps": -1,
25
+ "lr_scheduler_type": "cosine",
26
+ "lr_scheduler_kwargs": null,
27
+ "warmup_ratio": 0.05,
28
+ "warmup_steps": 0,
29
+ "log_level": "passive",
30
+ "log_level_replica": "warning",
31
+ "log_on_each_node": true,
32
+ "logging_dir": "/data_r1v4/data_r1v4/yifan.zhang/models/GLM_kwai_all_general_12_claude_general_high_res_wothink12/v1-20250915-132509/runs",
33
+ "logging_strategy": "steps",
34
+ "logging_first_step": true,
35
+ "logging_steps": 5,
36
+ "logging_nan_inf_filter": true,
37
+ "save_strategy": "epoch",
38
+ "save_steps": 500,
39
+ "save_total_limit": 5,
40
+ "save_safetensors": true,
41
+ "save_on_each_node": false,
42
+ "save_only_model": true,
43
+ "restore_callback_states_from_checkpoint": false,
44
+ "no_cuda": false,
45
+ "use_cpu": false,
46
+ "use_mps_device": false,
47
+ "seed": 42,
48
+ "data_seed": 42,
49
+ "jit_mode_eval": false,
50
+ "use_ipex": false,
51
+ "bf16": true,
52
+ "fp16": false,
53
+ "fp16_opt_level": "O1",
54
+ "half_precision_backend": "auto",
55
+ "bf16_full_eval": false,
56
+ "fp16_full_eval": false,
57
+ "tf32": null,
58
+ "local_rank": 0,
59
+ "ddp_backend": null,
60
+ "tpu_num_cores": null,
61
+ "tpu_metrics_debug": false,
62
+ "debug": null,
63
+ "dataloader_drop_last": false,
64
+ "eval_steps": null,
65
+ "dataloader_num_workers": 4,
66
+ "dataloader_prefetch_factor": null,
67
+ "past_index": -1,
68
+ "run_name": "/data_r1v4/data_r1v4/yifan.zhang/models/GLM_kwai_all_general_12_claude_general_high_res_wothink12/v1-20250915-132509",
69
+ "disable_tqdm": null,
70
+ "remove_unused_columns": true,
71
+ "label_names": null,
72
+ "load_best_model_at_end": false,
73
+ "metric_for_best_model": "loss",
74
+ "greater_is_better": false,
75
+ "ignore_data_skip": false,
76
+ "fsdp": "",
77
+ "fsdp_min_num_params": 0,
78
+ "fsdp_config": null,
79
+ "fsdp_transformer_layer_cls_to_wrap": null,
80
+ "accelerator_config": {
81
+ "dispatch_batches": false
82
+ },
83
+ "deepspeed": {
84
+ "fp16": {
85
+ "enabled": "auto",
86
+ "loss_scale": 0,
87
+ "loss_scale_window": 1000,
88
+ "initial_scale_power": 16,
89
+ "hysteresis": 2,
90
+ "min_loss_scale": 1
91
+ },
92
+ "bf16": {
93
+ "enabled": "auto"
94
+ },
95
+ "zero_optimization": {
96
+ "stage": 2,
97
+ "offload_optimizer": {
98
+ "device": "none",
99
+ "pin_memory": true
100
+ },
101
+ "allgather_partitions": true,
102
+ "allgather_bucket_size": 200000000.0,
103
+ "overlap_comm": false,
104
+ "reduce_scatter": true,
105
+ "reduce_bucket_size": 200000000.0,
106
+ "contiguous_gradients": true
107
+ },
108
+ "gradient_accumulation_steps": "auto",
109
+ "gradient_clipping": "auto",
110
+ "steps_per_print": 2000,
111
+ "train_batch_size": "auto",
112
+ "train_micro_batch_size_per_gpu": "auto",
113
+ "wall_clock_breakdown": false
114
+ },
115
+ "label_smoothing_factor": 0.0,
116
+ "optim": "adamw_torch",
117
+ "optim_args": null,
118
+ "adafactor": false,
119
+ "group_by_length": false,
120
+ "length_column_name": "length",
121
+ "report_to": [
122
+ "wandb"
123
+ ],
124
+ "ddp_find_unused_parameters": null,
125
+ "ddp_bucket_cap_mb": null,
126
+ "ddp_broadcast_buffers": null,
127
+ "dataloader_pin_memory": true,
128
+ "dataloader_persistent_workers": false,
129
+ "skip_memory_metrics": true,
130
+ "use_legacy_prediction_loop": false,
131
+ "push_to_hub": false,
132
+ "resume_from_checkpoint": null,
133
+ "hub_model_id": null,
134
+ "hub_strategy": "every_save",
135
+ "hub_token": null,
136
+ "hub_private_repo": null,
137
+ "hub_always_push": false,
138
+ "hub_revision": null,
139
+ "gradient_checkpointing": true,
140
+ "gradient_checkpointing_kwargs": null,
141
+ "include_inputs_for_metrics": false,
142
+ "include_for_metrics": [],
143
+ "eval_do_concat_batches": true,
144
+ "fp16_backend": "auto",
145
+ "push_to_hub_model_id": null,
146
+ "push_to_hub_organization": null,
147
+ "push_to_hub_token": null,
148
+ "mp_parameters": "",
149
+ "auto_find_batch_size": false,
150
+ "full_determinism": false,
151
+ "torchdynamo": null,
152
+ "ray_scope": "last",
153
+ "ddp_timeout": 18000000,
154
+ "torch_compile": false,
155
+ "torch_compile_backend": null,
156
+ "torch_compile_mode": null,
157
+ "include_tokens_per_second": false,
158
+ "include_num_input_tokens_seen": false,
159
+ "neftune_noise_alpha": null,
160
+ "optim_target_modules": null,
161
+ "batch_eval_metrics": false,
162
+ "eval_on_start": false,
163
+ "use_liger_kernel": false,
164
+ "liger_kernel_config": null,
165
+ "eval_use_gather_object": false,
166
+ "average_tokens_across_devices": true,
167
+ "sortish_sampler": false,
168
+ "predict_with_generate": false,
169
+ "generation_max_length": null,
170
+ "generation_num_beams": null,
171
+ "generation_config": null,
172
+ "tuner_backend": "peft",
173
+ "vit_gradient_checkpointing": null,
174
+ "router_aux_loss_coef": 0.0,
175
+ "enable_dft_loss": false,
176
+ "enable_channel_loss": false,
177
+ "check_model": true,
178
+ "acc_strategy": "token",
179
+ "train_dataloader_shuffle": true,
180
+ "max_epochs": null,
181
+ "aligner_lr": null,
182
+ "vit_lr": null,
183
+ "use_logits_to_keep": null,
184
+ "ds3_gather_for_generation": true,
185
+ "resume_only_model": false,
186
+ "optimizer": null,
187
+ "loss_type": null,
188
+ "metric": null,
189
+ "eval_use_evalscope": false,
190
+ "eval_dataset": [],
191
+ "eval_dataset_args": null,
192
+ "eval_limit": null,
193
+ "eval_generation_config": null,
194
+ "extra_eval_args": null,
195
+ "use_flash_ckpt": false,
196
+ "model": "/data_genie/genie/yfzhang/models/Qwen2.5-VL-7B-Instruct",
197
+ "model_type": "qwen2_5_vl",
198
+ "model_revision": null,
199
+ "task_type": "causal_lm",
200
+ "torch_dtype": "bfloat16",
201
+ "attn_impl": "flash_attn",
202
+ "new_special_tokens": [],
203
+ "num_labels": null,
204
+ "problem_type": null,
205
+ "rope_scaling": null,
206
+ "device_map": null,
207
+ "max_memory": {},
208
+ "max_model_len": null,
209
+ "local_repo_path": null,
210
+ "init_strategy": null,
211
+ "template": "qwen2_5_vl",
212
+ "system": "You are a helpful assistant.\n\nSolve the following problem step by step, and optionally write Python code for image manipulation to enhance your reasoning process. The Python code will be executed by an external sandbox, and the processed image or result (wrapped in <sandbox_output></sandbox_output>) can be returned to aid your reasoning and help you arrive at the final answer.\n\n**Reasoning & Image Manipulation (Optional but Encouraged):**\n * You have the capability to write executable Python code to perform image manipulations (e.g., cropping to a Region of Interest (ROI), resizing, rotation, adjusting contrast) or perform calculation for better reasoning.\n * The code will be executed in a secure sandbox, and its output will be provided back to you for further analysis.\n * All Python code snippets **must** be wrapped as follows:\n <code>\n ```python\n # your code.\n ```\n </code>\n * At the end of the code, print the path of the processed image (processed_path) or the result for further processing in a sandbox environment.",
213
+ "max_length": 20480,
214
+ "truncation_strategy": "delete",
215
+ "max_pixels": null,
216
+ "agent_template": null,
217
+ "norm_bbox": null,
218
+ "use_chat_template": true,
219
+ "padding_free": false,
220
+ "padding_side": "right",
221
+ "loss_scale": "default",
222
+ "sequence_parallel_size": 1,
223
+ "response_prefix": null,
224
+ "template_backend": "swift",
225
+ "dataset": [
226
+ "/data_r1v4/data_r1v4/yifan.zhang/Thyme-v2/glm4.5/step6_all_rounds/kwai_high_res_consistent_valid_round_1_2_3_conversation.jsonl",
227
+ "/data_r1v4/data_r1v4/yifan.zhang/Thyme-v2/glm4.5/step6_all_rounds/kwai_ocr_count_consistent_valid_round_0_1_2_conversation.jsonl",
228
+ "/data_r1v4/data_r1v4/yifan.zhang/Thyme-v2/glm4.5/step6_all_rounds/general_vqa_consistent_valid_round1_2_conversation.jsonl",
229
+ "/data_r1v4/data_r1v4/yifan.zhang/Thyme-v2/training_data_v3/step5_conversation_round/high_res_rl_yes_merged_3k_step_step_match_round_1234_conversation.jsonl",
230
+ "/data_r1v4/data_r1v4/yifan.zhang/Thyme-v2/training_data_v3/step5_conversation_round/general_yes_consist_consistent_step_rigious_step_match_combined_1_2_3_4_conversation.jsonl",
231
+ "/data_r1v4/data_r1v4/yifan.zhang/Thyme-v2/training_data_v3/step5_conversation_round/wo_think_consist_step_rigious_step_match_round12_conversation.jsonl"
232
+ ],
233
+ "val_dataset": [],
234
+ "split_dataset_ratio": 0.0,
235
+ "dataset_num_proc": 1,
236
+ "load_from_cache_file": true,
237
+ "dataset_shuffle": true,
238
+ "val_dataset_shuffle": false,
239
+ "streaming": false,
240
+ "interleave_prob": null,
241
+ "stopping_strategy": "first_exhausted",
242
+ "shuffle_buffer_size": 1000,
243
+ "download_mode": "reuse_dataset_if_exists",
244
+ "columns": {},
245
+ "strict": false,
246
+ "model_name": null,
247
+ "model_author": null,
248
+ "custom_dataset_info": [],
249
+ "quant_method": null,
250
+ "quant_bits": null,
251
+ "hqq_axis": null,
252
+ "bnb_4bit_compute_dtype": "bfloat16",
253
+ "bnb_4bit_quant_type": "nf4",
254
+ "bnb_4bit_use_double_quant": true,
255
+ "bnb_4bit_quant_storage": null,
256
+ "max_new_tokens": 64,
257
+ "temperature": 0.0,
258
+ "top_k": null,
259
+ "top_p": null,
260
+ "repetition_penalty": null,
261
+ "num_beams": 1,
262
+ "stream": false,
263
+ "stop_words": [],
264
+ "logprobs": false,
265
+ "top_logprobs": null,
266
+ "O3": true,
267
+ "ckpt_dir": null,
268
+ "lora_modules": [],
269
+ "train_type": "full",
270
+ "adapters": [],
271
+ "external_plugins": [],
272
+ "model_kwargs": {},
273
+ "load_args": false,
274
+ "load_data_args": false,
275
+ "packing": false,
276
+ "packing_length": null,
277
+ "lazy_tokenize": true,
278
+ "cached_dataset": [],
279
+ "custom_register_path": [],
280
+ "use_hf": false,
281
+ "ignore_args_error": false,
282
+ "use_swift_lora": false,
283
+ "freeze_parameters": [
284
+ "model.visual",
285
+ "model.visual.merger"
286
+ ],
287
+ "freeze_parameters_regex": null,
288
+ "freeze_parameters_ratio": 0.0,
289
+ "trainable_parameters": [],
290
+ "trainable_parameters_regex": null,
291
+ "freeze_llm": false,
292
+ "freeze_vit": true,
293
+ "freeze_aligner": true,
294
+ "target_modules": [
295
+ "all-linear"
296
+ ],
297
+ "target_regex": null,
298
+ "target_parameters": null,
299
+ "modules_to_save": [],
300
+ "lora_rank": 8,
301
+ "lora_alpha": 32,
302
+ "lora_dropout": 0.05,
303
+ "lora_bias": "none",
304
+ "lora_dtype": null,
305
+ "lorap_lr_ratio": null,
306
+ "use_rslora": false,
307
+ "use_dora": false,
308
+ "lora_ga_batch_size": 2,
309
+ "lora_ga_iters": 2,
310
+ "lora_ga_max_length": 1024,
311
+ "lora_ga_direction": "ArB2r",
312
+ "lora_ga_scale": "stable",
313
+ "lora_ga_stable_gamma": 16,
314
+ "init_weights": true,
315
+ "fourier_n_frequency": 2000,
316
+ "fourier_scaling": 300.0,
317
+ "boft_block_size": 4,
318
+ "boft_block_num": 0,
319
+ "boft_n_butterfly_factor": 1,
320
+ "boft_dropout": 0.0,
321
+ "vera_rank": 256,
322
+ "vera_projection_prng_key": 0,
323
+ "vera_dropout": 0.0,
324
+ "vera_d_initial": 0.1,
325
+ "adapter_act": "gelu",
326
+ "adapter_length": 128,
327
+ "use_galore": false,
328
+ "galore_target_modules": null,
329
+ "galore_rank": 128,
330
+ "galore_update_proj_gap": 50,
331
+ "galore_scale": 1.0,
332
+ "galore_proj_type": "std",
333
+ "galore_optim_per_parameter": false,
334
+ "galore_with_embedding": false,
335
+ "galore_quantization": false,
336
+ "galore_proj_quant": false,
337
+ "galore_proj_bits": 4,
338
+ "galore_proj_group_size": 256,
339
+ "galore_cos_threshold": 0.4,
340
+ "galore_gamma_proj": 2,
341
+ "galore_queue_size": 5,
342
+ "adalora_target_r": 8,
343
+ "adalora_init_r": 12,
344
+ "adalora_tinit": 0,
345
+ "adalora_tfinal": 0,
346
+ "adalora_deltaT": 1,
347
+ "adalora_beta1": 0.85,
348
+ "adalora_beta2": 0.85,
349
+ "adalora_orth_reg_weight": 0.5,
350
+ "llamapro_num_new_blocks": 4,
351
+ "llamapro_num_groups": null,
352
+ "lisa_activated_layers": 0,
353
+ "lisa_step_interval": 20,
354
+ "reft_layer_key": null,
355
+ "reft_layers": null,
356
+ "reft_rank": 4,
357
+ "reft_intervention_type": "LoreftIntervention",
358
+ "reft_args": null,
359
+ "swanlab_token": null,
360
+ "swanlab_project": null,
361
+ "swanlab_workspace": null,
362
+ "swanlab_exp_name": null,
363
+ "swanlab_lark_webhook_url": null,
364
+ "swanlab_lark_secret": null,
365
+ "swanlab_mode": "cloud",
366
+ "add_version": true,
367
+ "create_checkpoint_symlink": false,
368
+ "zero_hpz_partition_size": null,
369
+ "deepspeed_autotp_size": null,
370
+ "early_stop_interval": null,
371
+ "rank": 0,
372
+ "global_world_size": 64,
373
+ "local_world_size": 8,
374
+ "model_suffix": "Qwen2.5-VL-7B-Instruct",
375
+ "model_info": "ModelInfo(model_type='qwen2_5_vl', model_dir='/data_genie/genie/yfzhang/models/Qwen2.5-VL-7B-Instruct', torch_dtype=torch.bfloat16, max_model_len=128000, quant_method=None, quant_bits=None, rope_scaling={'type': 'default', 'mrope_section': [16, 24, 24], 'rope_type': 'default'}, is_moe_model=False, config=None, task_type='causal_lm', num_labels=None)",
376
+ "model_meta": "ModelMeta(model_type='qwen2_5_vl', model_groups=[ModelGroup(models=[Model(ms_model_id='Qwen/Qwen2.5-VL-3B-Instruct', hf_model_id='Qwen/Qwen2.5-VL-3B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-7B-Instruct', hf_model_id='Qwen/Qwen2.5-VL-7B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-32B-Instruct', hf_model_id='Qwen/Qwen2.5-VL-32B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-72B-Instruct', hf_model_id='Qwen/Qwen2.5-VL-72B-Instruct', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[]), ModelGroup(models=[Model(ms_model_id='Qwen/Qwen2.5-VL-3B-Instruct-AWQ', hf_model_id='Qwen/Qwen2.5-VL-3B-Instruct-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-7B-Instruct-AWQ', hf_model_id='Qwen/Qwen2.5-VL-7B-Instruct-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-32B-Instruct-AWQ', hf_model_id='Qwen/Qwen2.5-VL-32B-Instruct-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-72B-Instruct-AWQ', hf_model_id='Qwen/Qwen2.5-VL-72B-Instruct-AWQ', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[])], template='qwen2_5_vl', get_function=<function get_model_tokenizer_qwen2_5_vl at 0x7fe91d124cc0>, model_arch=MultiModelKeys(arch_name='qwen2_vl', embedding=None, module_list=None, lm_head=None, q_proj=None, k_proj=None, v_proj=None, o_proj=None, attention=None, mlp=None, down_proj=None, qkv_proj=None, qk_proj=None, qa_proj=None, qb_proj=None, kv_proj=None, kva_proj=None, kvb_proj=None, language_model=['model.language_model'], aligner=['model.visual.merger'], vision_tower=['model.visual'], generator=[]), architectures=['Qwen2_5_VLForConditionalGeneration'], additional_saved_files=[], torch_dtype=None, is_multimodal=True, is_reward=False, task_type=None, ignore_patterns=None, requires=['transformers>=4.49', 'qwen_vl_utils>=0.0.6', 'decord'], tags=['vision', 'video'])",
377
+ "model_dir": "/data_genie/genie/yfzhang/models/Qwen2.5-VL-7B-Instruct",
378
+ "hub": "<class 'swift.hub.hub.MSHub'>",
379
+ "evaluation_strategy": "epoch",
380
+ "training_args": "Seq2SeqTrainingArguments(output_dir='/data_r1v4/data_r1v4/yifan.zhang/models/GLM_kwai_all_general_12_claude_general_high_res_wothink12/v1-20250915-132509', overwrite_output_dir=False, do_train=False, do_eval=False, do_predict=False, eval_strategy=<IntervalStrategy.NO: 'no'>, prediction_loss_only=False, per_device_train_batch_size=1, per_device_eval_batch_size=1, per_gpu_train_batch_size=None, per_gpu_eval_batch_size=None, gradient_accumulation_steps=2, eval_accumulation_steps=None, eval_delay=0, torch_empty_cache_steps=None, learning_rate=1e-05, weight_decay=0.1, adam_beta1=0.9, adam_beta2=0.95, adam_epsilon=1e-08, max_grad_norm=1.0, num_train_epochs=5.0, max_steps=-1, lr_scheduler_type=<SchedulerType.COSINE: 'cosine'>, lr_scheduler_kwargs=None, warmup_ratio=0.05, warmup_steps=0, log_level='passive', log_level_replica='warning', log_on_each_node=True, logging_dir='/data_r1v4/data_r1v4/yifan.zhang/models/GLM_kwai_all_general_12_claude_general_high_res_wothink12/v1-20250915-132509/runs', logging_strategy=<IntervalStrategy.STEPS: 'steps'>, logging_first_step=True, logging_steps=5, logging_nan_inf_filter=True, save_strategy=<SaveStrategy.EPOCH: 'epoch'>, save_steps=500, save_total_limit=5, save_safetensors=True, save_on_each_node=False, save_only_model=True, restore_callback_states_from_checkpoint=False, no_cuda=False, use_cpu=False, use_mps_device=False, seed=42, data_seed=42, jit_mode_eval=False, use_ipex=False, bf16=True, fp16=False, fp16_opt_level='O1', half_precision_backend='auto', bf16_full_eval=False, fp16_full_eval=False, tf32=None, local_rank=0, ddp_backend=None, tpu_num_cores=None, tpu_metrics_debug=False, debug=[], dataloader_drop_last=False, eval_steps=None, dataloader_num_workers=4, dataloader_prefetch_factor=10, past_index=-1, run_name='/data_r1v4/data_r1v4/yifan.zhang/models/GLM_kwai_all_general_12_claude_general_high_res_wothink12/v1-20250915-132509', disable_tqdm=False, remove_unused_columns=False, label_names=None, load_best_model_at_end=False, metric_for_best_model='loss', greater_is_better=False, ignore_data_skip=False, fsdp=[], fsdp_min_num_params=0, fsdp_config={'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, fsdp_transformer_layer_cls_to_wrap=None, accelerator_config=AcceleratorConfig(split_batches=False, dispatch_batches=False, even_batches=True, use_seedable_sampler=True, non_blocking=False, gradient_accumulation_kwargs=None, use_configured_state=False), deepspeed={'fp16': {'enabled': 'auto', 'loss_scale': 0, 'loss_scale_window': 1000, 'initial_scale_power': 16, 'hysteresis': 2, 'min_loss_scale': 1}, 'bf16': {'enabled': 'auto'}, 'zero_optimization': {'stage': 2, 'offload_optimizer': {'device': 'none', 'pin_memory': True}, 'allgather_partitions': True, 'allgather_bucket_size': 200000000.0, 'overlap_comm': False, 'reduce_scatter': True, 'reduce_bucket_size': 200000000.0, 'contiguous_gradients': True}, 'gradient_accumulation_steps': 'auto', 'gradient_clipping': 'auto', 'steps_per_print': 2000, 'train_batch_size': 'auto', 'train_micro_batch_size_per_gpu': 'auto', 'wall_clock_breakdown': False}, label_smoothing_factor=0.0, optim=<OptimizerNames.ADAMW_TORCH: 'adamw_torch'>, optim_args=None, adafactor=False, group_by_length=False, length_column_name='length', report_to=['wandb'], ddp_find_unused_parameters=None, ddp_bucket_cap_mb=None, ddp_broadcast_buffers=None, dataloader_pin_memory=True, dataloader_persistent_workers=False, skip_memory_metrics=True, use_legacy_prediction_loop=False, push_to_hub=False, 
resume_from_checkpoint=None, hub_model_id=None, hub_strategy=<HubStrategy.EVERY_SAVE: 'every_save'>, hub_token=None, hub_private_repo=None, hub_always_push=False, hub_revision=None, gradient_checkpointing=True, gradient_checkpointing_kwargs=None, include_inputs_for_metrics=False, include_for_metrics=[], eval_do_concat_batches=True, fp16_backend='auto', push_to_hub_model_id=None, push_to_hub_organization=None, push_to_hub_token=None, mp_parameters='', auto_find_batch_size=False, full_determinism=False, torchdynamo=None, ray_scope='last', ddp_timeout=18000000, torch_compile=False, torch_compile_backend=None, torch_compile_mode=None, include_tokens_per_second=None, include_num_input_tokens_seen=None, neftune_noise_alpha=None, optim_target_modules=None, batch_eval_metrics=False, eval_on_start=False, use_liger_kernel=False, liger_kernel_config=None, eval_use_gather_object=False, average_tokens_across_devices=None, sortish_sampler=False, predict_with_generate=False, generation_max_length=None, generation_num_beams=None, generation_config=None, tuner_backend='peft', vit_gradient_checkpointing=True, router_aux_loss_coef=0.0, enable_dft_loss=False, enable_channel_loss=False, check_model=True, acc_strategy='token', train_dataloader_shuffle=True, max_epochs=None, aligner_lr=None, vit_lr=None, use_logits_to_keep=None, ds3_gather_for_generation=True, resume_only_model=False, optimizer=None, loss_type=None, metric=None, eval_use_evalscope=False, eval_dataset=[], eval_dataset_args=None, eval_limit=None, eval_generation_config=None, extra_eval_args=None, use_flash_ckpt=False, sft_alpha=0, train_type='full', local_repo_path=None, galore_config=None)"
381
+ }
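
Note: the `system` prompt embedded in args.json above instructs the model to wrap optional image-manipulation code in `<code>```python ... ```</code>` and to print a `processed_path` that the external sandbox returns inside `<sandbox_output>` tags. As a minimal sketch of the kind of snippet that contract expects (the input path and region of interest below are hypothetical, not part of this commit):

```python
from PIL import Image  # assumes Pillow is available in the sandbox

# Hypothetical input image and region of interest (left, upper, right, lower).
image_path = "input.png"
roi = (100, 50, 400, 300)

# Crop the ROI and upscale it so fine details are easier to inspect.
img = Image.open(image_path)
cropped = img.crop(roi)
cropped = cropped.resize((cropped.width * 2, cropped.height * 2), Image.LANCZOS)

# Save and, per the sandbox contract, print the path of the processed image.
processed_path = "processed_roi.png"
cropped.save(processed_path)
print(processed_path)
```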
chat_template.jinja ADDED
@@ -0,0 +1,7 @@
+ {% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system
+ You are a helpful assistant.<|im_end|>
+ {% endif %}<|im_start|>{{ message['role'] }}
+ {% if message['content'] is string %}{{ message['content'] }}<|im_end|>
+ {% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>
+ {% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant
+ {% endif %}
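
For reference, a minimal sketch (an assumption about typical usage, not part of this commit) of how a chat_template.jinja like the one above is rendered with the standard `transformers` processor API; the checkpoint path is hypothetical:

```python
from transformers import AutoProcessor

# Hypothetical local path to this repository's files.
processor = AutoProcessor.from_pretrained("path/to/this/checkpoint")

messages = [
    {"role": "user", "content": [
        {"type": "image", "image": "file:///path/to/example.jpg"},
        {"type": "text", "text": "Describe this image."},
    ]},
]

# Renders the Jinja template above: a default system header, a
# <|vision_start|><|image_pad|><|vision_end|> placeholder for the image,
# and an <|im_start|>assistant generation prompt at the end.
prompt = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```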
config.json ADDED
@@ -0,0 +1,138 @@
+ {
+ "architectures": [
+ "Qwen2_5_VLForConditionalGeneration"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 3584,
+ "image_token_id": 151655,
+ "initializer_range": 0.02,
+ "intermediate_size": 18944,
+ "max_position_embeddings": 128000,
+ "max_window_layers": 28,
+ "model_type": "qwen2_5_vl",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "pad_token_id": 151643,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "mrope_section": [
+ 16,
+ 24,
+ 24
+ ],
+ "rope_type": "default",
+ "type": "default"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": 32768,
+ "text_config": {
+ "architectures": [
+ "Qwen2_5_VLForConditionalGeneration"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 3584,
+ "image_token_id": null,
+ "initializer_range": 0.02,
+ "intermediate_size": 18944,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 128000,
+ "max_window_layers": 28,
+ "model_type": "qwen2_5_vl_text",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "pad_token_id": 151643,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": {
+ "mrope_section": [
+ 16,
+ 24,
+ 24
+ ],
+ "rope_type": "default",
+ "type": "default"
+ },
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "torch_dtype": "bfloat16",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "video_token_id": null,
+ "vision_end_token_id": 151653,
+ "vision_start_token_id": 151652,
+ "vision_token_id": 151654,
+ "vocab_size": 152064
+ },
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.55.4",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "video_token_id": 151656,
+ "vision_config": {
+ "depth": 32,
+ "fullatt_block_indexes": [
+ 7,
+ 15,
+ 23,
+ 31
+ ],
+ "hidden_act": "silu",
+ "hidden_size": 1280,
+ "in_channels": 3,
+ "in_chans": 3,
+ "initializer_range": 0.02,
+ "intermediate_size": 3420,
+ "model_type": "qwen2_5_vl",
+ "num_heads": 16,
+ "out_hidden_size": 3584,
+ "pad_token_id": 151643,
+ "patch_size": 14,
+ "spatial_merge_size": 2,
+ "spatial_patch_size": 14,
+ "temporal_patch_size": 2,
+ "tokens_per_second": 2,
+ "torch_dtype": "bfloat16",
+ "window_size": 112
+ },
+ "vision_end_token_id": 151653,
+ "vision_start_token_id": 151652,
+ "vision_token_id": 151654,
+ "vocab_size": 152064
+ }
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.05,
+ "temperature": 1e-06,
+ "transformers_version": "4.55.4"
+ }
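
As a quick check (a sketch assuming the standard `transformers` API; the checkpoint path is hypothetical), the generation defaults above can be loaded and inspected with `GenerationConfig`:

```python
from transformers import GenerationConfig

# Hypothetical local path to this repository's files.
gen_config = GenerationConfig.from_pretrained("path/to/this/checkpoint")

# Mirrors generation_config.json: do_sample=True with temperature=1e-06
# (effectively greedy decoding) and repetition_penalty=1.05.
print(gen_config.temperature, gen_config.repetition_penalty, gen_config.eos_token_id)
```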
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0ae322f6e5ba25b2826723db5409d8bbcacfaa8714088a45807a7d5a6db2be18
+ size 4968243304
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4521812e307ebd2ebd22aed7e7ae57177b0d745b6a82652433e2a5e13e9c50b9
+ size 4991495816
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d2ea67ccfc5c3d1c63fe998fe8679659563325596c64d7ab4f157f224a44700e
+ size 4932751040
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d181e953e984acb44e5ffcfe324768238f40e055f717244bf66d4bdabb89b691
+ size 1691924384
model.safetensors.index.json ADDED
@@ -0,0 +1,737 @@
1
+ {
2
+ "metadata": {
3
+ "total_parameters": 8292166656,
4
+ "total_size": 16584333312
5
+ },
6
+ "weight_map": {
7
+ "lm_head.weight": "model-00004-of-00004.safetensors",
8
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
9
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
10
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
11
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
12
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
13
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
14
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
15
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
16
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
17
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
18
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
19
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
20
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
21
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
22
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
23
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
24
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
25
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
26
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
27
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
28
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
29
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
30
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
31
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
32
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
33
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
34
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
35
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
36
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
37
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
38
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
39
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
40
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
41
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
42
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
43
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
44
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
45
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
46
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
47
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
48
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
49
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
50
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
51
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
52
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
53
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
54
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
55
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
56
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
57
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
58
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
59
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
60
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
61
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
62
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
63
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
64
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
65
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
66
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
67
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
68
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
69
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
70
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
71
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
72
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
73
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
74
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
75
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
76
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
77
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
78
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
79
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
80
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
81
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
82
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
83
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
84
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
85
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
86
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
87
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
88
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
89
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
90
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
91
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
92
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
93
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
94
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
95
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
96
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
97
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
98
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
99
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
100
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
101
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
102
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
103
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
104
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
105
+ "model.layers.16.input_layernorm.weight": "model-00003-of-00004.safetensors",
106
+ "model.layers.16.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
107
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
108
+ "model.layers.16.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
109
+ "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
110
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
111
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
112
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
113
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
114
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
115
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
116
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
117
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00004.safetensors",
118
+ "model.layers.17.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
119
+ "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
120
+ "model.layers.17.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
121
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
122
+ "model.layers.17.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
123
+ "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
124
+ "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
125
+ "model.layers.17.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
126
+ "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
127
+ "model.layers.17.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
128
+ "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
129
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
130
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
131
+ "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
132
+ "model.layers.18.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
133
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
134
+ "model.layers.18.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
135
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
136
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
137
+ "model.layers.18.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
138
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
139
+ "model.layers.18.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
140
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
141
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
142
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
143
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
144
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
145
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
146
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
147
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
148
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
149
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
150
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
151
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
152
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
153
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
154
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
155
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
156
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
157
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
158
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
159
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
160
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
161
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
162
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
163
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
164
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
165
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
166
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
167
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
168
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
169
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
170
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
171
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
172
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
173
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
174
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
175
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
176
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
177
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
178
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
179
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
180
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
181
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
182
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
183
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
184
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
185
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
186
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
187
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
188
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
189
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
190
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
191
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
192
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
193
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
194
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
195
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
196
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
197
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
198
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
199
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
200
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
201
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
202
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
203
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
204
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
205
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
206
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
207
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
208
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
209
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
210
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
211
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
212
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
213
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
214
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
215
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
216
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
217
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
218
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
219
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
220
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
221
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
222
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
223
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
224
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
225
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
226
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
227
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
228
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
229
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
230
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
231
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
232
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
233
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
234
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
235
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
236
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
237
+ "model.layers.26.input_layernorm.weight": "model-00004-of-00004.safetensors",
238
+ "model.layers.26.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
239
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
240
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
241
+ "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
242
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
243
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
244
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
245
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
246
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
247
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
248
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
249
+ "model.layers.27.input_layernorm.weight": "model-00004-of-00004.safetensors",
250
+ "model.layers.27.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
251
+ "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
252
+ "model.layers.27.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
253
+ "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
254
+ "model.layers.27.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
255
+ "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
256
+ "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
257
+ "model.layers.27.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
258
+ "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
259
+ "model.layers.27.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
260
+ "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
261
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
262
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
263
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
264
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
265
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
266
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
267
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
268
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
269
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
270
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
271
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
272
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
273
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
274
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
275
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
276
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
277
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
278
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
279
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
280
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
281
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
282
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
283
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
284
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
285
+ "model.layers.5.input_layernorm.weight": "model-00002-of-00004.safetensors",
286
+ "model.layers.5.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
287
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
288
+ "model.layers.5.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
289
+ "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
290
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
291
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
292
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
293
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
294
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
295
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
296
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
297
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00004.safetensors",
298
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
299
+ "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
300
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
301
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
302
+ "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
303
+ "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
304
+ "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
305
+ "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
306
+ "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
307
+ "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
308
+ "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
309
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors",
310
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
311
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
312
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
313
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
314
+ "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
315
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
316
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
317
+ "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
318
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
319
+ "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
320
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
321
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
322
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
323
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
324
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
325
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
326
+ "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
327
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
328
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
329
+ "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
330
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
331
+ "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
332
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
333
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
334
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
335
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
336
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
337
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
338
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
339
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
340
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
341
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
342
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
343
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
344
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
345
+ "model.norm.weight": "model-00004-of-00004.safetensors",
346
+ "visual.blocks.0.attn.proj.bias": "model-00001-of-00004.safetensors",
347
+ "visual.blocks.0.attn.proj.weight": "model-00001-of-00004.safetensors",
348
+ "visual.blocks.0.attn.qkv.bias": "model-00001-of-00004.safetensors",
349
+ "visual.blocks.0.attn.qkv.weight": "model-00001-of-00004.safetensors",
350
+ "visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
351
+ "visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
352
+ "visual.blocks.0.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
353
+ "visual.blocks.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
354
+ "visual.blocks.0.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
355
+ "visual.blocks.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
356
+ "visual.blocks.0.norm1.weight": "model-00001-of-00004.safetensors",
357
+ "visual.blocks.0.norm2.weight": "model-00001-of-00004.safetensors",
358
+ "visual.blocks.1.attn.proj.bias": "model-00001-of-00004.safetensors",
359
+ "visual.blocks.1.attn.proj.weight": "model-00001-of-00004.safetensors",
360
+ "visual.blocks.1.attn.qkv.bias": "model-00001-of-00004.safetensors",
361
+ "visual.blocks.1.attn.qkv.weight": "model-00001-of-00004.safetensors",
362
+ "visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
363
+ "visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
364
+ "visual.blocks.1.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
365
+ "visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
366
+ "visual.blocks.1.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
367
+ "visual.blocks.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
368
+ "visual.blocks.1.norm1.weight": "model-00001-of-00004.safetensors",
369
+ "visual.blocks.1.norm2.weight": "model-00001-of-00004.safetensors",
370
+ "visual.blocks.10.attn.proj.bias": "model-00001-of-00004.safetensors",
371
+ "visual.blocks.10.attn.proj.weight": "model-00001-of-00004.safetensors",
372
+ "visual.blocks.10.attn.qkv.bias": "model-00001-of-00004.safetensors",
373
+ "visual.blocks.10.attn.qkv.weight": "model-00001-of-00004.safetensors",
374
+ "visual.blocks.10.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
375
+ "visual.blocks.10.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
376
+ "visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
377
+ "visual.blocks.10.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
378
+ "visual.blocks.10.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
379
+ "visual.blocks.10.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
380
+ "visual.blocks.10.norm1.weight": "model-00001-of-00004.safetensors",
381
+ "visual.blocks.10.norm2.weight": "model-00001-of-00004.safetensors",
382
+ "visual.blocks.11.attn.proj.bias": "model-00001-of-00004.safetensors",
383
+ "visual.blocks.11.attn.proj.weight": "model-00001-of-00004.safetensors",
384
+ "visual.blocks.11.attn.qkv.bias": "model-00001-of-00004.safetensors",
385
+ "visual.blocks.11.attn.qkv.weight": "model-00001-of-00004.safetensors",
386
+ "visual.blocks.11.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
387
+ "visual.blocks.11.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
388
+ "visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
389
+ "visual.blocks.11.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
390
+ "visual.blocks.11.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
391
+ "visual.blocks.11.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
392
+ "visual.blocks.11.norm1.weight": "model-00001-of-00004.safetensors",
393
+ "visual.blocks.11.norm2.weight": "model-00001-of-00004.safetensors",
394
+ "visual.blocks.12.attn.proj.bias": "model-00001-of-00004.safetensors",
395
+ "visual.blocks.12.attn.proj.weight": "model-00001-of-00004.safetensors",
396
+ "visual.blocks.12.attn.qkv.bias": "model-00001-of-00004.safetensors",
397
+ "visual.blocks.12.attn.qkv.weight": "model-00001-of-00004.safetensors",
398
+ "visual.blocks.12.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
399
+ "visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
400
+ "visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
401
+ "visual.blocks.12.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
402
+ "visual.blocks.12.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
403
+ "visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
404
+ "visual.blocks.12.norm1.weight": "model-00001-of-00004.safetensors",
405
+ "visual.blocks.12.norm2.weight": "model-00001-of-00004.safetensors",
406
+ "visual.blocks.13.attn.proj.bias": "model-00001-of-00004.safetensors",
407
+ "visual.blocks.13.attn.proj.weight": "model-00001-of-00004.safetensors",
408
+ "visual.blocks.13.attn.qkv.bias": "model-00001-of-00004.safetensors",
409
+ "visual.blocks.13.attn.qkv.weight": "model-00001-of-00004.safetensors",
410
+ "visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
411
+ "visual.blocks.13.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
412
+ "visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
413
+ "visual.blocks.13.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
414
+ "visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
415
+ "visual.blocks.13.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
416
+ "visual.blocks.13.norm1.weight": "model-00001-of-00004.safetensors",
417
+ "visual.blocks.13.norm2.weight": "model-00001-of-00004.safetensors",
418
+ "visual.blocks.14.attn.proj.bias": "model-00001-of-00004.safetensors",
419
+ "visual.blocks.14.attn.proj.weight": "model-00001-of-00004.safetensors",
420
+ "visual.blocks.14.attn.qkv.bias": "model-00001-of-00004.safetensors",
421
+ "visual.blocks.14.attn.qkv.weight": "model-00001-of-00004.safetensors",
422
+ "visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
423
+ "visual.blocks.14.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
424
+ "visual.blocks.14.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
425
+ "visual.blocks.14.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
426
+ "visual.blocks.14.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
427
+ "visual.blocks.14.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
428
+ "visual.blocks.14.norm1.weight": "model-00001-of-00004.safetensors",
429
+ "visual.blocks.14.norm2.weight": "model-00001-of-00004.safetensors",
430
+ "visual.blocks.15.attn.proj.bias": "model-00001-of-00004.safetensors",
431
+ "visual.blocks.15.attn.proj.weight": "model-00001-of-00004.safetensors",
432
+ "visual.blocks.15.attn.qkv.bias": "model-00001-of-00004.safetensors",
433
+ "visual.blocks.15.attn.qkv.weight": "model-00001-of-00004.safetensors",
434
+ "visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
435
+ "visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
436
+ "visual.blocks.15.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
437
+ "visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
438
+ "visual.blocks.15.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
439
+ "visual.blocks.15.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
440
+ "visual.blocks.15.norm1.weight": "model-00001-of-00004.safetensors",
441
+ "visual.blocks.15.norm2.weight": "model-00001-of-00004.safetensors",
442
+ "visual.blocks.16.attn.proj.bias": "model-00001-of-00004.safetensors",
443
+ "visual.blocks.16.attn.proj.weight": "model-00001-of-00004.safetensors",
444
+ "visual.blocks.16.attn.qkv.bias": "model-00001-of-00004.safetensors",
445
+ "visual.blocks.16.attn.qkv.weight": "model-00001-of-00004.safetensors",
446
+ "visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
447
+ "visual.blocks.16.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
448
+ "visual.blocks.16.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
449
+ "visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
450
+ "visual.blocks.16.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
451
+ "visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
452
+ "visual.blocks.16.norm1.weight": "model-00001-of-00004.safetensors",
453
+ "visual.blocks.16.norm2.weight": "model-00001-of-00004.safetensors",
454
+ "visual.blocks.17.attn.proj.bias": "model-00001-of-00004.safetensors",
455
+ "visual.blocks.17.attn.proj.weight": "model-00001-of-00004.safetensors",
456
+ "visual.blocks.17.attn.qkv.bias": "model-00001-of-00004.safetensors",
457
+ "visual.blocks.17.attn.qkv.weight": "model-00001-of-00004.safetensors",
458
+ "visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
459
+ "visual.blocks.17.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
460
+ "visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
461
+ "visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
462
+ "visual.blocks.17.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
463
+ "visual.blocks.17.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
464
+ "visual.blocks.17.norm1.weight": "model-00001-of-00004.safetensors",
465
+ "visual.blocks.17.norm2.weight": "model-00001-of-00004.safetensors",
466
+ "visual.blocks.18.attn.proj.bias": "model-00001-of-00004.safetensors",
467
+ "visual.blocks.18.attn.proj.weight": "model-00001-of-00004.safetensors",
468
+ "visual.blocks.18.attn.qkv.bias": "model-00001-of-00004.safetensors",
469
+ "visual.blocks.18.attn.qkv.weight": "model-00001-of-00004.safetensors",
470
+ "visual.blocks.18.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
471
+ "visual.blocks.18.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
472
+ "visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
473
+ "visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
474
+ "visual.blocks.18.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
475
+ "visual.blocks.18.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
476
+ "visual.blocks.18.norm1.weight": "model-00001-of-00004.safetensors",
477
+ "visual.blocks.18.norm2.weight": "model-00001-of-00004.safetensors",
478
+ "visual.blocks.19.attn.proj.bias": "model-00001-of-00004.safetensors",
479
+ "visual.blocks.19.attn.proj.weight": "model-00001-of-00004.safetensors",
480
+ "visual.blocks.19.attn.qkv.bias": "model-00001-of-00004.safetensors",
481
+ "visual.blocks.19.attn.qkv.weight": "model-00001-of-00004.safetensors",
482
+ "visual.blocks.19.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
483
+ "visual.blocks.19.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
484
+ "visual.blocks.19.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
485
+ "visual.blocks.19.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
486
+ "visual.blocks.19.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
487
+ "visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
488
+ "visual.blocks.19.norm1.weight": "model-00001-of-00004.safetensors",
489
+ "visual.blocks.19.norm2.weight": "model-00001-of-00004.safetensors",
490
+ "visual.blocks.2.attn.proj.bias": "model-00001-of-00004.safetensors",
491
+ "visual.blocks.2.attn.proj.weight": "model-00001-of-00004.safetensors",
492
+ "visual.blocks.2.attn.qkv.bias": "model-00001-of-00004.safetensors",
493
+ "visual.blocks.2.attn.qkv.weight": "model-00001-of-00004.safetensors",
494
+ "visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
495
+ "visual.blocks.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
496
+ "visual.blocks.2.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
497
+ "visual.blocks.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
498
+ "visual.blocks.2.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
499
+ "visual.blocks.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
500
+ "visual.blocks.2.norm1.weight": "model-00001-of-00004.safetensors",
501
+ "visual.blocks.2.norm2.weight": "model-00001-of-00004.safetensors",
502
+ "visual.blocks.20.attn.proj.bias": "model-00001-of-00004.safetensors",
503
+ "visual.blocks.20.attn.proj.weight": "model-00001-of-00004.safetensors",
504
+ "visual.blocks.20.attn.qkv.bias": "model-00001-of-00004.safetensors",
505
+ "visual.blocks.20.attn.qkv.weight": "model-00001-of-00004.safetensors",
506
+ "visual.blocks.20.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
507
+ "visual.blocks.20.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
508
+ "visual.blocks.20.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
509
+ "visual.blocks.20.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
510
+ "visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
511
+ "visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
512
+ "visual.blocks.20.norm1.weight": "model-00001-of-00004.safetensors",
513
+ "visual.blocks.20.norm2.weight": "model-00001-of-00004.safetensors",
514
+ "visual.blocks.21.attn.proj.bias": "model-00001-of-00004.safetensors",
515
+ "visual.blocks.21.attn.proj.weight": "model-00001-of-00004.safetensors",
516
+ "visual.blocks.21.attn.qkv.bias": "model-00001-of-00004.safetensors",
517
+ "visual.blocks.21.attn.qkv.weight": "model-00001-of-00004.safetensors",
518
+ "visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
519
+ "visual.blocks.21.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
520
+ "visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
521
+ "visual.blocks.21.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
522
+ "visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
523
+ "visual.blocks.21.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
524
+ "visual.blocks.21.norm1.weight": "model-00001-of-00004.safetensors",
525
+ "visual.blocks.21.norm2.weight": "model-00001-of-00004.safetensors",
526
+ "visual.blocks.22.attn.proj.bias": "model-00001-of-00004.safetensors",
527
+ "visual.blocks.22.attn.proj.weight": "model-00001-of-00004.safetensors",
528
+ "visual.blocks.22.attn.qkv.bias": "model-00001-of-00004.safetensors",
529
+ "visual.blocks.22.attn.qkv.weight": "model-00001-of-00004.safetensors",
530
+ "visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
531
+ "visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
532
+ "visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
533
+ "visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
534
+ "visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
535
+ "visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
536
+ "visual.blocks.22.norm1.weight": "model-00001-of-00004.safetensors",
537
+ "visual.blocks.22.norm2.weight": "model-00001-of-00004.safetensors",
538
+ "visual.blocks.23.attn.proj.bias": "model-00001-of-00004.safetensors",
539
+ "visual.blocks.23.attn.proj.weight": "model-00001-of-00004.safetensors",
540
+ "visual.blocks.23.attn.qkv.bias": "model-00001-of-00004.safetensors",
541
+ "visual.blocks.23.attn.qkv.weight": "model-00001-of-00004.safetensors",
542
+ "visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
543
+ "visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
544
+ "visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
545
+ "visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
546
+ "visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
547
+ "visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
548
+ "visual.blocks.23.norm1.weight": "model-00001-of-00004.safetensors",
549
+ "visual.blocks.23.norm2.weight": "model-00001-of-00004.safetensors",
550
+ "visual.blocks.24.attn.proj.bias": "model-00001-of-00004.safetensors",
551
+ "visual.blocks.24.attn.proj.weight": "model-00001-of-00004.safetensors",
552
+ "visual.blocks.24.attn.qkv.bias": "model-00001-of-00004.safetensors",
553
+ "visual.blocks.24.attn.qkv.weight": "model-00001-of-00004.safetensors",
554
+ "visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
555
+ "visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
556
+ "visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
557
+ "visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
558
+ "visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
559
+ "visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
560
+ "visual.blocks.24.norm1.weight": "model-00001-of-00004.safetensors",
561
+ "visual.blocks.24.norm2.weight": "model-00001-of-00004.safetensors",
562
+ "visual.blocks.25.attn.proj.bias": "model-00001-of-00004.safetensors",
563
+ "visual.blocks.25.attn.proj.weight": "model-00001-of-00004.safetensors",
564
+ "visual.blocks.25.attn.qkv.bias": "model-00001-of-00004.safetensors",
565
+ "visual.blocks.25.attn.qkv.weight": "model-00001-of-00004.safetensors",
566
+ "visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
567
+ "visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
568
+ "visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
569
+ "visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
570
+ "visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
571
+ "visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
572
+ "visual.blocks.25.norm1.weight": "model-00001-of-00004.safetensors",
573
+ "visual.blocks.25.norm2.weight": "model-00001-of-00004.safetensors",
574
+ "visual.blocks.26.attn.proj.bias": "model-00001-of-00004.safetensors",
575
+ "visual.blocks.26.attn.proj.weight": "model-00001-of-00004.safetensors",
576
+ "visual.blocks.26.attn.qkv.bias": "model-00001-of-00004.safetensors",
577
+ "visual.blocks.26.attn.qkv.weight": "model-00001-of-00004.safetensors",
578
+ "visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
579
+ "visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
580
+ "visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
581
+ "visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
582
+ "visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
583
+ "visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
584
+ "visual.blocks.26.norm1.weight": "model-00001-of-00004.safetensors",
585
+ "visual.blocks.26.norm2.weight": "model-00001-of-00004.safetensors",
586
+ "visual.blocks.27.attn.proj.bias": "model-00001-of-00004.safetensors",
587
+ "visual.blocks.27.attn.proj.weight": "model-00001-of-00004.safetensors",
588
+ "visual.blocks.27.attn.qkv.bias": "model-00001-of-00004.safetensors",
589
+ "visual.blocks.27.attn.qkv.weight": "model-00001-of-00004.safetensors",
590
+ "visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
591
+ "visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
592
+ "visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
593
+ "visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
594
+ "visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
595
+ "visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
596
+ "visual.blocks.27.norm1.weight": "model-00001-of-00004.safetensors",
597
+ "visual.blocks.27.norm2.weight": "model-00001-of-00004.safetensors",
598
+ "visual.blocks.28.attn.proj.bias": "model-00001-of-00004.safetensors",
599
+ "visual.blocks.28.attn.proj.weight": "model-00001-of-00004.safetensors",
600
+ "visual.blocks.28.attn.qkv.bias": "model-00001-of-00004.safetensors",
601
+ "visual.blocks.28.attn.qkv.weight": "model-00001-of-00004.safetensors",
602
+ "visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
603
+ "visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
604
+ "visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
605
+ "visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
606
+ "visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
607
+ "visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
608
+ "visual.blocks.28.norm1.weight": "model-00001-of-00004.safetensors",
609
+ "visual.blocks.28.norm2.weight": "model-00001-of-00004.safetensors",
610
+ "visual.blocks.29.attn.proj.bias": "model-00001-of-00004.safetensors",
611
+ "visual.blocks.29.attn.proj.weight": "model-00001-of-00004.safetensors",
612
+ "visual.blocks.29.attn.qkv.bias": "model-00001-of-00004.safetensors",
613
+ "visual.blocks.29.attn.qkv.weight": "model-00001-of-00004.safetensors",
614
+ "visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
615
+ "visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
616
+ "visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
617
+ "visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
618
+ "visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
619
+ "visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
620
+ "visual.blocks.29.norm1.weight": "model-00001-of-00004.safetensors",
621
+ "visual.blocks.29.norm2.weight": "model-00001-of-00004.safetensors",
622
+ "visual.blocks.3.attn.proj.bias": "model-00001-of-00004.safetensors",
623
+ "visual.blocks.3.attn.proj.weight": "model-00001-of-00004.safetensors",
624
+ "visual.blocks.3.attn.qkv.bias": "model-00001-of-00004.safetensors",
625
+ "visual.blocks.3.attn.qkv.weight": "model-00001-of-00004.safetensors",
626
+ "visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
627
+ "visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
628
+ "visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
629
+ "visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
630
+ "visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
631
+ "visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
632
+ "visual.blocks.3.norm1.weight": "model-00001-of-00004.safetensors",
633
+ "visual.blocks.3.norm2.weight": "model-00001-of-00004.safetensors",
634
+ "visual.blocks.30.attn.proj.bias": "model-00001-of-00004.safetensors",
635
+ "visual.blocks.30.attn.proj.weight": "model-00001-of-00004.safetensors",
636
+ "visual.blocks.30.attn.qkv.bias": "model-00001-of-00004.safetensors",
637
+ "visual.blocks.30.attn.qkv.weight": "model-00001-of-00004.safetensors",
638
+ "visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
639
+ "visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
640
+ "visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
641
+ "visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
642
+ "visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
643
+ "visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
644
+ "visual.blocks.30.norm1.weight": "model-00001-of-00004.safetensors",
645
+ "visual.blocks.30.norm2.weight": "model-00001-of-00004.safetensors",
646
+ "visual.blocks.31.attn.proj.bias": "model-00001-of-00004.safetensors",
647
+ "visual.blocks.31.attn.proj.weight": "model-00001-of-00004.safetensors",
648
+ "visual.blocks.31.attn.qkv.bias": "model-00001-of-00004.safetensors",
649
+ "visual.blocks.31.attn.qkv.weight": "model-00001-of-00004.safetensors",
650
+ "visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
651
+ "visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
652
+ "visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
653
+ "visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
654
+ "visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
655
+ "visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
656
+ "visual.blocks.31.norm1.weight": "model-00001-of-00004.safetensors",
657
+ "visual.blocks.31.norm2.weight": "model-00001-of-00004.safetensors",
658
+ "visual.blocks.4.attn.proj.bias": "model-00001-of-00004.safetensors",
659
+ "visual.blocks.4.attn.proj.weight": "model-00001-of-00004.safetensors",
660
+ "visual.blocks.4.attn.qkv.bias": "model-00001-of-00004.safetensors",
661
+ "visual.blocks.4.attn.qkv.weight": "model-00001-of-00004.safetensors",
662
+ "visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
663
+ "visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
664
+ "visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
665
+ "visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
666
+ "visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
667
+ "visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
668
+ "visual.blocks.4.norm1.weight": "model-00001-of-00004.safetensors",
669
+ "visual.blocks.4.norm2.weight": "model-00001-of-00004.safetensors",
670
+ "visual.blocks.5.attn.proj.bias": "model-00001-of-00004.safetensors",
671
+ "visual.blocks.5.attn.proj.weight": "model-00001-of-00004.safetensors",
672
+ "visual.blocks.5.attn.qkv.bias": "model-00001-of-00004.safetensors",
673
+ "visual.blocks.5.attn.qkv.weight": "model-00001-of-00004.safetensors",
674
+ "visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
675
+ "visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
676
+ "visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
677
+ "visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
678
+ "visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
679
+ "visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
680
+ "visual.blocks.5.norm1.weight": "model-00001-of-00004.safetensors",
681
+ "visual.blocks.5.norm2.weight": "model-00001-of-00004.safetensors",
682
+ "visual.blocks.6.attn.proj.bias": "model-00001-of-00004.safetensors",
683
+ "visual.blocks.6.attn.proj.weight": "model-00001-of-00004.safetensors",
684
+ "visual.blocks.6.attn.qkv.bias": "model-00001-of-00004.safetensors",
685
+ "visual.blocks.6.attn.qkv.weight": "model-00001-of-00004.safetensors",
686
+ "visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
687
+ "visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
688
+ "visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
689
+ "visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
690
+ "visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
691
+ "visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
692
+ "visual.blocks.6.norm1.weight": "model-00001-of-00004.safetensors",
693
+ "visual.blocks.6.norm2.weight": "model-00001-of-00004.safetensors",
694
+ "visual.blocks.7.attn.proj.bias": "model-00001-of-00004.safetensors",
695
+ "visual.blocks.7.attn.proj.weight": "model-00001-of-00004.safetensors",
696
+ "visual.blocks.7.attn.qkv.bias": "model-00001-of-00004.safetensors",
697
+ "visual.blocks.7.attn.qkv.weight": "model-00001-of-00004.safetensors",
698
+ "visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
699
+ "visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
700
+ "visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
701
+ "visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
702
+ "visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
703
+ "visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
704
+ "visual.blocks.7.norm1.weight": "model-00001-of-00004.safetensors",
705
+ "visual.blocks.7.norm2.weight": "model-00001-of-00004.safetensors",
706
+ "visual.blocks.8.attn.proj.bias": "model-00001-of-00004.safetensors",
707
+ "visual.blocks.8.attn.proj.weight": "model-00001-of-00004.safetensors",
708
+ "visual.blocks.8.attn.qkv.bias": "model-00001-of-00004.safetensors",
709
+ "visual.blocks.8.attn.qkv.weight": "model-00001-of-00004.safetensors",
710
+ "visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
711
+ "visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
712
+ "visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
713
+ "visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
714
+ "visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
715
+ "visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
716
+ "visual.blocks.8.norm1.weight": "model-00001-of-00004.safetensors",
717
+ "visual.blocks.8.norm2.weight": "model-00001-of-00004.safetensors",
718
+ "visual.blocks.9.attn.proj.bias": "model-00001-of-00004.safetensors",
719
+ "visual.blocks.9.attn.proj.weight": "model-00001-of-00004.safetensors",
720
+ "visual.blocks.9.attn.qkv.bias": "model-00001-of-00004.safetensors",
721
+ "visual.blocks.9.attn.qkv.weight": "model-00001-of-00004.safetensors",
722
+ "visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00004.safetensors",
723
+ "visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
724
+ "visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00004.safetensors",
725
+ "visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
726
+ "visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00004.safetensors",
727
+ "visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
728
+ "visual.blocks.9.norm1.weight": "model-00001-of-00004.safetensors",
729
+ "visual.blocks.9.norm2.weight": "model-00001-of-00004.safetensors",
730
+ "visual.merger.ln_q.weight": "model-00001-of-00004.safetensors",
731
+ "visual.merger.mlp.0.bias": "model-00001-of-00004.safetensors",
732
+ "visual.merger.mlp.0.weight": "model-00001-of-00004.safetensors",
733
+ "visual.merger.mlp.2.bias": "model-00001-of-00004.safetensors",
734
+ "visual.merger.mlp.2.weight": "model-00001-of-00004.safetensors",
735
+ "visual.patch_embed.proj.weight": "model-00001-of-00004.safetensors"
736
+ }
737
+ }
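The weight map above is the tail of `model.safetensors.index.json`: it assigns every language-model layer, the vision tower (`visual.blocks.*`), the merger MLP, and the patch embedding to one of four safetensors shards. A minimal sketch of how the index can be inspected with the standard library alone, assuming a local clone of this repo as the working directory:

```python
# Count how many tensors each shard holds and look up a parameter's shard.
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

weight_map = index["weight_map"]            # {"param.name": "model-0000X-of-00004.safetensors", ...}
print(Counter(weight_map.values()))         # number of tensors stored in each shard file
print(weight_map["visual.patch_embed.proj.weight"])  # -> "model-00001-of-00004.safetensors"
```

`transformers` resolves this index automatically at `from_pretrained` time, so a sketch like this is only useful for debugging shard placement.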
preprocessor_config.json ADDED
@@ -0,0 +1,19 @@
1
+ {
2
+ "min_pixels": 3136,
3
+ "max_pixels": 12845056,
4
+ "patch_size": 14,
5
+ "temporal_patch_size": 2,
6
+ "merge_size": 2,
7
+ "image_mean": [
8
+ 0.48145466,
9
+ 0.4578275,
10
+ 0.40821073
11
+ ],
12
+ "image_std": [
13
+ 0.26862954,
14
+ 0.26130258,
15
+ 0.27577711
16
+ ],
17
+ "image_processor_type": "Qwen2VLImageProcessor",
18
+ "processor_class": "Qwen2_5_VLProcessor"
19
+ }
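`preprocessor_config.json` pins the Qwen2-VL image pipeline: each image is rescaled so its area stays between `min_pixels` (3136 = 56×56) and `max_pixels` (12845056), cut into 14×14 patches, and every 2×2 group of patches is merged into one visual token; the mean/std values are the CLIP normalization constants. A rough, illustrative estimate of the resulting visual token budget under these settings (the real `Qwen2VLImageProcessor` additionally snaps height and width to multiples of `patch_size * merge_size`, so treat this as an approximation only):

```python
# Illustrative approximation of the visual token count, not the processor's exact resize logic.
PATCH, MERGE = 14, 2
MIN_PIXELS, MAX_PIXELS = 3136, 12_845_056

def approx_visual_tokens(height: int, width: int) -> int:
    # Scale the image so its area lands inside [MIN_PIXELS, MAX_PIXELS].
    pixels = height * width
    if pixels > MAX_PIXELS or pixels < MIN_PIXELS:
        bound = MAX_PIXELS if pixels > MAX_PIXELS else MIN_PIXELS
        scale = (bound / pixels) ** 0.5
        height, width = int(height * scale), int(width * scale)
    # One token per merge x merge block of 14x14 patches.
    return (height // PATCH) * (width // PATCH) // (MERGE * MERGE)

print(approx_visual_tokens(1080, 1920))  # a 1080p frame stays well under max_pixels
```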
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|im_start|>",
4
+ "<|im_end|>",
5
+ "<|object_ref_start|>",
6
+ "<|object_ref_end|>",
7
+ "<|box_start|>",
8
+ "<|box_end|>",
9
+ "<|quad_start|>",
10
+ "<|quad_end|>",
11
+ "<|vision_start|>",
12
+ "<|vision_end|>",
13
+ "<|vision_pad|>",
14
+ "<|image_pad|>",
15
+ "<|video_pad|>"
16
+ ],
17
+ "eos_token": {
18
+ "content": "<|im_end|>",
19
+ "lstrip": false,
20
+ "normalized": false,
21
+ "rstrip": false,
22
+ "single_word": false
23
+ },
24
+ "pad_token": {
25
+ "content": "<|endoftext|>",
26
+ "lstrip": false,
27
+ "normalized": false,
28
+ "rstrip": false,
29
+ "single_word": false
30
+ }
31
+ }
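`special_tokens_map.json` sets `<|im_end|>` as the EOS token, `<|endoftext|>` as padding, and lists the chat/vision markers as additional special tokens. Once the tokenizer files below are in place, the setup can be sanity-checked with a few lines (the local path is a placeholder for wherever this repo is downloaded):

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./GLM_kwai_checkpoint")  # placeholder path

print(tok.eos_token, tok.pad_token)                 # <|im_end|> <|endoftext|>
print(tok.convert_tokens_to_ids("<|image_pad|>"))   # 151655
print(tok.additional_special_tokens[:3])            # ['<|im_start|>', '<|im_end|>', '<|object_ref_start|>']
```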
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
3
+ size 11421896
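`tokenizer.json` is stored through Git LFS, so the diff records only the pointer (sha256 oid and byte size). A downloaded copy can be checked against the pointer with the standard library:

```python
# Verify a local tokenizer.json against the LFS pointer recorded above.
import hashlib, os

path = "tokenizer.json"  # local copy after `git lfs pull` or a hub download
assert os.path.getsize(path) == 11421896
with open(path, "rb") as f:
    digest = hashlib.sha256(f.read()).hexdigest()
assert digest == "9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa"
print("tokenizer.json matches the LFS pointer")
```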
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ }
181
+ },
182
+ "additional_special_tokens": [
183
+ "<|im_start|>",
184
+ "<|im_end|>",
185
+ "<|object_ref_start|>",
186
+ "<|object_ref_end|>",
187
+ "<|box_start|>",
188
+ "<|box_end|>",
189
+ "<|quad_start|>",
190
+ "<|quad_end|>",
191
+ "<|vision_start|>",
192
+ "<|vision_end|>",
193
+ "<|vision_pad|>",
194
+ "<|image_pad|>",
195
+ "<|video_pad|>"
196
+ ],
197
+ "bos_token": null,
198
+ "clean_up_tokenization_spaces": false,
199
+ "eos_token": "<|im_end|>",
200
+ "errors": "replace",
201
+ "extra_special_tokens": {},
202
+ "model_max_length": 131072,
203
+ "pad_token": "<|endoftext|>",
204
+ "processor_class": "Qwen2_5_VLProcessor",
205
+ "split_special_tokens": false,
206
+ "tokenizer_class": "Qwen2Tokenizer",
207
+ "unk_token": null
208
+ }
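`tokenizer_config.json` maps the added token IDs 151643–151664 back to their string forms, caps `model_max_length` at 131072, and records the tokenizer class (`Qwen2Tokenizer`) and processor class (`Qwen2_5_VLProcessor`). Together with `preprocessor_config.json`, it is what `AutoProcessor` assembles into a single multimodal processor; a short sketch, again using a placeholder local path:

```python
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("./GLM_kwai_checkpoint")  # placeholder path

# Added tokens are matched atomically, so the vision markers should encode to single IDs.
ids = processor.tokenizer("<|vision_start|><|image_pad|><|vision_end|>")["input_ids"]
print(ids)  # expected: [151652, 151655, 151653]
```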
trainer_state.json ADDED
@@ -0,0 +1,2266 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 4.0,
6
+ "eval_steps": 500,
7
+ "global_step": 1392,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.0028735632183908046,
14
+ "grad_norm": 5.701423168182373,
15
+ "learning_rate": 1.1494252873563219e-07,
16
+ "loss": 0.8435418605804443,
17
+ "step": 1,
18
+ "token_acc": 0.7668058813433263
19
+ },
20
+ {
21
+ "epoch": 0.014367816091954023,
22
+ "grad_norm": 5.826282024383545,
23
+ "learning_rate": 5.747126436781609e-07,
24
+ "loss": 0.8689357042312622,
25
+ "step": 5,
26
+ "token_acc": 0.7603826289524434
27
+ },
28
+ {
29
+ "epoch": 0.028735632183908046,
30
+ "grad_norm": 4.886495113372803,
31
+ "learning_rate": 1.1494252873563219e-06,
32
+ "loss": 0.8401691436767578,
33
+ "step": 10,
34
+ "token_acc": 0.7658241045363977
35
+ },
36
+ {
37
+ "epoch": 0.04310344827586207,
38
+ "grad_norm": 4.146583080291748,
39
+ "learning_rate": 1.724137931034483e-06,
40
+ "loss": 0.7563614845275879,
41
+ "step": 15,
42
+ "token_acc": 0.7847171788125201
43
+ },
44
+ {
45
+ "epoch": 0.05747126436781609,
46
+ "grad_norm": 2.6654443740844727,
47
+ "learning_rate": 2.2988505747126437e-06,
48
+ "loss": 0.6338841438293457,
49
+ "step": 20,
50
+ "token_acc": 0.8138928795337552
51
+ },
52
+ {
53
+ "epoch": 0.07183908045977011,
54
+ "grad_norm": 1.9812933206558228,
55
+ "learning_rate": 2.8735632183908046e-06,
56
+ "loss": 0.5337696552276612,
57
+ "step": 25,
58
+ "token_acc": 0.8382339431104374
59
+ },
60
+ {
61
+ "epoch": 0.08620689655172414,
62
+ "grad_norm": 1.5043323040008545,
63
+ "learning_rate": 3.448275862068966e-06,
64
+ "loss": 0.4845158576965332,
65
+ "step": 30,
66
+ "token_acc": 0.8492833589736979
67
+ },
68
+ {
69
+ "epoch": 0.10057471264367816,
70
+ "grad_norm": 1.1498295068740845,
71
+ "learning_rate": 4.022988505747127e-06,
72
+ "loss": 0.43697123527526854,
73
+ "step": 35,
74
+ "token_acc": 0.8614095803431342
75
+ },
76
+ {
77
+ "epoch": 0.11494252873563218,
78
+ "grad_norm": 0.9884876608848572,
79
+ "learning_rate": 4.5977011494252875e-06,
80
+ "loss": 0.40511465072631836,
81
+ "step": 40,
82
+ "token_acc": 0.8688861103314138
83
+ },
84
+ {
85
+ "epoch": 0.12931034482758622,
86
+ "grad_norm": 0.9158347249031067,
87
+ "learning_rate": 5.172413793103449e-06,
88
+ "loss": 0.3865834951400757,
89
+ "step": 45,
90
+ "token_acc": 0.8749345692154775
91
+ },
92
+ {
93
+ "epoch": 0.14367816091954022,
94
+ "grad_norm": 0.8680062890052795,
95
+ "learning_rate": 5.747126436781609e-06,
96
+ "loss": 0.3814691543579102,
97
+ "step": 50,
98
+ "token_acc": 0.8764696489650478
99
+ },
100
+ {
101
+ "epoch": 0.15804597701149425,
102
+ "grad_norm": 0.8912498950958252,
103
+ "learning_rate": 6.321839080459771e-06,
104
+ "loss": 0.3704723358154297,
105
+ "step": 55,
106
+ "token_acc": 0.8794568983805955
107
+ },
108
+ {
109
+ "epoch": 0.1724137931034483,
110
+ "grad_norm": 0.8006322383880615,
111
+ "learning_rate": 6.896551724137932e-06,
112
+ "loss": 0.36577649116516114,
113
+ "step": 60,
114
+ "token_acc": 0.880862038046495
115
+ },
116
+ {
117
+ "epoch": 0.1867816091954023,
118
+ "grad_norm": 0.8191452622413635,
119
+ "learning_rate": 7.4712643678160925e-06,
120
+ "loss": 0.3519172430038452,
121
+ "step": 65,
122
+ "token_acc": 0.8835096940130558
123
+ },
124
+ {
125
+ "epoch": 0.20114942528735633,
126
+ "grad_norm": 0.8475598096847534,
127
+ "learning_rate": 8.045977011494253e-06,
128
+ "loss": 0.3494016170501709,
129
+ "step": 70,
130
+ "token_acc": 0.8846583471507112
131
+ },
132
+ {
133
+ "epoch": 0.21551724137931033,
134
+ "grad_norm": 0.8054814338684082,
135
+ "learning_rate": 8.620689655172414e-06,
136
+ "loss": 0.33314008712768556,
137
+ "step": 75,
138
+ "token_acc": 0.8894680986543279
139
+ },
140
+ {
141
+ "epoch": 0.22988505747126436,
142
+ "grad_norm": 0.8506744503974915,
143
+ "learning_rate": 9.195402298850575e-06,
144
+ "loss": 0.3356497764587402,
145
+ "step": 80,
146
+ "token_acc": 0.8887663199854837
147
+ },
148
+ {
149
+ "epoch": 0.2442528735632184,
150
+ "grad_norm": 0.9287813901901245,
151
+ "learning_rate": 9.770114942528738e-06,
152
+ "loss": 0.34115211963653563,
153
+ "step": 85,
154
+ "token_acc": 0.8870185172342049
155
+ },
156
+ {
157
+ "epoch": 0.25862068965517243,
158
+ "grad_norm": 0.803466260433197,
159
+ "learning_rate": 9.999918729041869e-06,
160
+ "loss": 0.32897319793701174,
161
+ "step": 90,
162
+ "token_acc": 0.8905416444262261
163
+ },
164
+ {
165
+ "epoch": 0.27298850574712646,
166
+ "grad_norm": 0.8131177425384521,
167
+ "learning_rate": 9.999422082754275e-06,
168
+ "loss": 0.34308557510375975,
169
+ "step": 95,
170
+ "token_acc": 0.8873970478126003
171
+ },
172
+ {
173
+ "epoch": 0.28735632183908044,
174
+ "grad_norm": 0.7532012462615967,
175
+ "learning_rate": 9.998473985504493e-06,
176
+ "loss": 0.32273378372192385,
177
+ "step": 100,
178
+ "token_acc": 0.8920797660374831
179
+ },
180
+ {
181
+ "epoch": 0.3017241379310345,
182
+ "grad_norm": 0.827369749546051,
183
+ "learning_rate": 9.9970745229063e-06,
184
+ "loss": 0.31739416122436526,
185
+ "step": 105,
186
+ "token_acc": 0.8942937324602432
187
+ },
188
+ {
189
+ "epoch": 0.3160919540229885,
190
+ "grad_norm": 0.839455783367157,
191
+ "learning_rate": 9.995223821332053e-06,
192
+ "loss": 0.30788021087646483,
193
+ "step": 110,
194
+ "token_acc": 0.8955595489385134
195
+ },
196
+ {
197
+ "epoch": 0.33045977011494254,
198
+ "grad_norm": 0.8043475151062012,
199
+ "learning_rate": 9.992922047901269e-06,
200
+ "loss": 0.3212079286575317,
201
+ "step": 115,
202
+ "token_acc": 0.8932447774433532
203
+ },
204
+ {
205
+ "epoch": 0.3448275862068966,
206
+ "grad_norm": 0.7365073561668396,
207
+ "learning_rate": 9.990169410465537e-06,
208
+ "loss": 0.326586389541626,
209
+ "step": 120,
210
+ "token_acc": 0.8911639743666266
211
+ },
212
+ {
213
+ "epoch": 0.35919540229885055,
214
+ "grad_norm": 0.7726923227310181,
215
+ "learning_rate": 9.986966157589751e-06,
216
+ "loss": 0.3208265781402588,
217
+ "step": 125,
218
+ "token_acc": 0.8929274689605703
219
+ },
220
+ {
221
+ "epoch": 0.3735632183908046,
222
+ "grad_norm": 0.7451789975166321,
223
+ "learning_rate": 9.983312578529662e-06,
224
+ "loss": 0.30309247970581055,
225
+ "step": 130,
226
+ "token_acc": 0.8975737660451567
227
+ },
228
+ {
229
+ "epoch": 0.3879310344827586,
230
+ "grad_norm": 0.7862595915794373,
231
+ "learning_rate": 9.979209003205761e-06,
232
+ "loss": 0.31304683685302737,
233
+ "step": 135,
234
+ "token_acc": 0.8953799106225167
235
+ },
236
+ {
237
+ "epoch": 0.40229885057471265,
238
+ "grad_norm": 0.7985342144966125,
239
+ "learning_rate": 9.974655802173482e-06,
240
+ "loss": 0.3002079963684082,
241
+ "step": 140,
242
+ "token_acc": 0.8988757282833291
243
+ },
244
+ {
245
+ "epoch": 0.4166666666666667,
246
+ "grad_norm": 0.7889167666435242,
247
+ "learning_rate": 9.969653386589749e-06,
248
+ "loss": 0.29943289756774905,
249
+ "step": 145,
250
+ "token_acc": 0.8984499426698959
251
+ },
252
+ {
253
+ "epoch": 0.43103448275862066,
254
+ "grad_norm": 0.7854207754135132,
255
+ "learning_rate": 9.964202208175835e-06,
256
+ "loss": 0.2975877285003662,
257
+ "step": 150,
258
+ "token_acc": 0.8993797084658467
259
+ },
260
+ {
261
+ "epoch": 0.4454022988505747,
262
+ "grad_norm": 0.7467015981674194,
263
+ "learning_rate": 9.958302759176586e-06,
264
+ "loss": 0.29912853240966797,
265
+ "step": 155,
266
+ "token_acc": 0.8986065273193986
267
+ },
268
+ {
269
+ "epoch": 0.45977011494252873,
270
+ "grad_norm": 0.7273268699645996,
271
+ "learning_rate": 9.951955572315962e-06,
272
+ "loss": 0.29766645431518557,
273
+ "step": 160,
274
+ "token_acc": 0.8993621484079499
275
+ },
276
+ {
277
+ "epoch": 0.47413793103448276,
278
+ "grad_norm": 0.7913146018981934,
279
+ "learning_rate": 9.945161220748928e-06,
280
+ "loss": 0.30915985107421873,
281
+ "step": 165,
282
+ "token_acc": 0.8956878620785929
283
+ },
284
+ {
285
+ "epoch": 0.4885057471264368,
286
+ "grad_norm": 0.7098119258880615,
287
+ "learning_rate": 9.937920318009718e-06,
288
+ "loss": 0.30398380756378174,
289
+ "step": 170,
290
+ "token_acc": 0.8969151414504762
291
+ },
292
+ {
293
+ "epoch": 0.5028735632183908,
294
+ "grad_norm": 0.7509280443191528,
295
+ "learning_rate": 9.9302335179564e-06,
296
+ "loss": 0.30012104511260984,
297
+ "step": 175,
298
+ "token_acc": 0.8990500247419358
299
+ },
300
+ {
301
+ "epoch": 0.5172413793103449,
302
+ "grad_norm": 0.8269632458686829,
303
+ "learning_rate": 9.922101514711866e-06,
304
+ "loss": 0.30164597034454343,
305
+ "step": 180,
306
+ "token_acc": 0.8985394983310564
307
+ },
308
+ {
309
+ "epoch": 0.5316091954022989,
310
+ "grad_norm": 0.8219606280326843,
311
+ "learning_rate": 9.913525042601125e-06,
312
+ "loss": 0.2887198686599731,
313
+ "step": 185,
314
+ "token_acc": 0.9024582554776668
315
+ },
316
+ {
317
+ "epoch": 0.5459770114942529,
318
+ "grad_norm": 0.7701752185821533,
319
+ "learning_rate": 9.904504876085011e-06,
320
+ "loss": 0.30989344120025636,
321
+ "step": 190,
322
+ "token_acc": 0.8968100683109144
323
+ },
324
+ {
325
+ "epoch": 0.5603448275862069,
326
+ "grad_norm": 0.7990351319313049,
327
+ "learning_rate": 9.89504182969024e-06,
328
+ "loss": 0.28575849533081055,
329
+ "step": 195,
330
+ "token_acc": 0.902413955213203
331
+ },
332
+ {
333
+ "epoch": 0.5747126436781609,
334
+ "grad_norm": 0.7648539543151855,
335
+ "learning_rate": 9.885136757935862e-06,
336
+ "loss": 0.2953649997711182,
337
+ "step": 200,
338
+ "token_acc": 0.8998516532925523
339
+ },
340
+ {
341
+ "epoch": 0.5890804597701149,
342
+ "grad_norm": 0.7957839965820312,
343
+ "learning_rate": 9.87479055525609e-06,
344
+ "loss": 0.2969522953033447,
345
+ "step": 205,
346
+ "token_acc": 0.9001964095546645
347
+ },
348
+ {
349
+ "epoch": 0.603448275862069,
350
+ "grad_norm": 0.6543287038803101,
351
+ "learning_rate": 9.864004155919545e-06,
352
+ "loss": 0.29447548389434813,
353
+ "step": 210,
354
+ "token_acc": 0.9012295897099171
355
+ },
356
+ {
357
+ "epoch": 0.617816091954023,
358
+ "grad_norm": 0.7653723359107971,
359
+ "learning_rate": 9.852778533944875e-06,
360
+ "loss": 0.28338687419891356,
361
+ "step": 215,
362
+ "token_acc": 0.9039403841235081
363
+ },
364
+ {
365
+ "epoch": 0.632183908045977,
366
+ "grad_norm": 0.7216730713844299,
367
+ "learning_rate": 9.841114703012817e-06,
368
+ "loss": 0.27729272842407227,
369
+ "step": 220,
370
+ "token_acc": 0.9057350722299407
371
+ },
372
+ {
373
+ "epoch": 0.646551724137931,
374
+ "grad_norm": 0.6722442507743835,
375
+ "learning_rate": 9.829013716374647e-06,
376
+ "loss": 0.28614675998687744,
377
+ "step": 225,
378
+ "token_acc": 0.902680971254157
379
+ },
380
+ {
381
+ "epoch": 0.6609195402298851,
382
+ "grad_norm": 0.808274507522583,
383
+ "learning_rate": 9.81647666675708e-06,
384
+ "loss": 0.291225266456604,
385
+ "step": 230,
386
+ "token_acc": 0.9012525023959841
387
+ },
388
+ {
389
+ "epoch": 0.6752873563218391,
390
+ "grad_norm": 0.7579320669174194,
391
+ "learning_rate": 9.803504686263586e-06,
392
+ "loss": 0.282744312286377,
393
+ "step": 235,
394
+ "token_acc": 0.9036740387556348
395
+ },
396
+ {
397
+ "epoch": 0.6896551724137931,
398
+ "grad_norm": 0.6876379251480103,
399
+ "learning_rate": 9.790098946272177e-06,
400
+ "loss": 0.29027657508850097,
401
+ "step": 240,
402
+ "token_acc": 0.9016263972016184
403
+ },
404
+ {
405
+ "epoch": 0.7040229885057471,
406
+ "grad_norm": 0.7337082624435425,
407
+ "learning_rate": 9.776260657329614e-06,
408
+ "loss": 0.28811571598052976,
409
+ "step": 245,
410
+ "token_acc": 0.901976516465219
411
+ },
412
+ {
413
+ "epoch": 0.7183908045977011,
414
+ "grad_norm": 0.7502078413963318,
415
+ "learning_rate": 9.761991069042106e-06,
416
+ "loss": 0.2817479372024536,
417
+ "step": 250,
418
+ "token_acc": 0.9038514202476329
419
+ },
420
+ {
421
+ "epoch": 0.7327586206896551,
422
+ "grad_norm": 0.7350848913192749,
423
+ "learning_rate": 9.747291469962454e-06,
424
+ "loss": 0.2851818561553955,
425
+ "step": 255,
426
+ "token_acc": 0.9034762449946743
427
+ },
428
+ {
429
+ "epoch": 0.7471264367816092,
430
+ "grad_norm": 0.745708167552948,
431
+ "learning_rate": 9.732163187473716e-06,
432
+ "loss": 0.2827833414077759,
433
+ "step": 260,
434
+ "token_acc": 0.9033941118722216
435
+ },
436
+ {
437
+ "epoch": 0.7614942528735632,
438
+ "grad_norm": 0.7603588104248047,
439
+ "learning_rate": 9.716607587669325e-06,
440
+ "loss": 0.29482879638671877,
441
+ "step": 265,
442
+ "token_acc": 0.9005131991262569
443
+ },
444
+ {
445
+ "epoch": 0.7758620689655172,
446
+ "grad_norm": 0.7007916569709778,
447
+ "learning_rate": 9.700626075229739e-06,
448
+ "loss": 0.2890280246734619,
449
+ "step": 270,
450
+ "token_acc": 0.9017369981167822
451
+ },
452
+ {
453
+ "epoch": 0.7902298850574713,
454
+ "grad_norm": 0.6991475820541382,
455
+ "learning_rate": 9.684220093295596e-06,
456
+ "loss": 0.2728667020797729,
457
+ "step": 275,
458
+ "token_acc": 0.9074207128985996
459
+ },
460
+ {
461
+ "epoch": 0.8045977011494253,
462
+ "grad_norm": 0.7430739402770996,
463
+ "learning_rate": 9.667391123337393e-06,
464
+ "loss": 0.2816755771636963,
465
+ "step": 280,
466
+ "token_acc": 0.9039949641164945
467
+ },
468
+ {
469
+ "epoch": 0.8189655172413793,
470
+ "grad_norm": 0.7707583904266357,
471
+ "learning_rate": 9.650140685021716e-06,
472
+ "loss": 0.279411244392395,
473
+ "step": 285,
474
+ "token_acc": 0.904840837195186
475
+ },
476
+ {
477
+ "epoch": 0.8333333333333334,
478
+ "grad_norm": 0.778766930103302,
479
+ "learning_rate": 9.632470336074009e-06,
480
+ "loss": 0.2768320798873901,
481
+ "step": 290,
482
+ "token_acc": 0.9054280611811014
483
+ },
484
+ {
485
+ "epoch": 0.8477011494252874,
486
+ "grad_norm": 0.8028104305267334,
487
+ "learning_rate": 9.614381672137907e-06,
488
+ "loss": 0.2814202070236206,
489
+ "step": 295,
490
+ "token_acc": 0.9039378147324043
491
+ },
492
+ {
493
+ "epoch": 0.8620689655172413,
494
+ "grad_norm": 0.7755345106124878,
495
+ "learning_rate": 9.595876326631155e-06,
496
+ "loss": 0.28777663707733153,
497
+ "step": 300,
498
+ "token_acc": 0.9027070136785091
499
+ },
500
+ {
501
+ "epoch": 0.8764367816091954,
502
+ "grad_norm": 0.6636546850204468,
503
+ "learning_rate": 9.5769559705981e-06,
504
+ "loss": 0.2834830045700073,
505
+ "step": 305,
506
+ "token_acc": 0.9043677742168689
507
+ },
508
+ {
509
+ "epoch": 0.8908045977011494,
510
+ "grad_norm": 0.6981901526451111,
511
+ "learning_rate": 9.557622312558813e-06,
512
+ "loss": 0.2755718469619751,
513
+ "step": 310,
514
+ "token_acc": 0.9058268683696704
515
+ },
516
+ {
517
+ "epoch": 0.9051724137931034,
518
+ "grad_norm": 0.7137438654899597,
519
+ "learning_rate": 9.537877098354787e-06,
520
+ "loss": 0.27501988410949707,
521
+ "step": 315,
522
+ "token_acc": 0.9055966179120146
523
+ },
524
+ {
525
+ "epoch": 0.9195402298850575,
526
+ "grad_norm": 0.6470423340797424,
527
+ "learning_rate": 9.517722110991295e-06,
528
+ "loss": 0.2749337673187256,
529
+ "step": 320,
530
+ "token_acc": 0.9056260532474261
531
+ },
532
+ {
533
+ "epoch": 0.9339080459770115,
534
+ "grad_norm": 0.7691536545753479,
535
+ "learning_rate": 9.49715917047639e-06,
536
+ "loss": 0.2793783187866211,
537
+ "step": 325,
538
+ "token_acc": 0.9046256514742141
539
+ },
540
+ {
541
+ "epoch": 0.9482758620689655,
542
+ "grad_norm": 0.7594360113143921,
543
+ "learning_rate": 9.47619013365655e-06,
544
+ "loss": 0.28690061569213865,
545
+ "step": 330,
546
+ "token_acc": 0.9025547686081296
547
+ },
548
+ {
549
+ "epoch": 0.9626436781609196,
550
+ "grad_norm": 0.6703284978866577,
551
+ "learning_rate": 9.454816894049002e-06,
552
+ "loss": 0.27643957138061526,
553
+ "step": 335,
554
+ "token_acc": 0.9067482976841635
555
+ },
556
+ {
557
+ "epoch": 0.9770114942528736,
558
+ "grad_norm": 0.6189959645271301,
559
+ "learning_rate": 9.43304138167074e-06,
560
+ "loss": 0.2853975772857666,
561
+ "step": 340,
562
+ "token_acc": 0.9036247453748919
563
+ },
564
+ {
565
+ "epoch": 0.9913793103448276,
566
+ "grad_norm": 0.6638550162315369,
567
+ "learning_rate": 9.410865562864247e-06,
568
+ "loss": 0.2738370418548584,
569
+ "step": 345,
570
+ "token_acc": 0.9068261216206421
571
+ },
572
+ {
573
+ "epoch": 1.0057471264367817,
574
+ "grad_norm": 0.6068477034568787,
575
+ "learning_rate": 9.388291440119924e-06,
576
+ "loss": 0.2589106559753418,
577
+ "step": 350,
578
+ "token_acc": 0.9109583410112121
579
+ },
580
+ {
581
+ "epoch": 1.0201149425287357,
582
+ "grad_norm": 0.732253909111023,
583
+ "learning_rate": 9.365321051895268e-06,
584
+ "loss": 0.2238328456878662,
585
+ "step": 355,
586
+ "token_acc": 0.9216384658610375
587
+ },
588
+ {
589
+ "epoch": 1.0344827586206897,
590
+ "grad_norm": 0.6326807141304016,
591
+ "learning_rate": 9.341956472430803e-06,
592
+ "loss": 0.22989251613616943,
593
+ "step": 360,
594
+ "token_acc": 0.9194194964787729
595
+ },
596
+ {
597
+ "epoch": 1.0488505747126438,
598
+ "grad_norm": 0.7311187982559204,
599
+ "learning_rate": 9.318199811562762e-06,
600
+ "loss": 0.2297137975692749,
601
+ "step": 365,
602
+ "token_acc": 0.9192324609632725
603
+ },
604
+ {
605
+ "epoch": 1.0632183908045978,
606
+ "grad_norm": 0.586195707321167,
607
+ "learning_rate": 9.294053214532588e-06,
608
+ "loss": 0.22300987243652343,
609
+ "step": 370,
610
+ "token_acc": 0.9219570625633465
611
+ },
612
+ {
613
+ "epoch": 1.0775862068965518,
614
+ "grad_norm": 0.6735196709632874,
615
+ "learning_rate": 9.269518861793193e-06,
616
+ "loss": 0.22916293144226074,
617
+ "step": 375,
618
+ "token_acc": 0.9195292685994776
619
+ },
620
+ {
621
+ "epoch": 1.0919540229885056,
622
+ "grad_norm": 0.6448853015899658,
623
+ "learning_rate": 9.244598968812081e-06,
624
+ "loss": 0.22288815975189208,
625
+ "step": 380,
626
+ "token_acc": 0.9211659461783944
627
+ },
628
+ {
629
+ "epoch": 1.1063218390804597,
630
+ "grad_norm": 0.6758164167404175,
631
+ "learning_rate": 9.219295785871278e-06,
632
+ "loss": 0.2312171459197998,
633
+ "step": 385,
634
+ "token_acc": 0.9189928032918805
635
+ },
636
+ {
637
+ "epoch": 1.1206896551724137,
638
+ "grad_norm": 0.6390823721885681,
639
+ "learning_rate": 9.193611597864138e-06,
640
+ "loss": 0.22460539340972902,
641
+ "step": 390,
642
+ "token_acc": 0.9209666270356955
643
+ },
644
+ {
645
+ "epoch": 1.1350574712643677,
646
+ "grad_norm": 0.5888715386390686,
647
+ "learning_rate": 9.16754872408901e-06,
648
+ "loss": 0.22736835479736328,
649
+ "step": 395,
650
+ "token_acc": 0.9198526581382215
651
+ },
652
+ {
653
+ "epoch": 1.1494252873563218,
654
+ "grad_norm": 0.6396211981773376,
655
+ "learning_rate": 9.14110951803981e-06,
656
+ "loss": 0.22712435722351074,
657
+ "step": 400,
658
+ "token_acc": 0.9198965661702035
659
+ },
660
+ {
661
+ "epoch": 1.1637931034482758,
662
+ "grad_norm": 0.6098877787590027,
663
+ "learning_rate": 9.114296367193491e-06,
664
+ "loss": 0.22663559913635253,
665
+ "step": 405,
666
+ "token_acc": 0.9205513011598225
667
+ },
668
+ {
669
+ "epoch": 1.1781609195402298,
670
+ "grad_norm": 0.6394032835960388,
671
+ "learning_rate": 9.08711169279446e-06,
672
+ "loss": 0.2287325382232666,
673
+ "step": 410,
674
+ "token_acc": 0.9189529330228593
675
+ },
676
+ {
677
+ "epoch": 1.1925287356321839,
678
+ "grad_norm": 0.644769549369812,
679
+ "learning_rate": 9.059557949635931e-06,
680
+ "loss": 0.22587807178497316,
681
+ "step": 415,
682
+ "token_acc": 0.9206358036258022
683
+ },
684
+ {
685
+ "epoch": 1.206896551724138,
686
+ "grad_norm": 0.6251801252365112,
687
+ "learning_rate": 9.031637625838265e-06,
688
+ "loss": 0.23158540725708007,
689
+ "step": 420,
690
+ "token_acc": 0.9189880409547674
691
+ },
692
+ {
693
+ "epoch": 1.221264367816092,
694
+ "grad_norm": 0.7210418581962585,
695
+ "learning_rate": 9.003353242624279e-06,
696
+ "loss": 0.22089509963989257,
697
+ "step": 425,
698
+ "token_acc": 0.9217030262388175
699
+ },
700
+ {
701
+ "epoch": 1.235632183908046,
702
+ "grad_norm": 0.6865154504776001,
703
+ "learning_rate": 8.97470735409159e-06,
704
+ "loss": 0.2276074171066284,
705
+ "step": 430,
706
+ "token_acc": 0.9203275653350423
707
+ },
708
+ {
709
+ "epoch": 1.25,
710
+ "grad_norm": 0.6921049356460571,
711
+ "learning_rate": 8.94570254698197e-06,
712
+ "loss": 0.23009068965911866,
713
+ "step": 435,
714
+ "token_acc": 0.9185853649836814
715
+ },
716
+ {
717
+ "epoch": 1.264367816091954,
718
+ "grad_norm": 0.6811380982398987,
719
+ "learning_rate": 8.916341440447763e-06,
720
+ "loss": 0.2254939556121826,
721
+ "step": 440,
722
+ "token_acc": 0.9204800159783405
723
+ },
724
+ {
725
+ "epoch": 1.278735632183908,
726
+ "grad_norm": 0.6077721118927002,
727
+ "learning_rate": 8.886626685815381e-06,
728
+ "loss": 0.22704215049743653,
729
+ "step": 445,
730
+ "token_acc": 0.9194910501996322
731
+ },
732
+ {
733
+ "epoch": 1.293103448275862,
734
+ "grad_norm": 0.6493836045265198,
735
+ "learning_rate": 8.856560966345878e-06,
736
+ "loss": 0.2236196756362915,
737
+ "step": 450,
738
+ "token_acc": 0.9211338336634768
739
+ },
740
+ {
741
+ "epoch": 1.3074712643678161,
742
+ "grad_norm": 0.629676878452301,
743
+ "learning_rate": 8.826146996992652e-06,
744
+ "loss": 0.22625925540924072,
745
+ "step": 455,
746
+ "token_acc": 0.9205657286477268
747
+ },
748
+ {
749
+ "epoch": 1.3218390804597702,
750
+ "grad_norm": 0.6058956980705261,
751
+ "learning_rate": 8.795387524156288e-06,
752
+ "loss": 0.2242074966430664,
753
+ "step": 460,
754
+ "token_acc": 0.9205807027643003
755
+ },
756
+ {
757
+ "epoch": 1.3362068965517242,
758
+ "grad_norm": 0.6739834547042847,
759
+ "learning_rate": 8.764285325436558e-06,
760
+ "loss": 0.22307302951812744,
761
+ "step": 465,
762
+ "token_acc": 0.9218346894929089
763
+ },
764
+ {
765
+ "epoch": 1.3505747126436782,
766
+ "grad_norm": 0.6315281391143799,
767
+ "learning_rate": 8.73284320938158e-06,
768
+ "loss": 0.22982447147369384,
769
+ "step": 470,
770
+ "token_acc": 0.9196403357167869
771
+ },
772
+ {
773
+ "epoch": 1.3649425287356323,
774
+ "grad_norm": 0.6005785465240479,
775
+ "learning_rate": 8.701064015234244e-06,
776
+ "loss": 0.21757099628448487,
777
+ "step": 475,
778
+ "token_acc": 0.922890382253053
779
+ },
780
+ {
781
+ "epoch": 1.3793103448275863,
782
+ "grad_norm": 0.6035122871398926,
783
+ "learning_rate": 8.668950612675784e-06,
784
+ "loss": 0.23044428825378419,
785
+ "step": 480,
786
+ "token_acc": 0.9192890824546467
787
+ },
788
+ {
789
+ "epoch": 1.3936781609195403,
790
+ "grad_norm": 0.5926746726036072,
791
+ "learning_rate": 8.636505901566678e-06,
792
+ "loss": 0.21679251194000243,
793
+ "step": 485,
794
+ "token_acc": 0.9236747555910407
795
+ },
796
+ {
797
+ "epoch": 1.4080459770114944,
798
+ "grad_norm": 0.6238638758659363,
799
+ "learning_rate": 8.603732811684768e-06,
800
+ "loss": 0.2293393850326538,
801
+ "step": 490,
802
+ "token_acc": 0.9200520664579876
803
+ },
804
+ {
805
+ "epoch": 1.4224137931034484,
806
+ "grad_norm": 0.6290664076805115,
807
+ "learning_rate": 8.570634302460707e-06,
808
+ "loss": 0.22460541725158692,
809
+ "step": 495,
810
+ "token_acc": 0.9209492975317076
811
+ },
812
+ {
813
+ "epoch": 1.4367816091954024,
814
+ "grad_norm": 0.6945292353630066,
815
+ "learning_rate": 8.537213362710722e-06,
816
+ "loss": 0.22157375812530516,
817
+ "step": 500,
818
+ "token_acc": 0.9212576057473792
819
+ },
820
+ {
821
+ "epoch": 1.4511494252873562,
822
+ "grad_norm": 0.6543501019477844,
823
+ "learning_rate": 8.503473010366713e-06,
824
+ "loss": 0.23016009330749512,
825
+ "step": 505,
826
+ "token_acc": 0.9189749013260724
827
+ },
828
+ {
829
+ "epoch": 1.4655172413793103,
830
+ "grad_norm": 0.7231053709983826,
831
+ "learning_rate": 8.469416292203747e-06,
832
+ "loss": 0.22193603515625,
833
+ "step": 510,
834
+ "token_acc": 0.9215068557863781
835
+ },
836
+ {
837
+ "epoch": 1.4798850574712643,
838
+ "grad_norm": 0.611612856388092,
839
+ "learning_rate": 8.435046283564918e-06,
840
+ "loss": 0.22004530429840088,
841
+ "step": 515,
842
+ "token_acc": 0.9220109589346533
843
+ },
844
+ {
845
+ "epoch": 1.4942528735632183,
846
+ "grad_norm": 0.6593984961509705,
847
+ "learning_rate": 8.400366088083648e-06,
848
+ "loss": 0.22890899181365967,
849
+ "step": 520,
850
+ "token_acc": 0.9195443259821819
851
+ },
852
+ {
853
+ "epoch": 1.5086206896551724,
854
+ "grad_norm": 0.572110652923584,
855
+ "learning_rate": 8.365378837403429e-06,
856
+ "loss": 0.2156899929046631,
857
+ "step": 525,
858
+ "token_acc": 0.9235853204215982
859
+ },
860
+ {
861
+ "epoch": 1.5229885057471264,
862
+ "grad_norm": 0.620583176612854,
863
+ "learning_rate": 8.330087690895025e-06,
864
+ "loss": 0.22582778930664063,
865
+ "step": 530,
866
+ "token_acc": 0.9202767452825017
867
+ },
868
+ {
869
+ "epoch": 1.5373563218390804,
870
+ "grad_norm": 0.598166823387146,
871
+ "learning_rate": 8.294495835371192e-06,
872
+ "loss": 0.2208933115005493,
873
+ "step": 535,
874
+ "token_acc": 0.9222432071023863
875
+ },
876
+ {
877
+ "epoch": 1.5517241379310345,
878
+ "grad_norm": 0.6030401587486267,
879
+ "learning_rate": 8.258606484798896e-06,
880
+ "loss": 0.22270665168762208,
881
+ "step": 540,
882
+ "token_acc": 0.9211812235475183
883
+ },
884
+ {
885
+ "epoch": 1.5660919540229885,
886
+ "grad_norm": 0.6478816270828247,
887
+ "learning_rate": 8.22242288000909e-06,
888
+ "loss": 0.22272849082946777,
889
+ "step": 545,
890
+ "token_acc": 0.920936990781865
891
+ },
892
+ {
893
+ "epoch": 1.5804597701149425,
894
+ "grad_norm": 0.6298638582229614,
895
+ "learning_rate": 8.185948288404073e-06,
896
+ "loss": 0.22361083030700685,
897
+ "step": 550,
898
+ "token_acc": 0.9213775438736918
899
+ },
900
+ {
901
+ "epoch": 1.5948275862068966,
902
+ "grad_norm": 0.6502262949943542,
903
+ "learning_rate": 8.149186003662437e-06,
904
+ "loss": 0.22431838512420654,
905
+ "step": 555,
906
+ "token_acc": 0.9212732332428626
907
+ },
908
+ {
909
+ "epoch": 1.6091954022988506,
910
+ "grad_norm": 0.5969738364219666,
911
+ "learning_rate": 8.112139345441638e-06,
912
+ "loss": 0.21729083061218263,
913
+ "step": 560,
914
+ "token_acc": 0.9223729651179893
915
+ },
916
+ {
917
+ "epoch": 1.6235632183908046,
918
+ "grad_norm": 0.5870260000228882,
919
+ "learning_rate": 8.074811659078247e-06,
920
+ "loss": 0.22120294570922852,
921
+ "step": 565,
922
+ "token_acc": 0.9222056683329211
923
+ },
924
+ {
925
+ "epoch": 1.6379310344827587,
926
+ "grad_norm": 0.6318520307540894,
927
+ "learning_rate": 8.037206315285842e-06,
928
+ "loss": 0.23049440383911132,
929
+ "step": 570,
930
+ "token_acc": 0.9191351729034366
931
+ },
932
+ {
933
+ "epoch": 1.6522988505747125,
934
+ "grad_norm": 0.6057857275009155,
935
+ "learning_rate": 7.999326709850653e-06,
936
+ "loss": 0.22965495586395263,
937
+ "step": 575,
938
+ "token_acc": 0.9194872224082133
939
+ },
940
+ {
941
+ "epoch": 1.6666666666666665,
942
+ "grad_norm": 0.6213001012802124,
943
+ "learning_rate": 7.961176263324902e-06,
944
+ "loss": 0.2238182544708252,
945
+ "step": 580,
946
+ "token_acc": 0.9214975345535017
947
+ },
948
+ {
949
+ "epoch": 1.6810344827586206,
950
+ "grad_norm": 0.6396650671958923,
951
+ "learning_rate": 7.92275842071793e-06,
952
+ "loss": 0.22597620487213135,
953
+ "step": 585,
954
+ "token_acc": 0.9198568056786376
955
+ },
956
+ {
957
+ "epoch": 1.6954022988505746,
958
+ "grad_norm": 0.634431779384613,
959
+ "learning_rate": 7.88407665118512e-06,
960
+ "loss": 0.22038779258728028,
961
+ "step": 590,
962
+ "token_acc": 0.922038028258472
963
+ },
964
+ {
965
+ "epoch": 1.7097701149425286,
966
+ "grad_norm": 0.6329310536384583,
967
+ "learning_rate": 7.845134447714614e-06,
968
+ "loss": 0.22400221824645997,
969
+ "step": 595,
970
+ "token_acc": 0.9209220975760725
971
+ },
972
+ {
973
+ "epoch": 1.7241379310344827,
974
+ "grad_norm": 0.6118066310882568,
975
+ "learning_rate": 7.805935326811913e-06,
976
+ "loss": 0.22857046127319336,
977
+ "step": 600,
978
+ "token_acc": 0.91976195082606
979
+ },
980
+ {
981
+ "epoch": 1.7385057471264367,
982
+ "grad_norm": 0.6650734543800354,
983
+ "learning_rate": 7.766482828182314e-06,
984
+ "loss": 0.23167500495910645,
985
+ "step": 605,
986
+ "token_acc": 0.9195267958122185
987
+ },
988
+ {
989
+ "epoch": 1.7528735632183907,
990
+ "grad_norm": 0.6366269588470459,
991
+ "learning_rate": 7.726780514411287e-06,
992
+ "loss": 0.2227400779724121,
993
+ "step": 610,
994
+ "token_acc": 0.9218334909271017
995
+ },
996
+ {
997
+ "epoch": 1.7672413793103448,
998
+ "grad_norm": 0.6021769642829895,
999
+ "learning_rate": 7.686831970642768e-06,
1000
+ "loss": 0.2297227144241333,
1001
+ "step": 615,
1002
+ "token_acc": 0.919332224722032
1003
+ },
1004
+ {
1005
+ "epoch": 1.7816091954022988,
1006
+ "grad_norm": 0.615818440914154,
1007
+ "learning_rate": 7.64664080425541e-06,
1008
+ "loss": 0.2171243190765381,
1009
+ "step": 620,
1010
+ "token_acc": 0.9234899290175136
1011
+ },
1012
+ {
1013
+ "epoch": 1.7959770114942528,
1014
+ "grad_norm": 0.6163890957832336,
1015
+ "learning_rate": 7.606210644536851e-06,
1016
+ "loss": 0.2218536376953125,
1017
+ "step": 625,
1018
+ "token_acc": 0.9216376121035887
1019
+ },
1020
+ {
1021
+ "epoch": 1.8103448275862069,
1022
+ "grad_norm": 0.6491876244544983,
1023
+ "learning_rate": 7.565545142355971e-06,
1024
+ "loss": 0.22541501522064208,
1025
+ "step": 630,
1026
+ "token_acc": 0.9204486099781094
1027
+ },
1028
+ {
1029
+ "epoch": 1.8247126436781609,
1030
+ "grad_norm": 0.6006410121917725,
1031
+ "learning_rate": 7.524647969833223e-06,
1032
+ "loss": 0.22348830699920655,
1033
+ "step": 635,
1034
+ "token_acc": 0.9213447336029541
1035
+ },
1036
+ {
1037
+ "epoch": 1.839080459770115,
1038
+ "grad_norm": 0.6636133193969727,
1039
+ "learning_rate": 7.483522820009037e-06,
1040
+ "loss": 0.2237182855606079,
1041
+ "step": 640,
1042
+ "token_acc": 0.9212789131646073
1043
+ },
1044
+ {
1045
+ "epoch": 1.853448275862069,
1046
+ "grad_norm": 0.6226218938827515,
1047
+ "learning_rate": 7.442173406510342e-06,
1048
+ "loss": 0.22558093070983887,
1049
+ "step": 645,
1050
+ "token_acc": 0.9203516760456492
1051
+ },
1052
+ {
1053
+ "epoch": 1.867816091954023,
1054
+ "grad_norm": 0.5878725051879883,
1055
+ "learning_rate": 7.400603463215213e-06,
1056
+ "loss": 0.218361759185791,
1057
+ "step": 650,
1058
+ "token_acc": 0.9226935222326534
1059
+ },
1060
+ {
1061
+ "epoch": 1.882183908045977,
1062
+ "grad_norm": 0.6098897457122803,
1063
+ "learning_rate": 7.358816743915704e-06,
1064
+ "loss": 0.21668548583984376,
1065
+ "step": 655,
1066
+ "token_acc": 0.9230603341503006
1067
+ },
1068
+ {
1069
+ "epoch": 1.896551724137931,
1070
+ "grad_norm": 0.6154414415359497,
1071
+ "learning_rate": 7.3168170219788836e-06,
1072
+ "loss": 0.22438428401947022,
1073
+ "step": 660,
1074
+ "token_acc": 0.9209099064251443
1075
+ },
1076
+ {
1077
+ "epoch": 1.910919540229885,
1078
+ "grad_norm": 0.5887994170188904,
1079
+ "learning_rate": 7.274608090006088e-06,
1080
+ "loss": 0.21097733974456787,
1081
+ "step": 665,
1082
+ "token_acc": 0.9246009629067451
1083
+ },
1084
+ {
1085
+ "epoch": 1.9252873563218391,
1086
+ "grad_norm": 0.5934816002845764,
1087
+ "learning_rate": 7.232193759490452e-06,
1088
+ "loss": 0.22112298011779785,
1089
+ "step": 670,
1090
+ "token_acc": 0.9216444652064286
1091
+ },
1092
+ {
1093
+ "epoch": 1.9396551724137931,
1094
+ "grad_norm": 0.5901604890823364,
1095
+ "learning_rate": 7.189577860472732e-06,
1096
+ "loss": 0.2318714141845703,
1097
+ "step": 675,
1098
+ "token_acc": 0.9180115782893485
1099
+ },
1100
+ {
1101
+ "epoch": 1.9540229885057472,
1102
+ "grad_norm": 0.6483431458473206,
1103
+ "learning_rate": 7.146764241195445e-06,
1104
+ "loss": 0.2210935115814209,
1105
+ "step": 680,
1106
+ "token_acc": 0.9222021475228559
1107
+ },
1108
+ {
1109
+ "epoch": 1.9683908045977012,
1110
+ "grad_norm": 0.6194449067115784,
1111
+ "learning_rate": 7.103756767755368e-06,
1112
+ "loss": 0.2205436944961548,
1113
+ "step": 685,
1114
+ "token_acc": 0.9223388322921946
1115
+ },
1116
+ {
1117
+ "epoch": 1.9827586206896552,
1118
+ "grad_norm": 0.599036455154419,
1119
+ "learning_rate": 7.060559323754436e-06,
1120
+ "loss": 0.21962616443634034,
1121
+ "step": 690,
1122
+ "token_acc": 0.9221123108771584
1123
+ },
1124
+ {
1125
+ "epoch": 1.9971264367816093,
1126
+ "grad_norm": 0.6493648886680603,
1127
+ "learning_rate": 7.017175809949044e-06,
1128
+ "loss": 0.21666316986083983,
1129
+ "step": 695,
1130
+ "token_acc": 0.9239043058856424
1131
+ },
1132
+ {
1133
+ "epoch": 2.0114942528735633,
1134
+ "grad_norm": 0.5151979327201843,
1135
+ "learning_rate": 6.9736101438978055e-06,
1136
+ "loss": 0.17990593910217284,
1137
+ "step": 700,
1138
+ "token_acc": 0.9367058259410214
1139
+ },
1140
+ {
1141
+ "epoch": 2.0258620689655173,
1142
+ "grad_norm": 0.6045121550559998,
1143
+ "learning_rate": 6.929866259607798e-06,
1144
+ "loss": 0.16274212598800658,
1145
+ "step": 705,
1146
+ "token_acc": 0.9415297555911625
1147
+ },
1148
+ {
1149
+ "epoch": 2.0402298850574714,
1150
+ "grad_norm": 0.5994232296943665,
1151
+ "learning_rate": 6.885948107179321e-06,
1152
+ "loss": 0.15833277702331544,
1153
+ "step": 710,
1154
+ "token_acc": 0.9430992210408701
1155
+ },
1156
+ {
1157
+ "epoch": 2.0545977011494254,
1158
+ "grad_norm": 0.5645899176597595,
1159
+ "learning_rate": 6.841859652449191e-06,
1160
+ "loss": 0.15611765384674073,
1161
+ "step": 715,
1162
+ "token_acc": 0.9431611196378555
1163
+ },
1164
+ {
1165
+ "epoch": 2.0689655172413794,
1166
+ "grad_norm": 0.5400298833847046,
1167
+ "learning_rate": 6.797604876632633e-06,
1168
+ "loss": 0.15789283514022828,
1169
+ "step": 720,
1170
+ "token_acc": 0.9429063148389799
1171
+ },
1172
+ {
1173
+ "epoch": 2.0833333333333335,
1174
+ "grad_norm": 0.5909697413444519,
1175
+ "learning_rate": 6.753187775963773e-06,
1176
+ "loss": 0.16772289276123048,
1177
+ "step": 725,
1178
+ "token_acc": 0.9394692623007922
1179
+ },
1180
+ {
1181
+ "epoch": 2.0977011494252875,
1182
+ "grad_norm": 0.5501136779785156,
1183
+ "learning_rate": 6.708612361334767e-06,
1184
+ "loss": 0.16173641681671141,
1185
+ "step": 730,
1186
+ "token_acc": 0.9413316946211683
1187
+ },
1188
+ {
1189
+ "epoch": 2.1120689655172415,
1190
+ "grad_norm": 0.5678064227104187,
1191
+ "learning_rate": 6.663882657933627e-06,
1192
+ "loss": 0.1595856785774231,
1193
+ "step": 735,
1194
+ "token_acc": 0.9419337925811573
1195
+ },
1196
+ {
1197
+ "epoch": 2.1264367816091956,
1198
+ "grad_norm": 0.5946845412254333,
1199
+ "learning_rate": 6.619002704880734e-06,
1200
+ "loss": 0.16368247270584108,
1201
+ "step": 740,
1202
+ "token_acc": 0.9412974145140175
1203
+ },
1204
+ {
1205
+ "epoch": 2.1408045977011496,
1206
+ "grad_norm": 0.5547811388969421,
1207
+ "learning_rate": 6.573976554864112e-06,
1208
+ "loss": 0.1569953441619873,
1209
+ "step": 745,
1210
+ "token_acc": 0.9429346979137447
1211
+ },
1212
+ {
1213
+ "epoch": 2.1551724137931036,
1214
+ "grad_norm": 0.6081125140190125,
1215
+ "learning_rate": 6.52880827377346e-06,
1216
+ "loss": 0.15633049011230468,
1217
+ "step": 750,
1218
+ "token_acc": 0.9436848836708006
1219
+ },
1220
+ {
1221
+ "epoch": 2.1695402298850572,
1222
+ "grad_norm": 0.5700559020042419,
1223
+ "learning_rate": 6.4835019403330005e-06,
1224
+ "loss": 0.1616694450378418,
1225
+ "step": 755,
1226
+ "token_acc": 0.9420208339977444
1227
+ },
1228
+ {
1229
+ "epoch": 2.1839080459770113,
1230
+ "grad_norm": 0.5927231907844543,
1231
+ "learning_rate": 6.4380616457331755e-06,
1232
+ "loss": 0.15684512853622437,
1233
+ "step": 760,
1234
+ "token_acc": 0.9430860747388772
1235
+ },
1236
+ {
1237
+ "epoch": 2.1982758620689653,
1238
+ "grad_norm": 0.5662438273429871,
1239
+ "learning_rate": 6.392491493261199e-06,
1240
+ "loss": 0.15455737113952636,
1241
+ "step": 765,
1242
+ "token_acc": 0.9439083729020733
1243
+ },
1244
+ {
1245
+ "epoch": 2.2126436781609193,
1246
+ "grad_norm": 0.6180692315101624,
1247
+ "learning_rate": 6.3467955979305326e-06,
1248
+ "loss": 0.16106997728347777,
1249
+ "step": 770,
1250
+ "token_acc": 0.9416754852644854
1251
+ },
1252
+ {
1253
+ "epoch": 2.2270114942528734,
1254
+ "grad_norm": 0.5441434979438782,
1255
+ "learning_rate": 6.300978086109305e-06,
1256
+ "loss": 0.16208994388580322,
1257
+ "step": 775,
1258
+ "token_acc": 0.9412263813209829
1259
+ },
1260
+ {
1261
+ "epoch": 2.2413793103448274,
1262
+ "grad_norm": 0.5616468191146851,
1263
+ "learning_rate": 6.255043095147679e-06,
1264
+ "loss": 0.15927375555038453,
1265
+ "step": 780,
1266
+ "token_acc": 0.9423713744721197
1267
+ },
1268
+ {
1269
+ "epoch": 2.2557471264367814,
1270
+ "grad_norm": 0.5193885564804077,
1271
+ "learning_rate": 6.208994773004266e-06,
1272
+ "loss": 0.15997812747955323,
1273
+ "step": 785,
1274
+ "token_acc": 0.9420608194731714
1275
+ },
1276
+ {
1277
+ "epoch": 2.2701149425287355,
1278
+ "grad_norm": 0.5596188306808472,
1279
+ "learning_rate": 6.162837277871553e-06,
1280
+ "loss": 0.16357777118682862,
1281
+ "step": 790,
1282
+ "token_acc": 0.9406173675910481
1283
+ },
1284
+ {
1285
+ "epoch": 2.2844827586206895,
1286
+ "grad_norm": 0.5576221346855164,
1287
+ "learning_rate": 6.116574777800413e-06,
1288
+ "loss": 0.15778462886810302,
1289
+ "step": 795,
1290
+ "token_acc": 0.9430074419760587
1291
+ },
1292
+ {
1293
+ "epoch": 2.2988505747126435,
1294
+ "grad_norm": 0.6510303020477295,
1295
+ "learning_rate": 6.070211450323737e-06,
1296
+ "loss": 0.15905938148498536,
1297
+ "step": 800,
1298
+ "token_acc": 0.9422998550987141
1299
+ },
1300
+ {
1301
+ "epoch": 2.3132183908045976,
1302
+ "grad_norm": 0.6205097436904907,
1303
+ "learning_rate": 6.023751482079186e-06,
1304
+ "loss": 0.16222896575927734,
1305
+ "step": 805,
1306
+ "token_acc": 0.94130131311641
1307
+ },
1308
+ {
1309
+ "epoch": 2.3275862068965516,
1310
+ "grad_norm": 0.5592314004898071,
1311
+ "learning_rate": 5.977199068431154e-06,
1312
+ "loss": 0.15956305265426635,
1313
+ "step": 810,
1314
+ "token_acc": 0.9416855559126421
1315
+ },
1316
+ {
1317
+ "epoch": 2.3419540229885056,
1318
+ "grad_norm": 0.5842496752738953,
1319
+ "learning_rate": 5.930558413091903e-06,
1320
+ "loss": 0.15918197631835937,
1321
+ "step": 815,
1322
+ "token_acc": 0.9425314933703973
1323
+ },
1324
+ {
1325
+ "epoch": 2.3563218390804597,
1326
+ "grad_norm": 0.5384538769721985,
1327
+ "learning_rate": 5.883833727741982e-06,
1328
+ "loss": 0.15995166301727295,
1329
+ "step": 820,
1330
+ "token_acc": 0.9422360127778503
1331
+ },
1332
+ {
1333
+ "epoch": 2.3706896551724137,
1334
+ "grad_norm": 0.5527569651603699,
1335
+ "learning_rate": 5.837029231649899e-06,
1336
+ "loss": 0.16167669296264647,
1337
+ "step": 825,
1338
+ "token_acc": 0.9420040830709819
1339
+ },
1340
+ {
1341
+ "epoch": 2.3850574712643677,
1342
+ "grad_norm": 0.565212070941925,
1343
+ "learning_rate": 5.790149151291124e-06,
1344
+ "loss": 0.1560319781303406,
1345
+ "step": 830,
1346
+ "token_acc": 0.9427164634418289
1347
+ },
1348
+ {
1349
+ "epoch": 2.3994252873563218,
1350
+ "grad_norm": 0.5099773406982422,
1351
+ "learning_rate": 5.743197719966434e-06,
1352
+ "loss": 0.15993741750717164,
1353
+ "step": 835,
1354
+ "token_acc": 0.9424713643815994
1355
+ },
1356
+ {
1357
+ "epoch": 2.413793103448276,
1358
+ "grad_norm": 0.5427571535110474,
1359
+ "learning_rate": 5.696179177419643e-06,
1360
+ "loss": 0.15984433889389038,
1361
+ "step": 840,
1362
+ "token_acc": 0.9422104030504231
1363
+ },
1364
+ {
1365
+ "epoch": 2.42816091954023,
1366
+ "grad_norm": 0.5649964809417725,
1367
+ "learning_rate": 5.649097769454749e-06,
1368
+ "loss": 0.1587543249130249,
1369
+ "step": 845,
1370
+ "token_acc": 0.9421576260408061
1371
+ },
1372
+ {
1373
+ "epoch": 2.442528735632184,
1374
+ "grad_norm": 0.5804570317268372,
1375
+ "learning_rate": 5.60195774755254e-06,
1376
+ "loss": 0.15945730209350586,
1377
+ "step": 850,
1378
+ "token_acc": 0.9422158521017598
1379
+ },
1380
+ {
1381
+ "epoch": 2.456896551724138,
1382
+ "grad_norm": 0.5454772710800171,
1383
+ "learning_rate": 5.554763368486675e-06,
1384
+ "loss": 0.16291909217834472,
1385
+ "step": 855,
1386
+ "token_acc": 0.9408588784800723
1387
+ },
1388
+ {
1389
+ "epoch": 2.471264367816092,
1390
+ "grad_norm": 0.5575549006462097,
1391
+ "learning_rate": 5.507518893939305e-06,
1392
+ "loss": 0.15833431482315063,
1393
+ "step": 860,
1394
+ "token_acc": 0.9425904466869698
1395
+ },
1396
+ {
1397
+ "epoch": 2.485632183908046,
1398
+ "grad_norm": 0.5594390034675598,
1399
+ "learning_rate": 5.46022859011623e-06,
1400
+ "loss": 0.16217752695083618,
1401
+ "step": 865,
1402
+ "token_acc": 0.9414153546966382
1403
+ },
1404
+ {
1405
+ "epoch": 2.5,
1406
+ "grad_norm": 0.5520246028900146,
1407
+ "learning_rate": 5.412896727361663e-06,
1408
+ "loss": 0.16071032285690307,
1409
+ "step": 870,
1410
+ "token_acc": 0.9419214876033057
1411
+ },
1412
+ {
1413
+ "epoch": 2.514367816091954,
1414
+ "grad_norm": 0.5357754826545715,
1415
+ "learning_rate": 5.365527579772615e-06,
1416
+ "loss": 0.16383228302001954,
1417
+ "step": 875,
1418
+ "token_acc": 0.9405870600052406
1419
+ },
1420
+ {
1421
+ "epoch": 2.528735632183908,
1422
+ "grad_norm": 0.566700279712677,
1423
+ "learning_rate": 5.318125424812943e-06,
1424
+ "loss": 0.15904467105865477,
1425
+ "step": 880,
1426
+ "token_acc": 0.9423006162520172
1427
+ },
1428
+ {
1429
+ "epoch": 2.543103448275862,
1430
+ "grad_norm": 0.5374186635017395,
1431
+ "learning_rate": 5.270694542927089e-06,
1432
+ "loss": 0.16128381490707397,
1433
+ "step": 885,
1434
+ "token_acc": 0.9415549986237668
1435
+ },
1436
+ {
1437
+ "epoch": 2.557471264367816,
1438
+ "grad_norm": 0.5469412803649902,
1439
+ "learning_rate": 5.2232392171535515e-06,
1440
+ "loss": 0.15972660779953002,
1441
+ "step": 890,
1442
+ "token_acc": 0.9423087655850607
1443
+ },
1444
+ {
1445
+ "epoch": 2.57183908045977,
1446
+ "grad_norm": 0.556884765625,
1447
+ "learning_rate": 5.175763732738135e-06,
1448
+ "loss": 0.16042428016662597,
1449
+ "step": 895,
1450
+ "token_acc": 0.9419193194507572
1451
+ },
1452
+ {
1453
+ "epoch": 2.586206896551724,
1454
+ "grad_norm": 0.5304916501045227,
1455
+ "learning_rate": 5.128272376746972e-06,
1456
+ "loss": 0.15746138095855713,
1457
+ "step": 900,
1458
+ "token_acc": 0.9429987863934677
1459
+ },
1460
+ {
1461
+ "epoch": 2.6005747126436782,
1462
+ "grad_norm": 0.5301142930984497,
1463
+ "learning_rate": 5.080769437679417e-06,
1464
+ "loss": 0.15831880569458007,
1465
+ "step": 905,
1466
+ "token_acc": 0.9429148158292744
1467
+ },
1468
+ {
1469
+ "epoch": 2.6149425287356323,
1470
+ "grad_norm": 0.566373884677887,
1471
+ "learning_rate": 5.03325920508078e-06,
1472
+ "loss": 0.1599961757659912,
1473
+ "step": 910,
1474
+ "token_acc": 0.9419200747371421
1475
+ },
1476
+ {
1477
+ "epoch": 2.6293103448275863,
1478
+ "grad_norm": 0.5492708086967468,
1479
+ "learning_rate": 4.985745969154981e-06,
1480
+ "loss": 0.15549759864807128,
1481
+ "step": 915,
1482
+ "token_acc": 0.9435671525736303
1483
+ },
1484
+ {
1485
+ "epoch": 2.6436781609195403,
1486
+ "grad_norm": 0.5328662991523743,
1487
+ "learning_rate": 4.938234020377141e-06,
1488
+ "loss": 0.162940514087677,
1489
+ "step": 920,
1490
+ "token_acc": 0.9412549685243446
1491
+ },
1492
+ {
1493
+ "epoch": 2.6580459770114944,
1494
+ "grad_norm": 0.5540962815284729,
1495
+ "learning_rate": 4.890727649106156e-06,
1496
+ "loss": 0.16096749305725097,
1497
+ "step": 925,
1498
+ "token_acc": 0.9422050509171753
1499
+ },
1500
+ {
1501
+ "epoch": 2.6724137931034484,
1502
+ "grad_norm": 0.5807366967201233,
1503
+ "learning_rate": 4.843231145197267e-06,
1504
+ "loss": 0.1637776732444763,
1505
+ "step": 930,
1506
+ "token_acc": 0.9410530542305114
1507
+ },
1508
+ {
1509
+ "epoch": 2.6867816091954024,
1510
+ "grad_norm": 0.5675597786903381,
1511
+ "learning_rate": 4.79574879761468e-06,
1512
+ "loss": 0.15756003856658934,
1513
+ "step": 935,
1514
+ "token_acc": 0.9426537129121203
1515
+ },
1516
+ {
1517
+ "epoch": 2.7011494252873565,
1518
+ "grad_norm": 0.5579917430877686,
1519
+ "learning_rate": 4.7482848940442875e-06,
1520
+ "loss": 0.15803995132446289,
1521
+ "step": 940,
1522
+ "token_acc": 0.9427388146214977
1523
+ },
1524
+ {
1525
+ "epoch": 2.7155172413793105,
1526
+ "grad_norm": 0.5666033625602722,
1527
+ "learning_rate": 4.700843720506464e-06,
1528
+ "loss": 0.15797820091247558,
1529
+ "step": 945,
1530
+ "token_acc": 0.9427172320327115
1531
+ },
1532
+ {
1533
+ "epoch": 2.7298850574712645,
1534
+ "grad_norm": 0.5249812006950378,
1535
+ "learning_rate": 4.653429560969058e-06,
1536
+ "loss": 0.15697550773620605,
1537
+ "step": 950,
1538
+ "token_acc": 0.9426309863257727
1539
+ },
1540
+ {
1541
+ "epoch": 2.7442528735632186,
1542
+ "grad_norm": 0.5508291125297546,
1543
+ "learning_rate": 4.606046696960531e-06,
1544
+ "loss": 0.1596212148666382,
1545
+ "step": 955,
1546
+ "token_acc": 0.9421995105801065
1547
+ },
1548
+ {
1549
+ "epoch": 2.7586206896551726,
1550
+ "grad_norm": 0.5431759357452393,
1551
+ "learning_rate": 4.558699407183339e-06,
1552
+ "loss": 0.1613481879234314,
1553
+ "step": 960,
1554
+ "token_acc": 0.9418076752369321
1555
+ },
1556
+ {
1557
+ "epoch": 2.7729885057471266,
1558
+ "grad_norm": 0.5558294057846069,
1559
+ "learning_rate": 4.511391967127563e-06,
1560
+ "loss": 0.16271686553955078,
1561
+ "step": 965,
1562
+ "token_acc": 0.9415121493610068
1563
+ },
1564
+ {
1565
+ "epoch": 2.7873563218390807,
1566
+ "grad_norm": 0.5539355874061584,
1567
+ "learning_rate": 4.464128648684829e-06,
1568
+ "loss": 0.1587162733078003,
1569
+ "step": 970,
1570
+ "token_acc": 0.9426954815163374
1571
+ },
1572
+ {
1573
+ "epoch": 2.8017241379310347,
1574
+ "grad_norm": 0.5470131039619446,
1575
+ "learning_rate": 4.4169137197625544e-06,
1576
+ "loss": 0.1590419292449951,
1577
+ "step": 975,
1578
+ "token_acc": 0.9426755454615614
1579
+ },
1580
+ {
1581
+ "epoch": 2.8160919540229887,
1582
+ "grad_norm": 0.5776523351669312,
1583
+ "learning_rate": 4.369751443898554e-06,
1584
+ "loss": 0.16058001518249512,
1585
+ "step": 980,
1586
+ "token_acc": 0.9419302011022055
1587
+ },
1588
+ {
1589
+ "epoch": 2.8304597701149428,
1590
+ "grad_norm": 0.5623759627342224,
1591
+ "learning_rate": 4.3226460798760335e-06,
1592
+ "loss": 0.1585533857345581,
1593
+ "step": 985,
1594
+ "token_acc": 0.9425908221797323
1595
+ },
1596
+ {
1597
+ "epoch": 2.844827586206897,
1598
+ "grad_norm": 0.578778088092804,
1599
+ "learning_rate": 4.275601881339028e-06,
1600
+ "loss": 0.1597745656967163,
1601
+ "step": 990,
1602
+ "token_acc": 0.9424683325257839
1603
+ },
1604
+ {
1605
+ "epoch": 2.859195402298851,
1606
+ "grad_norm": 0.508884608745575,
1607
+ "learning_rate": 4.2286230964082855e-06,
1608
+ "loss": 0.1611253261566162,
1609
+ "step": 995,
1610
+ "token_acc": 0.9417449510184148
1611
+ },
1612
+ {
1613
+ "epoch": 2.873563218390805,
1614
+ "grad_norm": 0.5472661852836609,
1615
+ "learning_rate": 4.181713967297673e-06,
1616
+ "loss": 0.16162338256835937,
1617
+ "step": 1000,
1618
+ "token_acc": 0.9418222242808838
1619
+ },
1620
+ {
1621
+ "epoch": 2.887931034482759,
1622
+ "grad_norm": 0.5404829978942871,
1623
+ "learning_rate": 4.134878729931084e-06,
1624
+ "loss": 0.15965355634689332,
1625
+ "step": 1005,
1626
+ "token_acc": 0.9421647947746333
1627
+ },
1628
+ {
1629
+ "epoch": 2.9022988505747125,
1630
+ "grad_norm": 0.546474277973175,
1631
+ "learning_rate": 4.088121613559946e-06,
1632
+ "loss": 0.15857319831848143,
1633
+ "step": 1010,
1634
+ "token_acc": 0.9425641636936944
1635
+ },
1636
+ {
1637
+ "epoch": 2.9166666666666665,
1638
+ "grad_norm": 0.5491721034049988,
1639
+ "learning_rate": 4.041446840381309e-06,
1640
+ "loss": 0.1571238160133362,
1641
+ "step": 1015,
1642
+ "token_acc": 0.9431960282145565
1643
+ },
1644
+ {
1645
+ "epoch": 2.9310344827586206,
1646
+ "grad_norm": 0.5627540349960327,
1647
+ "learning_rate": 3.994858625156582e-06,
1648
+ "loss": 0.16332995891571045,
1649
+ "step": 1020,
1650
+ "token_acc": 0.9413830317117715
1651
+ },
1652
+ {
1653
+ "epoch": 2.9454022988505746,
1654
+ "grad_norm": 0.5357237458229065,
1655
+ "learning_rate": 3.948361174830936e-06,
1656
+ "loss": 0.15722098350524902,
1657
+ "step": 1025,
1658
+ "token_acc": 0.9429645655712238
1659
+ },
1660
+ {
1661
+ "epoch": 2.9597701149425286,
1662
+ "grad_norm": 0.5359401702880859,
1663
+ "learning_rate": 3.901958688153418e-06,
1664
+ "loss": 0.1601097822189331,
1665
+ "step": 1030,
1666
+ "token_acc": 0.9418747990887364
1667
+ },
1668
+ {
1669
+ "epoch": 2.9741379310344827,
1670
+ "grad_norm": 0.5464105010032654,
1671
+ "learning_rate": 3.855655355297789e-06,
1672
+ "loss": 0.15313236713409423,
1673
+ "step": 1035,
1674
+ "token_acc": 0.9442072969386959
1675
+ },
1676
+ {
1677
+ "epoch": 2.9885057471264367,
1678
+ "grad_norm": 0.5644527077674866,
1679
+ "learning_rate": 3.809455357484168e-06,
1680
+ "loss": 0.16143866777420043,
1681
+ "step": 1040,
1682
+ "token_acc": 0.9420071092559834
1683
+ },
1684
+ {
1685
+ "epoch": 3.0028735632183907,
1686
+ "grad_norm": 0.5285263657569885,
1687
+ "learning_rate": 3.763362866601447e-06,
1688
+ "loss": 0.15135617256164552,
1689
+ "step": 1045,
1690
+ "token_acc": 0.9449246755929412
1691
+ },
1692
+ {
1693
+ "epoch": 3.0172413793103448,
1694
+ "grad_norm": 0.4897845685482025,
1695
+ "learning_rate": 3.7173820448305754e-06,
1696
+ "loss": 0.11718956232070923,
1697
+ "step": 1050,
1698
+ "token_acc": 0.959481731955255
1699
+ },
1700
+ {
1701
+ "epoch": 3.031609195402299,
1702
+ "grad_norm": 0.5987873077392578,
1703
+ "learning_rate": 3.671517044268715e-06,
1704
+ "loss": 0.11166307926177979,
1705
+ "step": 1055,
1706
+ "token_acc": 0.9600167651667093
1707
+ },
1708
+ {
1709
+ "epoch": 3.045977011494253,
1710
+ "grad_norm": 0.5278794169425964,
1711
+ "learning_rate": 3.6257720065542987e-06,
1712
+ "loss": 0.1127786636352539,
1713
+ "step": 1060,
1714
+ "token_acc": 0.9593323057132718
1715
+ },
1716
+ {
1717
+ "epoch": 3.060344827586207,
1718
+ "grad_norm": 0.5107921957969666,
1719
+ "learning_rate": 3.5801510624930365e-06,
1720
+ "loss": 0.1061087965965271,
1721
+ "step": 1065,
1722
+ "token_acc": 0.9620770316163334
1723
+ },
1724
+ {
1725
+ "epoch": 3.074712643678161,
1726
+ "grad_norm": 0.5065659880638123,
1727
+ "learning_rate": 3.5346583316849027e-06,
1728
+ "loss": 0.10998306274414063,
1729
+ "step": 1070,
1730
+ "token_acc": 0.9607816463319588
1731
+ },
1732
+ {
1733
+ "epoch": 3.089080459770115,
1734
+ "grad_norm": 0.5085805654525757,
1735
+ "learning_rate": 3.489297922152136e-06,
1736
+ "loss": 0.10954939126968384,
1737
+ "step": 1075,
1738
+ "token_acc": 0.9605736594874236
1739
+ },
1740
+ {
1741
+ "epoch": 3.103448275862069,
1742
+ "grad_norm": 0.5702577233314514,
1743
+ "learning_rate": 3.4440739299682844e-06,
1744
+ "loss": 0.10895823240280152,
1745
+ "step": 1080,
1746
+ "token_acc": 0.9604872715914415
1747
+ },
1748
+ {
1749
+ "epoch": 3.117816091954023,
1750
+ "grad_norm": 0.5174564719200134,
1751
+ "learning_rate": 3.3989904388883155e-06,
1752
+ "loss": 0.11295514106750489,
1753
+ "step": 1085,
1754
+ "token_acc": 0.9594747043946243
1755
+ },
1756
+ {
1757
+ "epoch": 3.132183908045977,
1758
+ "grad_norm": 0.512843132019043,
1759
+ "learning_rate": 3.3540515199798653e-06,
1760
+ "loss": 0.10992898941040039,
1761
+ "step": 1090,
1762
+ "token_acc": 0.960507777933026
1763
+ },
1764
+ {
1765
+ "epoch": 3.146551724137931,
1766
+ "grad_norm": 0.49290990829467773,
1767
+ "learning_rate": 3.3092612312556077e-06,
1768
+ "loss": 0.10842022895812989,
1769
+ "step": 1095,
1770
+ "token_acc": 0.9609840982724474
1771
+ },
1772
+ {
1773
+ "epoch": 3.160919540229885,
1774
+ "grad_norm": 0.5270472764968872,
1775
+ "learning_rate": 3.264623617306817e-06,
1776
+ "loss": 0.10932838916778564,
1777
+ "step": 1100,
1778
+ "token_acc": 0.9603938160814222
1779
+ },
1780
+ {
1781
+ "epoch": 3.175287356321839,
1782
+ "grad_norm": 0.5300927758216858,
1783
+ "learning_rate": 3.220142708938144e-06,
1784
+ "loss": 0.11059236526489258,
1785
+ "step": 1105,
1786
+ "token_acc": 0.960232914783978
1787
+ },
1788
+ {
1789
+ "epoch": 3.189655172413793,
1790
+ "grad_norm": 0.5519798994064331,
1791
+ "learning_rate": 3.1758225228036233e-06,
1792
+ "loss": 0.10988420248031616,
1793
+ "step": 1110,
1794
+ "token_acc": 0.9601467152547007
1795
+ },
1796
+ {
1797
+ "epoch": 3.204022988505747,
1798
+ "grad_norm": 0.503612220287323,
1799
+ "learning_rate": 3.13166706104397e-06,
1800
+ "loss": 0.1070522665977478,
1801
+ "step": 1115,
1802
+ "token_acc": 0.9615549485185964
1803
+ },
1804
+ {
1805
+ "epoch": 3.218390804597701,
1806
+ "grad_norm": 0.5259993076324463,
1807
+ "learning_rate": 3.0876803109251853e-06,
1808
+ "loss": 0.11044783592224121,
1809
+ "step": 1120,
1810
+ "token_acc": 0.9602514344316645
1811
+ },
1812
+ {
1813
+ "epoch": 3.2327586206896552,
1814
+ "grad_norm": 0.5334917902946472,
1815
+ "learning_rate": 3.043866244478505e-06,
1816
+ "loss": 0.11164047718048095,
1817
+ "step": 1125,
1818
+ "token_acc": 0.9597306917897919
1819
+ },
1820
+ {
1821
+ "epoch": 3.2471264367816093,
1822
+ "grad_norm": 0.5120248198509216,
1823
+ "learning_rate": 3.0002288181417148e-06,
1824
+ "loss": 0.11079723834991455,
1825
+ "step": 1130,
1826
+ "token_acc": 0.9597157535890911
1827
+ },
1828
+ {
1829
+ "epoch": 3.2614942528735633,
1830
+ "grad_norm": 0.5175688862800598,
1831
+ "learning_rate": 2.9567719724018974e-06,
1832
+ "loss": 0.1095381498336792,
1833
+ "step": 1135,
1834
+ "token_acc": 0.9603776708395413
1835
+ },
1836
+ {
1837
+ "epoch": 3.2758620689655173,
1838
+ "grad_norm": 0.5364386439323425,
1839
+ "learning_rate": 2.913499631439582e-06,
1840
+ "loss": 0.11385544538497924,
1841
+ "step": 1140,
1842
+ "token_acc": 0.959080389977889
1843
+ },
1844
+ {
1845
+ "epoch": 3.2902298850574714,
1846
+ "grad_norm": 0.5315613150596619,
1847
+ "learning_rate": 2.8704157027744097e-06,
1848
+ "loss": 0.10774133205413819,
1849
+ "step": 1145,
1850
+ "token_acc": 0.9608326559294428
1851
+ },
1852
+ {
1853
+ "epoch": 3.3045977011494254,
1854
+ "grad_norm": 0.4946730136871338,
1855
+ "learning_rate": 2.827524076912267e-06,
1856
+ "loss": 0.10972250699996948,
1857
+ "step": 1150,
1858
+ "token_acc": 0.9603479656939942
1859
+ },
1860
+ {
1861
+ "epoch": 3.3189655172413794,
1862
+ "grad_norm": 0.5144687294960022,
1863
+ "learning_rate": 2.784828626993976e-06,
1864
+ "loss": 0.1102259874343872,
1865
+ "step": 1155,
1866
+ "token_acc": 0.9601369651469436
1867
+ },
1868
+ {
1869
+ "epoch": 3.3333333333333335,
1870
+ "grad_norm": 0.5160213112831116,
1871
+ "learning_rate": 2.7423332084455543e-06,
1872
+ "loss": 0.11218315362930298,
1873
+ "step": 1160,
1874
+ "token_acc": 0.9595977940943304
1875
+ },
1876
+ {
1877
+ "epoch": 3.3477011494252875,
1878
+ "grad_norm": 0.5422830581665039,
1879
+ "learning_rate": 2.7000416586300516e-06,
1880
+ "loss": 0.11074899435043335,
1881
+ "step": 1165,
1882
+ "token_acc": 0.9601372202414836
1883
+ },
1884
+ {
1885
+ "epoch": 3.3620689655172415,
1886
+ "grad_norm": 0.5436260104179382,
1887
+ "learning_rate": 2.65795779650105e-06,
1888
+ "loss": 0.11167094707489014,
1889
+ "step": 1170,
1890
+ "token_acc": 0.9597704434511221
1891
+ },
1892
+ {
1893
+ "epoch": 3.3764367816091956,
1894
+ "grad_norm": 0.5317823886871338,
1895
+ "learning_rate": 2.6160854222577992e-06,
1896
+ "loss": 0.11267951726913453,
1897
+ "step": 1175,
1898
+ "token_acc": 0.9592586091295477
1899
+ },
1900
+ {
1901
+ "epoch": 3.3908045977011496,
1902
+ "grad_norm": 0.5103031396865845,
1903
+ "learning_rate": 2.5744283170020566e-06,
1904
+ "loss": 0.10874271392822266,
1905
+ "step": 1180,
1906
+ "token_acc": 0.9604443435037474
1907
+ },
1908
+ {
1909
+ "epoch": 3.405172413793103,
1910
+ "grad_norm": 0.4954359233379364,
1911
+ "learning_rate": 2.532990242396664e-06,
1912
+ "loss": 0.11055747270584107,
1913
+ "step": 1185,
1914
+ "token_acc": 0.9599720950966685
1915
+ },
1916
+ {
1917
+ "epoch": 3.4195402298850572,
1918
+ "grad_norm": 0.5554635524749756,
1919
+ "learning_rate": 2.4917749403258482e-06,
1920
+ "loss": 0.11123825311660766,
1921
+ "step": 1190,
1922
+ "token_acc": 0.9598281478153431
1923
+ },
1924
+ {
1925
+ "epoch": 3.4339080459770113,
1926
+ "grad_norm": 0.5267956852912903,
1927
+ "learning_rate": 2.4507861325573384e-06,
1928
+ "loss": 0.11294952630996705,
1929
+ "step": 1195,
1930
+ "token_acc": 0.9595975609398073
1931
+ },
1932
+ {
1933
+ "epoch": 3.4482758620689653,
1934
+ "grad_norm": 0.5136481523513794,
1935
+ "learning_rate": 2.41002752040629e-06,
1936
+ "loss": 0.10954599380493164,
1937
+ "step": 1200,
1938
+ "token_acc": 0.9604799088666979
1939
+ },
1940
+ {
1941
+ "epoch": 3.4626436781609193,
1942
+ "grad_norm": 0.5163865685462952,
1943
+ "learning_rate": 2.3695027844010416e-06,
1944
+ "loss": 0.1089853048324585,
1945
+ "step": 1205,
1946
+ "token_acc": 0.9606652616554601
1947
+ },
1948
+ {
1949
+ "epoch": 3.4770114942528734,
1950
+ "grad_norm": 0.5261132121086121,
1951
+ "learning_rate": 2.3292155839507767e-06,
1952
+ "loss": 0.11200270652770997,
1953
+ "step": 1210,
1954
+ "token_acc": 0.9598040583043245
1955
+ },
1956
+ {
1957
+ "epoch": 3.4913793103448274,
1958
+ "grad_norm": 0.5254859328269958,
1959
+ "learning_rate": 2.2891695570150637e-06,
1960
+ "loss": 0.11190475225448608,
1961
+ "step": 1215,
1962
+ "token_acc": 0.9595285829926624
1963
+ },
1964
+ {
1965
+ "epoch": 3.5057471264367814,
1966
+ "grad_norm": 0.5383979082107544,
1967
+ "learning_rate": 2.2493683197753498e-06,
1968
+ "loss": 0.11255807876586914,
1969
+ "step": 1220,
1970
+ "token_acc": 0.9591381953870757
1971
+ },
1972
+ {
1973
+ "epoch": 3.5201149425287355,
1974
+ "grad_norm": 0.5105087161064148,
1975
+ "learning_rate": 2.2098154663084204e-06,
1976
+ "loss": 0.11062219142913818,
1977
+ "step": 1225,
1978
+ "token_acc": 0.9600996288337565
1979
+ },
1980
+ {
1981
+ "epoch": 3.5344827586206895,
1982
+ "grad_norm": 0.5059808492660522,
1983
+ "learning_rate": 2.1705145682618506e-06,
1984
+ "loss": 0.11152106523513794,
1985
+ "step": 1230,
1986
+ "token_acc": 0.9599567491646626
1987
+ },
1988
+ {
1989
+ "epoch": 3.5488505747126435,
1990
+ "grad_norm": 0.5309309959411621,
1991
+ "learning_rate": 2.1314691745314836e-06,
1992
+ "loss": 0.10833891630172729,
1993
+ "step": 1235,
1994
+ "token_acc": 0.9608956001041395
1995
+ },
1996
+ {
1997
+ "epoch": 3.5632183908045976,
1998
+ "grad_norm": 0.5250213146209717,
1999
+ "learning_rate": 2.0926828109409587e-06,
2000
+ "loss": 0.10981615781784057,
2001
+ "step": 1240,
2002
+ "token_acc": 0.9603095846626934
2003
+ },
2004
+ {
2005
+ "epoch": 3.5775862068965516,
2006
+ "grad_norm": 0.5215520858764648,
2007
+ "learning_rate": 2.0541589799233313e-06,
2008
+ "loss": 0.10947526693344116,
2009
+ "step": 1245,
2010
+ "token_acc": 0.9607612043950218
2011
+ },
2012
+ {
2013
+ "epoch": 3.5919540229885056,
2014
+ "grad_norm": 0.5618249773979187,
2015
+ "learning_rate": 2.0159011602048007e-06,
2016
+ "loss": 0.110862135887146,
2017
+ "step": 1250,
2018
+ "token_acc": 0.9600785435223347
2019
+ },
2020
+ {
2021
+ "epoch": 3.6063218390804597,
2022
+ "grad_norm": 0.5231663584709167,
2023
+ "learning_rate": 1.977912806490578e-06,
2024
+ "loss": 0.11059050559997559,
2025
+ "step": 1255,
2026
+ "token_acc": 0.9598605523622261
2027
+ },
2028
+ {
2029
+ "epoch": 3.6206896551724137,
2030
+ "grad_norm": 0.531852126121521,
2031
+ "learning_rate": 1.940197349152923e-06,
2032
+ "loss": 0.10940660238265991,
2033
+ "step": 1260,
2034
+ "token_acc": 0.960699336760164
2035
+ },
2036
+ {
2037
+ "epoch": 3.6350574712643677,
2038
+ "grad_norm": 0.5311183333396912,
2039
+ "learning_rate": 1.9027581939213852e-06,
2040
+ "loss": 0.11252405643463134,
2041
+ "step": 1265,
2042
+ "token_acc": 0.959284159247536
2043
+ },
2044
+ {
2045
+ "epoch": 3.6494252873563218,
2046
+ "grad_norm": 0.5065079927444458,
2047
+ "learning_rate": 1.8655987215752574e-06,
2048
+ "loss": 0.1130037784576416,
2049
+ "step": 1270,
2050
+ "token_acc": 0.9595236946583082
2051
+ },
2052
+ {
2053
+ "epoch": 3.663793103448276,
2054
+ "grad_norm": 0.5160691142082214,
2055
+ "learning_rate": 1.8287222876382914e-06,
2056
+ "loss": 0.11050925254821778,
2057
+ "step": 1275,
2058
+ "token_acc": 0.9606644023005056
2059
+ },
2060
+ {
2061
+ "epoch": 3.67816091954023,
2062
+ "grad_norm": 0.5017895102500916,
2063
+ "learning_rate": 1.792132222075692e-06,
2064
+ "loss": 0.10700697898864746,
2065
+ "step": 1280,
2066
+ "token_acc": 0.9610938904060672
2067
+ },
2068
+ {
2069
+ "epoch": 3.692528735632184,
2070
+ "grad_norm": 0.4997377097606659,
2071
+ "learning_rate": 1.7558318289934185e-06,
2072
+ "loss": 0.10991954803466797,
2073
+ "step": 1285,
2074
+ "token_acc": 0.9607771018732166
2075
+ },
2076
+ {
2077
+ "epoch": 3.706896551724138,
2078
+ "grad_norm": 0.5202011466026306,
2079
+ "learning_rate": 1.7198243863398273e-06,
2080
+ "loss": 0.10314581394195557,
2081
+ "step": 1290,
2082
+ "token_acc": 0.9623357399288797
2083
+ },
2084
+ {
2085
+ "epoch": 3.721264367816092,
2086
+ "grad_norm": 0.5741920471191406,
2087
+ "learning_rate": 1.6841131456096615e-06,
2088
+ "loss": 0.10936017036437988,
2089
+ "step": 1295,
2090
+ "token_acc": 0.9609912885683957
2091
+ },
2092
+ {
2093
+ "epoch": 3.735632183908046,
2094
+ "grad_norm": 0.5348463654518127,
2095
+ "learning_rate": 1.6487013315504484e-06,
2096
+ "loss": 0.1124497413635254,
2097
+ "step": 1300,
2098
+ "token_acc": 0.9589595608543546
2099
+ },
2100
+ {
2101
+ "epoch": 3.75,
2102
+ "grad_norm": 0.48192116618156433,
2103
+ "learning_rate": 1.6135921418712959e-06,
2104
+ "loss": 0.11058835983276367,
2105
+ "step": 1305,
2106
+ "token_acc": 0.9603752430817338
2107
+ },
2108
+ {
2109
+ "epoch": 3.764367816091954,
2110
+ "grad_norm": 0.5294514894485474,
2111
+ "learning_rate": 1.578788746954137e-06,
2112
+ "loss": 0.11072230339050293,
2113
+ "step": 1310,
2114
+ "token_acc": 0.9600368827663238
2115
+ },
2116
+ {
2117
+ "epoch": 3.778735632183908,
2118
+ "grad_norm": 0.5361485481262207,
2119
+ "learning_rate": 1.5442942895674523e-06,
2120
+ "loss": 0.1092519998550415,
2121
+ "step": 1315,
2122
+ "token_acc": 0.9608080487369277
2123
+ },
2124
+ {
2125
+ "epoch": 3.793103448275862,
2126
+ "grad_norm": 0.513168215751648,
2127
+ "learning_rate": 1.5101118845824631e-06,
2128
+ "loss": 0.10951132774353027,
2129
+ "step": 1320,
2130
+ "token_acc": 0.9610710766793532
2131
+ },
2132
+ {
2133
+ "epoch": 3.807471264367816,
2134
+ "grad_norm": 0.5398263931274414,
2135
+ "learning_rate": 1.4762446186918627e-06,
2136
+ "loss": 0.1096685767173767,
2137
+ "step": 1325,
2138
+ "token_acc": 0.9604541348326774
2139
+ },
2140
+ {
2141
+ "epoch": 3.82183908045977,
2142
+ "grad_norm": 0.4786757528781891,
2143
+ "learning_rate": 1.4426955501310901e-06,
2144
+ "loss": 0.1095815658569336,
2145
+ "step": 1330,
2146
+ "token_acc": 0.9606942978624907
2147
+ },
2148
+ {
2149
+ "epoch": 3.836206896551724,
2150
+ "grad_norm": 0.5471305251121521,
2151
+ "learning_rate": 1.4094677084021592e-06,
2152
+ "loss": 0.11180591583251953,
2153
+ "step": 1335,
2154
+ "token_acc": 0.9597833146365944
2155
+ },
2156
+ {
2157
+ "epoch": 3.8505747126436782,
2158
+ "grad_norm": 0.5561555624008179,
2159
+ "learning_rate": 1.376564094000104e-06,
2160
+ "loss": 0.10869028568267822,
2161
+ "step": 1340,
2162
+ "token_acc": 0.9609312833525311
2163
+ },
2164
+ {
2165
+ "epoch": 3.8649425287356323,
2166
+ "grad_norm": 0.5201286673545837,
2167
+ "learning_rate": 1.3439876781420237e-06,
2168
+ "loss": 0.10850247144699096,
2169
+ "step": 1345,
2170
+ "token_acc": 0.9610563150905735
2171
+ },
2172
+ {
2173
+ "epoch": 3.8793103448275863,
2174
+ "grad_norm": 0.4803552031517029,
2175
+ "learning_rate": 1.3117414024987823e-06,
2176
+ "loss": 0.1074610710144043,
2177
+ "step": 1350,
2178
+ "token_acc": 0.9612982779391964
2179
+ },
2180
+ {
2181
+ "epoch": 3.8936781609195403,
2182
+ "grad_norm": 0.5793502330780029,
2183
+ "learning_rate": 1.2798281789293766e-06,
2184
+ "loss": 0.10727846622467041,
2185
+ "step": 1355,
2186
+ "token_acc": 0.961043908943709
2187
+ },
2188
+ {
2189
+ "epoch": 3.9080459770114944,
2190
+ "grad_norm": 0.4976837635040283,
2191
+ "learning_rate": 1.2482508892179884e-06,
2192
+ "loss": 0.10663462877273559,
2193
+ "step": 1360,
2194
+ "token_acc": 0.9616364786973849
2195
+ },
2196
+ {
2197
+ "epoch": 3.9224137931034484,
2198
+ "grad_norm": 0.5056930184364319,
2199
+ "learning_rate": 1.2170123848137649e-06,
2200
+ "loss": 0.10944747924804688,
2201
+ "step": 1365,
2202
+ "token_acc": 0.9610281269079349
2203
+ },
2204
+ {
2205
+ "epoch": 3.9367816091954024,
2206
+ "grad_norm": 0.5023649334907532,
2207
+ "learning_rate": 1.186115486573326e-06,
2208
+ "loss": 0.11062502861022949,
2209
+ "step": 1370,
2210
+ "token_acc": 0.960362025522687
2211
+ },
2212
+ {
2213
+ "epoch": 3.9511494252873565,
2214
+ "grad_norm": 0.510715663433075,
2215
+ "learning_rate": 1.1555629845060363e-06,
2216
+ "loss": 0.11008999347686768,
2217
+ "step": 1375,
2218
+ "token_acc": 0.9607457258324054
2219
+ },
2220
+ {
2221
+ "epoch": 3.9655172413793105,
2222
+ "grad_norm": 0.544761598110199,
2223
+ "learning_rate": 1.125357637522072e-06,
2224
+ "loss": 0.10898411273956299,
2225
+ "step": 1380,
2226
+ "token_acc": 0.9611561145762342
2227
+ },
2228
+ {
2229
+ "epoch": 3.9798850574712645,
2230
+ "grad_norm": 0.5076886415481567,
2231
+ "learning_rate": 1.0955021731832855e-06,
2232
+ "loss": 0.10860610008239746,
2233
+ "step": 1385,
2234
+ "token_acc": 0.9611211496293943
2235
+ },
2236
+ {
2237
+ "epoch": 3.9942528735632186,
2238
+ "grad_norm": 0.4891972243785858,
2239
+ "learning_rate": 1.0659992874569053e-06,
2240
+ "loss": 0.1102171540260315,
2241
+ "step": 1390,
2242
+ "token_acc": 0.9605603039197617
2243
+ }
2244
+ ],
2245
+ "logging_steps": 5,
2246
+ "max_steps": 1740,
2247
+ "num_input_tokens_seen": 0,
2248
+ "num_train_epochs": 5,
2249
+ "save_steps": 500,
2250
+ "stateful_callbacks": {
2251
+ "TrainerControl": {
2252
+ "args": {
2253
+ "should_epoch_stop": false,
2254
+ "should_evaluate": false,
2255
+ "should_log": false,
2256
+ "should_save": true,
2257
+ "should_training_stop": false
2258
+ },
2259
+ "attributes": {}
2260
+ }
2261
+ },
2262
+ "total_flos": 3.366696470441047e+19,
2263
+ "train_batch_size": 1,
2264
+ "trial_name": null,
2265
+ "trial_params": null
2266
+ }
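The log_history recorded above is plain JSON, so the loss and token_acc curves can be inspected offline without re-running training. A minimal sketch of that, assuming the file has been downloaded locally as trainer_state.json and that matplotlib is available (neither is part of this upload itself):

# Sketch: plot loss and token accuracy from the trainer state shown above.
# Assumes a local copy named trainer_state.json with the standard
# Hugging Face Trainer layout ("log_history" list of step records).
import json
import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
steps = [e["step"] for e in history if "loss" in e]
loss = [e["loss"] for e in history if "loss" in e]
acc = [e["token_acc"] for e in history if "token_acc" in e]

fig, ax1 = plt.subplots()
ax1.plot(steps, loss, label="loss")
ax1.set_xlabel("step")
ax1.set_ylabel("loss")
ax2 = ax1.twinx()
ax2.plot(steps, acc, color="tab:orange", label="token_acc")
ax2.set_ylabel("token_acc")
fig.tight_layout()
plt.show()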
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1998eec44c39427025bbf596bdfed83a55b5e5592e9baf8186a4d46bebf175d3
3
+ size 8785
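The three lines above are only the Git LFS pointer for training_args.bin (the pickled TrainingArguments object the Trainer saves next to its checkpoints); the actual binary is stored out of band. If the real object is pulled, it can be inspected roughly as below. This is a sketch, not part of the upload, and assumes a torch version where loading pickled dataclasses requires weights_only=False:

# Sketch: inspect the saved TrainingArguments (requires the real LFS object,
# not just the pointer recorded above).
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)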
video_preprocessor_config.json ADDED
@@ -0,0 +1,43 @@
1
+ {
2
+ "crop_size": null,
3
+ "data_format": "channels_first",
4
+ "default_to_square": true,
5
+ "device": null,
6
+ "do_center_crop": null,
7
+ "do_convert_rgb": true,
8
+ "do_normalize": true,
9
+ "do_pad": null,
10
+ "do_rescale": true,
11
+ "do_resize": true,
12
+ "do_sample_frames": false,
13
+ "fps": null,
14
+ "image_mean": [
15
+ 0.48145466,
16
+ 0.4578275,
17
+ 0.40821073
18
+ ],
19
+ "image_std": [
20
+ 0.26862954,
21
+ 0.26130258,
22
+ 0.27577711
23
+ ],
24
+ "input_data_format": null,
25
+ "max_frames": 768,
26
+ "max_pixels": 12845056,
27
+ "merge_size": 2,
28
+ "min_frames": 4,
29
+ "min_pixels": 3136,
30
+ "num_frames": null,
31
+ "patch_size": 14,
32
+ "processor_class": "Qwen2_5_VLProcessor",
33
+ "resample": 3,
34
+ "rescale_factor": 0.00392156862745098,
35
+ "size": {
36
+ "longest_edge": 12845056,
37
+ "shortest_edge": 3136
38
+ },
39
+ "size_divisor": null,
40
+ "temporal_patch_size": 2,
41
+ "video_metadata": null,
42
+ "video_processor_type": "Qwen2VLVideoProcessor"
43
+ }
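In the config above, min_pixels (3136) and max_pixels (12845056) bound the per-frame pixel budget, while patch_size=14 and merge_size=2 mean both sides of a frame are snapped to multiples of 28. A rough sketch of that resizing rule, mirroring the smart-resize logic the Qwen2-VL preprocessors apply (the exact rounding and error handling in transformers may differ slightly; this is illustrative only):

# Sketch of the frame-resizing rule implied by the config above.
# factor = patch_size * merge_size; the frame area is clamped to
# [min_pixels, max_pixels] and both sides snap to multiples of factor.
import math

def smart_resize(height, width, factor=28, min_pixels=3136, max_pixels=12845056):
    h = round(height / factor) * factor
    w = round(width / factor) * factor
    if h * w > max_pixels:
        scale = math.sqrt(height * width / max_pixels)
        h = math.floor(height / scale / factor) * factor
        w = math.floor(width / scale / factor) * factor
    elif h * w < min_pixels:
        scale = math.sqrt(min_pixels / (height * width))
        h = math.ceil(height * scale / factor) * factor
        w = math.ceil(width * scale / factor) * factor
    return h, w

print(smart_resize(1080, 1920))  # a 1080p frame stays within the pixel budget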
vocab.json ADDED
The diff for this file is too large to render.