leiwx52 committed on
Commit 6c2ea2c · verified · 1 Parent(s): b138d86

Delete qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500

Files changed (19)
  1. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/added_tokens.json +0 -28
  2. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/args.json +0 -392
  3. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/chat_template.jinja +0 -120
  4. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/config.json +0 -70
  5. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/generation_config.json +0 -13
  6. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/latest +0 -1
  7. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/merges.txt +0 -0
  8. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/model-00001-of-00002.safetensors +0 -3
  9. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/model-00002-of-00002.safetensors +0 -3
  10. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/model.safetensors.index.json +0 -722
  11. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/preprocessor_config.json +0 -21
  12. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/special_tokens_map.json +0 -31
  13. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/tokenizer.json +0 -3
  14. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/tokenizer_config.json +0 -240
  15. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/trainer_state.json +0 -3534
  16. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/training_args.bin +0 -3
  17. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/video_preprocessor_config.json +0 -41
  18. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/vocab.json +0 -0
  19. qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/zero_to_fp32.py +0 -760
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/added_tokens.json DELETED
@@ -1,28 +0,0 @@
- {
- "</think>": 151668,
- "</tool_call>": 151658,
- "</tool_response>": 151666,
- "<think>": 151667,
- "<tool_call>": 151657,
- "<tool_response>": 151665,
- "<|box_end|>": 151649,
- "<|box_start|>": 151648,
- "<|endoftext|>": 151643,
- "<|file_sep|>": 151664,
- "<|fim_middle|>": 151660,
- "<|fim_pad|>": 151662,
- "<|fim_prefix|>": 151659,
- "<|fim_suffix|>": 151661,
- "<|im_end|>": 151645,
- "<|im_start|>": 151644,
- "<|image_pad|>": 151655,
- "<|object_ref_end|>": 151647,
- "<|object_ref_start|>": 151646,
- "<|quad_end|>": 151651,
- "<|quad_start|>": 151650,
- "<|repo_name|>": 151663,
- "<|video_pad|>": 151656,
- "<|vision_end|>": 151653,
- "<|vision_pad|>": 151654,
- "<|vision_start|>": 151652
- }
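For reference, the mapping above can be cross-checked against the tokenizer shipped in the same checkpoint directory; a minimal sketch, assuming a local copy at `./ckpt` (placeholder path):

```python
from transformers import AutoTokenizer

# "./ckpt" is a placeholder for a local copy of the deleted checkpoint directory.
tok = AutoTokenizer.from_pretrained("./ckpt")

# Every entry in added_tokens.json should round-trip through the tokenizer.
for token in ("<tool_call>", "</tool_call>", "<|vision_start|>", "<|image_pad|>"):
    print(token, "->", tok.convert_tokens_to_ids(token))
# Expected per the file above: 151657, 151658, 151652, 151655
```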
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/args.json DELETED
@@ -1,392 +0,0 @@
- {
- "output_dir": "/apdcephfs_fsgm/share_304220499/weixian/workspace/Agent_SFT/output/Qwen3-VL-4B-Instruct/agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384/v0-20260131-020453",
- "overwrite_output_dir": false,
- "do_train": false,
- "do_eval": false,
- "do_predict": false,
- "eval_strategy": "no",
- "prediction_loss_only": false,
- "per_device_train_batch_size": 4,
- "per_device_eval_batch_size": 1,
- "per_gpu_train_batch_size": null,
- "per_gpu_eval_batch_size": null,
- "gradient_accumulation_steps": 1,
- "eval_accumulation_steps": null,
- "eval_delay": 0,
- "torch_empty_cache_steps": null,
- "learning_rate": 2e-05,
- "weight_decay": 0.1,
- "adam_beta1": 0.9,
- "adam_beta2": 0.95,
- "adam_epsilon": 1e-08,
- "max_grad_norm": 1.0,
- "num_train_epochs": 1.0,
- "max_steps": -1,
- "lr_scheduler_type": "cosine",
- "lr_scheduler_kwargs": null,
- "warmup_ratio": 0.05,
- "warmup_steps": 0,
- "log_level": "passive",
- "log_level_replica": "warning",
- "log_on_each_node": true,
- "logging_dir": "/apdcephfs_fsgm/share_304220499/weixian/workspace/Agent_SFT/output/Qwen3-VL-4B-Instruct/agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384/v0-20260131-020453/runs",
- "logging_strategy": "steps",
- "logging_first_step": true,
- "logging_steps": 1,
- "logging_nan_inf_filter": true,
- "save_strategy": "steps",
- "save_steps": 500.0,
- "save_total_limit": null,
- "save_safetensors": true,
- "save_on_each_node": false,
- "save_only_model": false,
- "restore_callback_states_from_checkpoint": false,
- "no_cuda": false,
- "use_cpu": false,
- "use_mps_device": false,
- "seed": 42,
- "data_seed": 42,
- "jit_mode_eval": false,
- "bf16": true,
- "fp16": false,
- "fp16_opt_level": "O1",
- "half_precision_backend": "auto",
- "bf16_full_eval": false,
- "fp16_full_eval": false,
- "tf32": null,
- "local_rank": 0,
- "ddp_backend": null,
- "tpu_num_cores": null,
- "tpu_metrics_debug": false,
- "debug": null,
- "dataloader_drop_last": false,
- "eval_steps": 10000.0,
- "dataloader_num_workers": 8,
- "dataloader_prefetch_factor": null,
- "past_index": -1,
- "run_name": "/apdcephfs_fsgm/share_304220499/weixian/workspace/Agent_SFT/output/Qwen3-VL-4B-Instruct/agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384/v0-20260131-020453",
- "disable_tqdm": null,
- "remove_unused_columns": true,
- "label_names": null,
- "load_best_model_at_end": false,
- "metric_for_best_model": "loss",
- "greater_is_better": false,
- "ignore_data_skip": false,
- "fsdp": [],
- "fsdp_min_num_params": 0,
- "fsdp_config": null,
- "fsdp_transformer_layer_cls_to_wrap": null,
- "accelerator_config": {
- "dispatch_batches": false
- },
- "parallelism_config": null,
- "deepspeed": {
- "fp16": {
- "enabled": "auto",
- "loss_scale": 0,
- "loss_scale_window": 1000,
- "initial_scale_power": 16,
- "hysteresis": 2,
- "min_loss_scale": 1
- },
- "bf16": {
- "enabled": "auto"
- },
- "zero_optimization": {
- "stage": 1,
- "offload_optimizer": {
- "device": "none",
- "pin_memory": true
- },
- "allgather_partitions": true,
- "allgather_bucket_size": 200000000.0,
- "overlap_comm": false,
- "reduce_scatter": true,
- "reduce_bucket_size": 200000000.0,
- "contiguous_gradients": true
- },
- "gradient_accumulation_steps": "auto",
- "gradient_clipping": "auto",
- "steps_per_print": 2000,
- "train_batch_size": "auto",
- "train_micro_batch_size_per_gpu": "auto",
- "wall_clock_breakdown": false
- },
- "label_smoothing_factor": 0.0,
- "optim": "adamw_torch_fused",
- "optim_args": null,
- "adafactor": false,
- "group_by_length": false,
- "length_column_name": "length",
- "report_to": [
- "wandb"
- ],
- "project": "huggingface",
- "trackio_space_id": "trackio",
- "ddp_find_unused_parameters": null,
- "ddp_bucket_cap_mb": null,
- "ddp_broadcast_buffers": null,
- "dataloader_pin_memory": true,
- "dataloader_persistent_workers": false,
- "skip_memory_metrics": true,
- "use_legacy_prediction_loop": false,
- "push_to_hub": false,
- "resume_from_checkpoint": null,
- "hub_model_id": null,
- "hub_strategy": "every_save",
- "hub_token": null,
- "hub_private_repo": null,
- "hub_always_push": false,
- "hub_revision": null,
- "gradient_checkpointing": true,
- "gradient_checkpointing_kwargs": null,
- "include_inputs_for_metrics": false,
- "include_for_metrics": [],
- "eval_do_concat_batches": true,
- "fp16_backend": "auto",
- "push_to_hub_model_id": null,
- "push_to_hub_organization": null,
- "push_to_hub_token": null,
- "mp_parameters": "",
- "auto_find_batch_size": false,
- "full_determinism": false,
- "torchdynamo": null,
- "ray_scope": "last",
- "ddp_timeout": 18000000,
- "torch_compile": false,
- "torch_compile_backend": null,
- "torch_compile_mode": null,
- "include_tokens_per_second": false,
- "include_num_input_tokens_seen": false,
- "neftune_noise_alpha": null,
- "optim_target_modules": null,
- "batch_eval_metrics": false,
- "eval_on_start": false,
- "use_liger_kernel": true,
- "liger_kernel_config": null,
- "eval_use_gather_object": false,
- "average_tokens_across_devices": true,
- "sortish_sampler": false,
- "predict_with_generate": false,
- "generation_max_length": null,
- "generation_num_beams": null,
- "generation_config": null,
- "tuner_backend": "peft",
- "vit_gradient_checkpointing": null,
- "router_aux_loss_coef": 0.0,
- "enable_dft_loss": false,
- "enable_channel_loss": false,
- "check_model": true,
- "acc_strategy": "token",
- "train_dataloader_shuffle": true,
- "max_epochs": null,
- "aligner_lr": 1e-05,
- "vit_lr": 1e-05,
- "use_logits_to_keep": null,
- "ds3_gather_for_generation": true,
- "resume_only_model": false,
- "optimizer": null,
- "loss_type": null,
- "metric": null,
- "eval_use_evalscope": false,
- "eval_dataset": [],
- "eval_dataset_args": null,
- "eval_limit": null,
- "eval_generation_config": null,
- "extra_eval_args": null,
- "use_flash_ckpt": false,
- "use_ray": false,
- "ray_exp_name": null,
- "device_groups": null,
- "model": "/apdcephfs_fsgm/share_304220499/mclan/checkpoints/Qwen3-VL-4B-Instruct",
- "model_type": "qwen3_vl",
- "model_revision": null,
- "task_type": "causal_lm",
- "torch_dtype": "bfloat16",
- "attn_impl": "flash_attn",
- "new_special_tokens": [],
- "num_labels": null,
- "problem_type": null,
- "rope_scaling": null,
- "device_map": null,
- "max_memory": {},
- "max_model_len": null,
- "local_repo_path": null,
- "init_strategy": null,
- "template": "qwen3_vl",
- "system": null,
- "max_length": 65536,
- "truncation_strategy": "delete",
- "max_pixels": null,
- "agent_template": null,
- "norm_bbox": null,
- "use_chat_template": true,
- "padding_side": "right",
- "padding_free": true,
- "loss_scale": "default",
- "sequence_parallel_size": 1,
- "template_backend": "swift",
- "response_prefix": null,
- "enable_thinking": null,
- "add_non_thinking_prefix": true,
- "dataset": [
- "/apdcephfs_fsgm/share_304220499/data/planning/to_train/0127/AgentNet_ws4_filter_failure/agentnet_ubuntu_5k.train0107.openai_unified_converted.swift.ws4.l1.jsonl#163000",
- "/apdcephfs_fsgm/share_304220499/data/planning/to_train/0127/AgentNet_ws4_filter_failure/agentnet_win_mac_18k.train0107.openai_unified_converted.swift.ws4.l1.jsonl#339000",
- "/apdcephfs_fsgm/share_304220499/data/planning/to_train/0127/AgentNet_ws4_filter_failure/agentnet_ubuntu_5k.train0107.openai_unified_converted.swift.ws4.l2.jsonl#163000",
- "/apdcephfs_fsgm/share_304220499/data/planning/to_train/0127/AgentNet_ws4_filter_failure/agentnet_win_mac_18k.train0107.openai_unified_converted.swift.ws4.l2.jsonl#339000",
- "/apdcephfs_fsgm/share_304220499/data/planning/to_train/0127/AgentNet_ws4_filter_failure/agentnet_ubuntu_5k.train0107.openai_unified_converted.swift.ws4.l3.jsonl#163000",
- "/apdcephfs_fsgm/share_304220499/data/planning/to_train/0127/AgentNet_ws4_filter_failure/agentnet_win_mac_18k.train0107.openai_unified_converted.swift.ws4.l3.jsonl#339000"
- ],
- "val_dataset": [],
- "cached_dataset": [],
- "cached_val_dataset": [],
- "split_dataset_ratio": 0.0,
- "dataset_num_proc": 8,
- "load_from_cache_file": false,
- "dataset_shuffle": true,
- "val_dataset_shuffle": false,
- "streaming": false,
- "interleave_prob": null,
- "stopping_strategy": "first_exhausted",
- "shuffle_buffer_size": 1000,
- "download_mode": "reuse_dataset_if_exists",
- "columns": {},
- "strict": false,
- "model_name": null,
- "model_author": null,
- "custom_dataset_info": [],
- "quant_method": null,
- "quant_bits": null,
- "hqq_axis": null,
- "bnb_4bit_compute_dtype": "bfloat16",
- "bnb_4bit_quant_type": "nf4",
- "bnb_4bit_use_double_quant": true,
- "bnb_4bit_quant_storage": null,
- "max_new_tokens": 64,
- "temperature": 0.0,
- "top_k": null,
- "top_p": null,
- "repetition_penalty": null,
- "num_beams": 1,
- "stream": false,
- "stop_words": [],
- "logprobs": false,
- "top_logprobs": null,
- "structured_outputs_regex": null,
- "ckpt_dir": null,
- "lora_modules": [],
- "train_type": "full",
- "adapters": [],
- "external_plugins": [],
- "model_kwargs": {},
- "load_args": false,
- "load_data_args": false,
- "packing": false,
- "packing_length": null,
- "packing_num_proc": 1,
- "lazy_tokenize": true,
- "custom_register_path": [],
- "use_hf": false,
- "ignore_args_error": false,
- "use_swift_lora": false,
- "freeze_parameters": [],
- "freeze_parameters_regex": null,
- "freeze_parameters_ratio": 0.0,
- "trainable_parameters": [
- "model.visual.merger",
- "model.visual.deepstack_merger_list"
- ],
- "trainable_parameters_regex": null,
- "freeze_llm": false,
- "freeze_vit": false,
- "freeze_aligner": false,
- "target_modules": [
- "all-linear"
- ],
- "target_regex": null,
- "target_parameters": null,
- "modules_to_save": [],
- "lora_rank": 8,
- "lora_alpha": 32,
- "lora_dropout": 0.05,
- "lora_bias": "none",
- "lora_dtype": null,
- "lorap_lr_ratio": null,
- "use_rslora": false,
- "use_dora": false,
- "lora_ga_batch_size": 2,
- "lora_ga_iters": 2,
- "lora_ga_max_length": 1024,
- "lora_ga_direction": "ArB2r",
- "lora_ga_scale": "stable",
- "lora_ga_stable_gamma": 16,
- "init_weights": true,
- "fourier_n_frequency": 2000,
- "fourier_scaling": 300.0,
- "boft_block_size": 4,
- "boft_block_num": 0,
- "boft_n_butterfly_factor": 1,
- "boft_dropout": 0.0,
- "vera_rank": 256,
- "vera_projection_prng_key": 0,
- "vera_dropout": 0.0,
- "vera_d_initial": 0.1,
- "adapter_act": "gelu",
- "adapter_length": 128,
- "use_galore": false,
- "galore_target_modules": null,
- "galore_rank": 128,
- "galore_update_proj_gap": 50,
- "galore_scale": 1.0,
- "galore_proj_type": "std",
- "galore_optim_per_parameter": false,
- "galore_with_embedding": false,
- "galore_quantization": false,
- "galore_proj_quant": false,
- "galore_proj_bits": 4,
- "galore_proj_group_size": 256,
- "galore_cos_threshold": 0.4,
- "galore_gamma_proj": 2,
- "galore_queue_size": 5,
- "adalora_target_r": 8,
- "adalora_init_r": 12,
- "adalora_tinit": 0,
- "adalora_tfinal": 0,
- "adalora_deltaT": 1,
- "adalora_beta1": 0.85,
- "adalora_beta2": 0.85,
- "adalora_orth_reg_weight": 0.5,
- "llamapro_num_new_blocks": 4,
- "llamapro_num_groups": null,
- "lisa_activated_layers": 0,
- "lisa_step_interval": 20,
- "reft_layer_key": null,
- "reft_layers": null,
- "reft_rank": 4,
- "reft_intervention_type": "LoreftIntervention",
- "reft_args": null,
- "swanlab_token": null,
- "swanlab_project": "ms-swift",
- "swanlab_workspace": null,
- "swanlab_exp_name": null,
- "swanlab_notification_method": null,
- "swanlab_webhook_url": null,
- "swanlab_secret": null,
- "swanlab_mode": "cloud",
- "add_version": true,
- "create_checkpoint_symlink": false,
- "zero_hpz_partition_size": null,
- "deepspeed_autotp_size": null,
- "early_stop_interval": null,
- "rank": 0,
- "global_world_size": 96,
- "local_world_size": 8,
- "model_suffix": "Qwen3-VL-4B-Instruct",
- "model_info": "ModelInfo(model_type='qwen3_vl', model_dir='/apdcephfs_fsgm/share_304220499/mclan/checkpoints/Qwen3-VL-4B-Instruct', torch_dtype=torch.bfloat16, max_model_len=262144, quant_method=None, quant_bits=None, rope_scaling={'mrope_interleaved': True, 'mrope_section': [24, 20, 20], 'rope_type': 'default'}, is_moe_model=False, is_multimodal=True, config=None, task_type='causal_lm', num_labels=None)",
- "model_meta": "ModelMeta(model_type='qwen3_vl', model_groups=[ModelGroup(models=[Model(ms_model_id='Qwen/Qwen3-VL-2B-Instruct', hf_model_id='Qwen/Qwen3-VL-2B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-2B-Thinking', hf_model_id='Qwen/Qwen3-VL-2B-Thinking', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-2B-Instruct-FP8', hf_model_id='Qwen/Qwen3-VL-2B-Instruct-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-2B-Thinking-FP8', hf_model_id='Qwen/Qwen3-VL-2B-Thinking-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-4B-Instruct', hf_model_id='Qwen/Qwen3-VL-4B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-4B-Thinking', hf_model_id='Qwen/Qwen3-VL-4B-Thinking', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-4B-Instruct-FP8', hf_model_id='Qwen/Qwen3-VL-4B-Instruct-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-4B-Thinking-FP8', hf_model_id='Qwen/Qwen3-VL-4B-Thinking-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-8B-Instruct', hf_model_id='Qwen/Qwen3-VL-8B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-8B-Thinking', hf_model_id='Qwen/Qwen3-VL-8B-Thinking', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-8B-Instruct-FP8', hf_model_id='Qwen/Qwen3-VL-8B-Instruct-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-8B-Thinking-FP8', hf_model_id='Qwen/Qwen3-VL-8B-Thinking-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-32B-Instruct', hf_model_id='Qwen/Qwen3-VL-32B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-32B-Thinking', hf_model_id='Qwen/Qwen3-VL-32B-Thinking', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-32B-Instruct-FP8', hf_model_id='Qwen/Qwen3-VL-32B-Instruct-FP8', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen3-VL-32B-Thinking-FP8', hf_model_id='Qwen/Qwen3-VL-32B-Thinking-FP8', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[])], template='qwen3_vl', get_function=<function get_model_tokenizer_qwen3_vl at 0x7f78135487c0>, model_arch=MultiModelKeys(arch_name='qwen3_vl', embedding=None, module_list=None, lm_head=None, q_proj=None, k_proj=None, v_proj=None, o_proj=None, attention=None, mlp=None, down_proj=None, qkv_proj=None, qk_proj=None, qa_proj=None, qb_proj=None, kv_proj=None, kva_proj=None, kvb_proj=None, language_model=['model.language_model', 'lm_head'], aligner=['model.visual.merger', 'model.visual.deepstack_merger_list'], vision_tower=['model.visual'], generator=[]), architectures=['Qwen3VLForConditionalGeneration'], additional_saved_files=[], torch_dtype=None, is_multimodal=True, is_reward=False, is_reranker=False, task_type=None, ignore_patterns=None, requires=['transformers>=4.57', 'qwen_vl_utils>=0.0.14', 'decord'], tags=['vision', 'video'])",
- "model_dir": "/apdcephfs_fsgm/share_304220499/mclan/checkpoints/Qwen3-VL-4B-Instruct",
- "_val_dataset_exists": [],
- "hub": "<class 'swift.hub.hub.MSHub'>",
- "evaluation_strategy": "steps",
- "training_args": "Seq2SeqTrainingArguments(output_dir='/apdcephfs_fsgm/share_304220499/weixian/workspace/Agent_SFT/output/Qwen3-VL-4B-Instruct/agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384/v0-20260131-020453', overwrite_output_dir=False, do_train=False, do_eval=False, do_predict=False, eval_strategy=<IntervalStrategy.NO: 'no'>, prediction_loss_only=False, per_device_train_batch_size=4, per_device_eval_batch_size=1, per_gpu_train_batch_size=None, per_gpu_eval_batch_size=None, gradient_accumulation_steps=1, eval_accumulation_steps=None, eval_delay=0, torch_empty_cache_steps=None, learning_rate=2e-05, weight_decay=0.1, adam_beta1=0.9, adam_beta2=0.95, adam_epsilon=1e-08, max_grad_norm=1.0, num_train_epochs=1.0, max_steps=-1, lr_scheduler_type=<SchedulerType.COSINE: 'cosine'>, lr_scheduler_kwargs=None, warmup_ratio=0.05, warmup_steps=0, log_level='passive', log_level_replica='warning', log_on_each_node=True, logging_dir='/apdcephfs_fsgm/share_304220499/weixian/workspace/Agent_SFT/output/Qwen3-VL-4B-Instruct/agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384/v0-20260131-020453/runs', logging_strategy=<IntervalStrategy.STEPS: 'steps'>, logging_first_step=True, logging_steps=1, logging_nan_inf_filter=True, save_strategy=<SaveStrategy.STEPS: 'steps'>, save_steps=500, save_total_limit=None, save_safetensors=True, save_on_each_node=False, save_only_model=False, restore_callback_states_from_checkpoint=False, no_cuda=False, use_cpu=False, use_mps_device=False, seed=42, data_seed=42, jit_mode_eval=False, bf16=True, fp16=False, fp16_opt_level='O1', half_precision_backend='auto', bf16_full_eval=False, fp16_full_eval=False, tf32=None, local_rank=0, ddp_backend=None, tpu_num_cores=None, tpu_metrics_debug=False, debug=[], dataloader_drop_last=False, eval_steps=10000.0, dataloader_num_workers=8, dataloader_prefetch_factor=2, past_index=-1, run_name='/apdcephfs_fsgm/share_304220499/weixian/workspace/Agent_SFT/output/Qwen3-VL-4B-Instruct/agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384/v0-20260131-020453', disable_tqdm=False, remove_unused_columns=False, label_names=None, load_best_model_at_end=False, metric_for_best_model='loss', greater_is_better=False, ignore_data_skip=False, fsdp=[], fsdp_min_num_params=0, fsdp_config={'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, fsdp_transformer_layer_cls_to_wrap=None, accelerator_config=AcceleratorConfig(split_batches=False, dispatch_batches=False, even_batches=True, use_seedable_sampler=True, non_blocking=False, gradient_accumulation_kwargs=None, use_configured_state=False), parallelism_config=None, deepspeed={'fp16': {'enabled': 'auto', 'loss_scale': 0, 'loss_scale_window': 1000, 'initial_scale_power': 16, 'hysteresis': 2, 'min_loss_scale': 1}, 'bf16': {'enabled': 'auto'}, 'zero_optimization': {'stage': 1, 'offload_optimizer': {'device': 'none', 'pin_memory': True}, 'allgather_partitions': True, 'allgather_bucket_size': 200000000.0, 'overlap_comm': False, 'reduce_scatter': True, 'reduce_bucket_size': 200000000.0, 'contiguous_gradients': True}, 'gradient_accumulation_steps': 'auto', 'gradient_clipping': 'auto', 'steps_per_print': 2000, 'train_batch_size': 'auto', 'train_micro_batch_size_per_gpu': 'auto', 'wall_clock_breakdown': False}, label_smoothing_factor=0.0, optim=<OptimizerNames.ADAMW_TORCH_FUSED: 'adamw_torch_fused'>, optim_args=None, adafactor=False, group_by_length=False, length_column_name='length', report_to=['wandb'], project='huggingface', trackio_space_id='trackio', ddp_find_unused_parameters=None, ddp_bucket_cap_mb=None, ddp_broadcast_buffers=None, dataloader_pin_memory=True, dataloader_persistent_workers=False, skip_memory_metrics=True, use_legacy_prediction_loop=False, push_to_hub=False, resume_from_checkpoint=None, hub_model_id=None, hub_strategy=<HubStrategy.EVERY_SAVE: 'every_save'>, hub_token=None, hub_private_repo=None, hub_always_push=False, hub_revision=None, gradient_checkpointing=True, gradient_checkpointing_kwargs=None, include_inputs_for_metrics=False, include_for_metrics=[], eval_do_concat_batches=True, fp16_backend='auto', push_to_hub_model_id=None, push_to_hub_organization=None, push_to_hub_token=None, mp_parameters='', auto_find_batch_size=False, full_determinism=False, torchdynamo=None, ray_scope='last', ddp_timeout=18000000, torch_compile=False, torch_compile_backend=None, torch_compile_mode=None, include_tokens_per_second=None, include_num_input_tokens_seen=None, neftune_noise_alpha=None, optim_target_modules=None, batch_eval_metrics=False, eval_on_start=False, use_liger_kernel=True, liger_kernel_config=None, eval_use_gather_object=False, average_tokens_across_devices=None, sortish_sampler=False, predict_with_generate=False, generation_max_length=None, generation_num_beams=None, generation_config=None, tuner_backend='peft', vit_gradient_checkpointing=True, router_aux_loss_coef=0.0, enable_dft_loss=False, enable_channel_loss=False, check_model=True, acc_strategy='token', train_dataloader_shuffle=True, max_epochs=None, aligner_lr=1e-05, vit_lr=1e-05, use_logits_to_keep=None, ds3_gather_for_generation=True, resume_only_model=False, optimizer='multimodal', loss_type=None, metric=None, eval_use_evalscope=False, eval_dataset=[], eval_dataset_args=None, eval_limit=None, eval_generation_config=None, extra_eval_args=None, use_flash_ckpt=False, sft_alpha=0, chord_sft_dataset=[], chord_sft_per_device_train_batch_size=None, chord_enable_phi_function=False, chord_mu_warmup_steps=None, chord_mu_decay_steps=None, chord_mu_peak=None, chord_mu_valley=None, train_type='full', local_repo_path=None, galore_config=None, task_type='causal_lm', problem_type=None)"
- }
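The `bs384` suffix in the run name follows from the fields above: per-device batch 4, 96 ranks, gradient accumulation 1. A minimal sketch that recovers the key hyperparameters from a saved `args.json`:

```python
import json

# Read the training arguments dumped alongside the checkpoint.
with open("args.json") as f:
    args = json.load(f)

# Per-module learning rates (ms-swift trains LM, ViT, and aligner at different LRs).
print(args["learning_rate"], args["vit_lr"], args["aligner_lr"])  # 2e-05 1e-05 1e-05

# Effective batch size = per-device batch * world size * gradient accumulation.
eff_bs = (args["per_device_train_batch_size"]
          * args["global_world_size"]
          * args["gradient_accumulation_steps"])
print(eff_bs)  # 4 * 96 * 1 = 384 -> the "bs384" in the run name
```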
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/chat_template.jinja DELETED
@@ -1,120 +0,0 @@
- {%- if tools %}
- {{- '<|im_start|>system\n' }}
- {%- if messages[0].role == 'system' %}
- {%- if messages[0].content is string %}
- {{- messages[0].content }}
- {%- else %}
- {%- for content in messages[0].content %}
- {%- if 'text' in content %}
- {{- content.text }}
- {%- endif %}
- {%- endfor %}
- {%- endif %}
- {{- '\n\n' }}
- {%- endif %}
- {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
- {%- for tool in tools %}
- {{- "\n" }}
- {{- tool | tojson }}
- {%- endfor %}
- {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
- {%- else %}
- {%- if messages[0].role == 'system' %}
- {{- '<|im_start|>system\n' }}
- {%- if messages[0].content is string %}
- {{- messages[0].content }}
- {%- else %}
- {%- for content in messages[0].content %}
- {%- if 'text' in content %}
- {{- content.text }}
- {%- endif %}
- {%- endfor %}
- {%- endif %}
- {{- '<|im_end|>\n' }}
- {%- endif %}
- {%- endif %}
- {%- set image_count = namespace(value=0) %}
- {%- set video_count = namespace(value=0) %}
- {%- for message in messages %}
- {%- if message.role == "user" %}
- {{- '<|im_start|>' + message.role + '\n' }}
- {%- if message.content is string %}
- {{- message.content }}
- {%- else %}
- {%- for content in message.content %}
- {%- if content.type == 'image' or 'image' in content or 'image_url' in content %}
- {%- set image_count.value = image_count.value + 1 %}
- {%- if add_vision_id %}Picture {{ image_count.value }}: {% endif -%}
- <|vision_start|><|image_pad|><|vision_end|>
- {%- elif content.type == 'video' or 'video' in content %}
- {%- set video_count.value = video_count.value + 1 %}
- {%- if add_vision_id %}Video {{ video_count.value }}: {% endif -%}
- <|vision_start|><|video_pad|><|vision_end|>
- {%- elif 'text' in content %}
- {{- content.text }}
- {%- endif %}
- {%- endfor %}
- {%- endif %}
- {{- '<|im_end|>\n' }}
- {%- elif message.role == "assistant" %}
- {{- '<|im_start|>' + message.role + '\n' }}
- {%- if message.content is string %}
- {{- message.content }}
- {%- else %}
- {%- for content_item in message.content %}
- {%- if 'text' in content_item %}
- {{- content_item.text }}
- {%- endif %}
- {%- endfor %}
- {%- endif %}
- {%- if message.tool_calls %}
- {%- for tool_call in message.tool_calls %}
- {%- if (loop.first and message.content) or (not loop.first) %}
- {{- '\n' }}
- {%- endif %}
- {%- if tool_call.function %}
- {%- set tool_call = tool_call.function %}
- {%- endif %}
- {{- '<tool_call>\n{"name": "' }}
- {{- tool_call.name }}
- {{- '", "arguments": ' }}
- {%- if tool_call.arguments is string %}
- {{- tool_call.arguments }}
- {%- else %}
- {{- tool_call.arguments | tojson }}
- {%- endif %}
- {{- '}\n</tool_call>' }}
- {%- endfor %}
- {%- endif %}
- {{- '<|im_end|>\n' }}
- {%- elif message.role == "tool" %}
- {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
- {{- '<|im_start|>user' }}
- {%- endif %}
- {{- '\n<tool_response>\n' }}
- {%- if message.content is string %}
- {{- message.content }}
- {%- else %}
- {%- for content in message.content %}
- {%- if content.type == 'image' or 'image' in content or 'image_url' in content %}
- {%- set image_count.value = image_count.value + 1 %}
- {%- if add_vision_id %}Picture {{ image_count.value }}: {% endif -%}
- <|vision_start|><|image_pad|><|vision_end|>
- {%- elif content.type == 'video' or 'video' in content %}
- {%- set video_count.value = video_count.value + 1 %}
- {%- if add_vision_id %}Video {{ video_count.value }}: {% endif -%}
- <|vision_start|><|video_pad|><|vision_end|>
- {%- elif 'text' in content %}
- {{- content.text }}
- {%- endif %}
- {%- endfor %}
- {%- endif %}
- {{- '\n</tool_response>' }}
- {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
- {{- '<|im_end|>\n' }}
- {%- endif %}
- {%- endif %}
- {%- endfor %}
- {%- if add_generation_prompt %}
- {{- '<|im_start|>assistant\n' }}
- {%- endif %}
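This template is what `tokenizer.apply_chat_template` renders; a minimal sketch exercising the tool-call branch, with a placeholder checkpoint path and a made-up tool schema:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./ckpt")  # placeholder path

# A toy tool spec; any OpenAI-style function schema works here.
tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Look up current weather for a city.",
        "parameters": {"type": "object",
                       "properties": {"city": {"type": "string"}},
                       "required": ["city"]},
    },
}]
messages = [{"role": "user", "content": "What's the weather in Paris?"}]

# Renders the <tools>...</tools> system block plus the trailing
# '<|im_start|>assistant\n' generation prompt defined by the template above.
text = tok.apply_chat_template(messages, tools=tools,
                               add_generation_prompt=True, tokenize=False)
print(text)
```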
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/config.json DELETED
@@ -1,70 +0,0 @@
- {
- "architectures": [
- "Qwen3VLForConditionalGeneration"
- ],
- "dtype": "bfloat16",
- "eos_token_id": 151645,
- "hidden_size": 2560,
- "image_token_id": 151655,
- "model_type": "qwen3_vl",
- "pad_token_id": 151643,
- "text_config": {
- "attention_bias": false,
- "attention_dropout": 0.0,
- "bos_token_id": 151643,
- "dtype": "bfloat16",
- "eos_token_id": 151645,
- "head_dim": 128,
- "hidden_act": "silu",
- "hidden_size": 2560,
- "initializer_range": 0.02,
- "intermediate_size": 9728,
- "max_position_embeddings": 262144,
- "model_type": "qwen3_vl_text",
- "num_attention_heads": 32,
- "num_hidden_layers": 36,
- "num_key_value_heads": 8,
- "pad_token_id": 151643,
- "rms_norm_eps": 1e-06,
- "rope_scaling": {
- "mrope_interleaved": true,
- "mrope_section": [
- 24,
- 20,
- 20
- ],
- "rope_type": "default"
- },
- "rope_theta": 5000000,
- "tie_word_embeddings": true,
- "use_cache": false,
- "vocab_size": 151936
- },
- "tie_word_embeddings": true,
- "transformers_version": "4.57.1",
- "video_token_id": 151656,
- "vision_config": {
- "deepstack_visual_indexes": [
- 5,
- 11,
- 17
- ],
- "depth": 24,
- "dtype": "bfloat16",
- "hidden_act": "gelu_pytorch_tanh",
- "hidden_size": 1024,
- "in_channels": 3,
- "initializer_range": 0.02,
- "intermediate_size": 4096,
- "model_type": "qwen3_vl",
- "num_heads": 16,
- "num_position_embeddings": 2304,
- "out_hidden_size": 2560,
- "pad_token_id": 151643,
- "patch_size": 16,
- "spatial_merge_size": 2,
- "temporal_patch_size": 2
- },
- "vision_end_token_id": 151653,
- "vision_start_token_id": 151652
- }
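Several of the sizes above can be read back programmatically; a minimal sketch using `AutoConfig`, assuming transformers >= 4.57 (per the `transformers_version` field) and a local copy of the checkpoint:

```python
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("./ckpt")  # placeholder path

text, vision = cfg.text_config, cfg.vision_config
print(text.num_hidden_layers, text.hidden_size)  # 36 layers at width 2560
print(vision.depth, vision.out_hidden_size)      # 24-layer ViT projecting to 2560

# GQA layout: 32 query heads share 8 KV heads at head_dim 128.
print(text.num_attention_heads, text.num_key_value_heads, text.head_dim)
```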
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/generation_config.json DELETED
@@ -1,13 +0,0 @@
- {
- "bos_token_id": 151643,
- "do_sample": true,
- "eos_token_id": [
- 151645,
- 151643
- ],
- "pad_token_id": 151643,
- "temperature": 0.7,
- "top_k": 20,
- "top_p": 0.8,
- "transformers_version": "4.57.1"
- }
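The same defaults can be rebuilt with transformers' `GenerationConfig`; a minimal sketch:

```python
from transformers import GenerationConfig

# Mirror of the deleted generation_config.json.
gen_cfg = GenerationConfig(
    bos_token_id=151643,
    do_sample=True,
    eos_token_id=[151645, 151643],
    pad_token_id=151643,
    temperature=0.7,
    top_k=20,
    top_p=0.8,
)
print(gen_cfg.to_json_string())
```

Note that `args.json` above pins eval-time decoding to `temperature: 0.0` with `max_new_tokens: 64`, overriding these sampling defaults.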
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/latest DELETED
@@ -1 +0,0 @@
- global_step500
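`latest` is DeepSpeed's tag file: it names the `global_step500` ZeRO checkpoint that the bundled `zero_to_fp32.py` consolidates into plain fp32 weights. A minimal sketch via the DeepSpeed helper that the script wraps (the second argument is an output directory in recent DeepSpeed releases, a single file in older ones):

```python
# Consolidate ZeRO-partitioned shards under global_step500/ into fp32 weights.
from deepspeed.utils.zero_to_fp32 import convert_zero_checkpoint_to_fp32_state_dict

convert_zero_checkpoint_to_fp32_state_dict(
    ".",                   # checkpoint dir containing "latest" and global_step500/
    "fp32_out",            # hypothetical output location
    tag="global_step500",  # matches the contents of "latest"
)
```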
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:b130fa2b71319f11479489c2f6c4e9cff8605cd4cd2e0ee3f828178aef2f6088
- size 4990497880
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:e717d3c72f84363f5efcae0edaa621f842a3e36299567ce5501e77e71ed8e94f
- size 4663133960
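Both shard entries are Git LFS pointers: `oid` is the SHA-256 of the real file and `size` its byte count. A minimal sketch for verifying a downloaded shard against its pointer:

```python
import hashlib
import os

def lfs_sha256(path: str) -> str:
    """Stream a large file and return the hex SHA-256 recorded in its LFS pointer."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

shard = "model-00001-of-00002.safetensors"
print(os.path.getsize(shard))  # expect 4990497880 per the pointer above
print(lfs_sha256(shard))       # expect b130fa2b71319f11479489c2f6c4e9cf...
```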
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/model.safetensors.index.json DELETED
@@ -1,722 +0,0 @@
- {
- "metadata": {
- "total_parameters": 4437815808,
- "total_size": 9653543936
- },
- "weight_map": {
- "lm_head.weight": "model-00002-of-00002.safetensors",
- "model.language_model.embed_tokens.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.16.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.16.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
- "model.language_model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
- "model.language_model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
261
- "model.language_model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
262
- "model.language_model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
263
- "model.language_model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
264
- "model.language_model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
265
- "model.language_model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
266
- "model.language_model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
267
- "model.language_model.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
268
- "model.language_model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
269
- "model.language_model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
270
- "model.language_model.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
271
- "model.language_model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
272
- "model.language_model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
273
- "model.language_model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
274
- "model.language_model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
275
- "model.language_model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
276
- "model.language_model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
277
- "model.language_model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
278
- "model.language_model.layers.30.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
279
- "model.language_model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
280
- "model.language_model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
281
- "model.language_model.layers.30.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
282
- "model.language_model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
283
- "model.language_model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
284
- "model.language_model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
285
- "model.language_model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
286
- "model.language_model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
287
- "model.language_model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
288
- "model.language_model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
289
- "model.language_model.layers.31.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
290
- "model.language_model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
291
- "model.language_model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
292
- "model.language_model.layers.31.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
293
- "model.language_model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
294
- "model.language_model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
295
- "model.language_model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
296
- "model.language_model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
297
- "model.language_model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
298
- "model.language_model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
299
- "model.language_model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
300
- "model.language_model.layers.32.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
301
- "model.language_model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
302
- "model.language_model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
303
- "model.language_model.layers.32.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
304
- "model.language_model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
305
- "model.language_model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
306
- "model.language_model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
307
- "model.language_model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
308
- "model.language_model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
309
- "model.language_model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
310
- "model.language_model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
311
- "model.language_model.layers.33.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
312
- "model.language_model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
313
- "model.language_model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
314
- "model.language_model.layers.33.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
315
- "model.language_model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
316
- "model.language_model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
317
- "model.language_model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
318
- "model.language_model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
319
- "model.language_model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
320
- "model.language_model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
321
- "model.language_model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
322
- "model.language_model.layers.34.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
323
- "model.language_model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
324
- "model.language_model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
325
- "model.language_model.layers.34.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
326
- "model.language_model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
327
- "model.language_model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
328
- "model.language_model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
329
- "model.language_model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
330
- "model.language_model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
331
- "model.language_model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
332
- "model.language_model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
333
- "model.language_model.layers.35.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
334
- "model.language_model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
335
- "model.language_model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
336
- "model.language_model.layers.35.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
337
- "model.language_model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
338
- "model.language_model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
339
- "model.language_model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
340
- "model.language_model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
341
- "model.language_model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
342
- "model.language_model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
343
- "model.language_model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
344
- "model.language_model.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
345
- "model.language_model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
346
- "model.language_model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
347
- "model.language_model.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
348
- "model.language_model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
349
- "model.language_model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
350
- "model.language_model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
351
- "model.language_model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
352
- "model.language_model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
353
- "model.language_model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
354
- "model.language_model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
355
- "model.language_model.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
356
- "model.language_model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
357
- "model.language_model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
358
- "model.language_model.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
359
- "model.language_model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
360
- "model.language_model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
361
- "model.language_model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
362
- "model.language_model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
363
- "model.language_model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
364
- "model.language_model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
365
- "model.language_model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
366
- "model.language_model.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
367
- "model.language_model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
368
- "model.language_model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
369
- "model.language_model.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
370
- "model.language_model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
371
- "model.language_model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
372
- "model.language_model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
373
- "model.language_model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
374
- "model.language_model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
375
- "model.language_model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
376
- "model.language_model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
377
- "model.language_model.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
378
- "model.language_model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
379
- "model.language_model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
380
- "model.language_model.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
381
- "model.language_model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
382
- "model.language_model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
383
- "model.language_model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
384
- "model.language_model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
385
- "model.language_model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
386
- "model.language_model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
387
- "model.language_model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
388
- "model.language_model.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
389
- "model.language_model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
390
- "model.language_model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
391
- "model.language_model.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
392
- "model.language_model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
393
- "model.language_model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
394
- "model.language_model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
395
- "model.language_model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
396
- "model.language_model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
397
- "model.language_model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
398
- "model.language_model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
399
- "model.language_model.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
400
- "model.language_model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
401
- "model.language_model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
402
- "model.language_model.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
403
- "model.language_model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
404
- "model.language_model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
405
- "model.language_model.norm.weight": "model-00002-of-00002.safetensors",
406
- "model.visual.blocks.0.attn.proj.bias": "model-00001-of-00002.safetensors",
407
- "model.visual.blocks.0.attn.proj.weight": "model-00001-of-00002.safetensors",
408
- "model.visual.blocks.0.attn.qkv.bias": "model-00001-of-00002.safetensors",
409
- "model.visual.blocks.0.attn.qkv.weight": "model-00001-of-00002.safetensors",
410
- "model.visual.blocks.0.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
411
- "model.visual.blocks.0.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
412
- "model.visual.blocks.0.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
413
- "model.visual.blocks.0.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
414
- "model.visual.blocks.0.norm1.bias": "model-00001-of-00002.safetensors",
415
- "model.visual.blocks.0.norm1.weight": "model-00001-of-00002.safetensors",
416
- "model.visual.blocks.0.norm2.bias": "model-00001-of-00002.safetensors",
417
- "model.visual.blocks.0.norm2.weight": "model-00001-of-00002.safetensors",
418
- "model.visual.blocks.1.attn.proj.bias": "model-00001-of-00002.safetensors",
419
- "model.visual.blocks.1.attn.proj.weight": "model-00001-of-00002.safetensors",
420
- "model.visual.blocks.1.attn.qkv.bias": "model-00001-of-00002.safetensors",
421
- "model.visual.blocks.1.attn.qkv.weight": "model-00001-of-00002.safetensors",
422
- "model.visual.blocks.1.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
423
- "model.visual.blocks.1.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
424
- "model.visual.blocks.1.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
425
- "model.visual.blocks.1.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
426
- "model.visual.blocks.1.norm1.bias": "model-00001-of-00002.safetensors",
427
- "model.visual.blocks.1.norm1.weight": "model-00001-of-00002.safetensors",
428
- "model.visual.blocks.1.norm2.bias": "model-00001-of-00002.safetensors",
429
- "model.visual.blocks.1.norm2.weight": "model-00001-of-00002.safetensors",
430
- "model.visual.blocks.10.attn.proj.bias": "model-00001-of-00002.safetensors",
431
- "model.visual.blocks.10.attn.proj.weight": "model-00001-of-00002.safetensors",
432
- "model.visual.blocks.10.attn.qkv.bias": "model-00001-of-00002.safetensors",
433
- "model.visual.blocks.10.attn.qkv.weight": "model-00001-of-00002.safetensors",
434
- "model.visual.blocks.10.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
435
- "model.visual.blocks.10.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
436
- "model.visual.blocks.10.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
437
- "model.visual.blocks.10.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
438
- "model.visual.blocks.10.norm1.bias": "model-00001-of-00002.safetensors",
439
- "model.visual.blocks.10.norm1.weight": "model-00001-of-00002.safetensors",
440
- "model.visual.blocks.10.norm2.bias": "model-00001-of-00002.safetensors",
441
- "model.visual.blocks.10.norm2.weight": "model-00001-of-00002.safetensors",
442
- "model.visual.blocks.11.attn.proj.bias": "model-00001-of-00002.safetensors",
443
- "model.visual.blocks.11.attn.proj.weight": "model-00001-of-00002.safetensors",
444
- "model.visual.blocks.11.attn.qkv.bias": "model-00001-of-00002.safetensors",
445
- "model.visual.blocks.11.attn.qkv.weight": "model-00001-of-00002.safetensors",
446
- "model.visual.blocks.11.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
447
- "model.visual.blocks.11.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
448
- "model.visual.blocks.11.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
449
- "model.visual.blocks.11.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
450
- "model.visual.blocks.11.norm1.bias": "model-00001-of-00002.safetensors",
451
- "model.visual.blocks.11.norm1.weight": "model-00001-of-00002.safetensors",
452
- "model.visual.blocks.11.norm2.bias": "model-00001-of-00002.safetensors",
453
- "model.visual.blocks.11.norm2.weight": "model-00001-of-00002.safetensors",
454
- "model.visual.blocks.12.attn.proj.bias": "model-00001-of-00002.safetensors",
455
- "model.visual.blocks.12.attn.proj.weight": "model-00001-of-00002.safetensors",
456
- "model.visual.blocks.12.attn.qkv.bias": "model-00001-of-00002.safetensors",
457
- "model.visual.blocks.12.attn.qkv.weight": "model-00001-of-00002.safetensors",
458
- "model.visual.blocks.12.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
459
- "model.visual.blocks.12.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
460
- "model.visual.blocks.12.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
461
- "model.visual.blocks.12.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
462
- "model.visual.blocks.12.norm1.bias": "model-00001-of-00002.safetensors",
463
- "model.visual.blocks.12.norm1.weight": "model-00001-of-00002.safetensors",
464
- "model.visual.blocks.12.norm2.bias": "model-00001-of-00002.safetensors",
465
- "model.visual.blocks.12.norm2.weight": "model-00001-of-00002.safetensors",
466
- "model.visual.blocks.13.attn.proj.bias": "model-00001-of-00002.safetensors",
467
- "model.visual.blocks.13.attn.proj.weight": "model-00001-of-00002.safetensors",
468
- "model.visual.blocks.13.attn.qkv.bias": "model-00001-of-00002.safetensors",
469
- "model.visual.blocks.13.attn.qkv.weight": "model-00001-of-00002.safetensors",
470
- "model.visual.blocks.13.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
471
- "model.visual.blocks.13.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
472
- "model.visual.blocks.13.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
473
- "model.visual.blocks.13.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
474
- "model.visual.blocks.13.norm1.bias": "model-00001-of-00002.safetensors",
475
- "model.visual.blocks.13.norm1.weight": "model-00001-of-00002.safetensors",
476
- "model.visual.blocks.13.norm2.bias": "model-00001-of-00002.safetensors",
477
- "model.visual.blocks.13.norm2.weight": "model-00001-of-00002.safetensors",
478
- "model.visual.blocks.14.attn.proj.bias": "model-00001-of-00002.safetensors",
479
- "model.visual.blocks.14.attn.proj.weight": "model-00001-of-00002.safetensors",
480
- "model.visual.blocks.14.attn.qkv.bias": "model-00001-of-00002.safetensors",
481
- "model.visual.blocks.14.attn.qkv.weight": "model-00001-of-00002.safetensors",
482
- "model.visual.blocks.14.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
483
- "model.visual.blocks.14.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
484
- "model.visual.blocks.14.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
485
- "model.visual.blocks.14.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
486
- "model.visual.blocks.14.norm1.bias": "model-00001-of-00002.safetensors",
487
- "model.visual.blocks.14.norm1.weight": "model-00001-of-00002.safetensors",
488
- "model.visual.blocks.14.norm2.bias": "model-00001-of-00002.safetensors",
489
- "model.visual.blocks.14.norm2.weight": "model-00001-of-00002.safetensors",
490
- "model.visual.blocks.15.attn.proj.bias": "model-00001-of-00002.safetensors",
491
- "model.visual.blocks.15.attn.proj.weight": "model-00001-of-00002.safetensors",
492
- "model.visual.blocks.15.attn.qkv.bias": "model-00001-of-00002.safetensors",
493
- "model.visual.blocks.15.attn.qkv.weight": "model-00001-of-00002.safetensors",
494
- "model.visual.blocks.15.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
495
- "model.visual.blocks.15.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
496
- "model.visual.blocks.15.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
497
- "model.visual.blocks.15.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
498
- "model.visual.blocks.15.norm1.bias": "model-00001-of-00002.safetensors",
499
- "model.visual.blocks.15.norm1.weight": "model-00001-of-00002.safetensors",
500
- "model.visual.blocks.15.norm2.bias": "model-00001-of-00002.safetensors",
501
- "model.visual.blocks.15.norm2.weight": "model-00001-of-00002.safetensors",
502
- "model.visual.blocks.16.attn.proj.bias": "model-00001-of-00002.safetensors",
503
- "model.visual.blocks.16.attn.proj.weight": "model-00001-of-00002.safetensors",
504
- "model.visual.blocks.16.attn.qkv.bias": "model-00001-of-00002.safetensors",
505
- "model.visual.blocks.16.attn.qkv.weight": "model-00001-of-00002.safetensors",
506
- "model.visual.blocks.16.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
507
- "model.visual.blocks.16.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
508
- "model.visual.blocks.16.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
509
- "model.visual.blocks.16.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
510
- "model.visual.blocks.16.norm1.bias": "model-00001-of-00002.safetensors",
511
- "model.visual.blocks.16.norm1.weight": "model-00001-of-00002.safetensors",
512
- "model.visual.blocks.16.norm2.bias": "model-00001-of-00002.safetensors",
513
- "model.visual.blocks.16.norm2.weight": "model-00001-of-00002.safetensors",
514
- "model.visual.blocks.17.attn.proj.bias": "model-00001-of-00002.safetensors",
515
- "model.visual.blocks.17.attn.proj.weight": "model-00001-of-00002.safetensors",
516
- "model.visual.blocks.17.attn.qkv.bias": "model-00001-of-00002.safetensors",
517
- "model.visual.blocks.17.attn.qkv.weight": "model-00001-of-00002.safetensors",
518
- "model.visual.blocks.17.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
519
- "model.visual.blocks.17.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
520
- "model.visual.blocks.17.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
521
- "model.visual.blocks.17.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
522
- "model.visual.blocks.17.norm1.bias": "model-00001-of-00002.safetensors",
523
- "model.visual.blocks.17.norm1.weight": "model-00001-of-00002.safetensors",
524
- "model.visual.blocks.17.norm2.bias": "model-00001-of-00002.safetensors",
525
- "model.visual.blocks.17.norm2.weight": "model-00001-of-00002.safetensors",
526
- "model.visual.blocks.18.attn.proj.bias": "model-00001-of-00002.safetensors",
527
- "model.visual.blocks.18.attn.proj.weight": "model-00001-of-00002.safetensors",
528
- "model.visual.blocks.18.attn.qkv.bias": "model-00001-of-00002.safetensors",
529
- "model.visual.blocks.18.attn.qkv.weight": "model-00001-of-00002.safetensors",
530
- "model.visual.blocks.18.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
531
- "model.visual.blocks.18.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
532
- "model.visual.blocks.18.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
533
- "model.visual.blocks.18.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
534
- "model.visual.blocks.18.norm1.bias": "model-00001-of-00002.safetensors",
535
- "model.visual.blocks.18.norm1.weight": "model-00001-of-00002.safetensors",
536
- "model.visual.blocks.18.norm2.bias": "model-00001-of-00002.safetensors",
537
- "model.visual.blocks.18.norm2.weight": "model-00001-of-00002.safetensors",
538
- "model.visual.blocks.19.attn.proj.bias": "model-00001-of-00002.safetensors",
539
- "model.visual.blocks.19.attn.proj.weight": "model-00001-of-00002.safetensors",
540
- "model.visual.blocks.19.attn.qkv.bias": "model-00001-of-00002.safetensors",
541
- "model.visual.blocks.19.attn.qkv.weight": "model-00001-of-00002.safetensors",
542
- "model.visual.blocks.19.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
543
- "model.visual.blocks.19.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
544
- "model.visual.blocks.19.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
545
- "model.visual.blocks.19.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
546
- "model.visual.blocks.19.norm1.bias": "model-00001-of-00002.safetensors",
547
- "model.visual.blocks.19.norm1.weight": "model-00001-of-00002.safetensors",
548
- "model.visual.blocks.19.norm2.bias": "model-00001-of-00002.safetensors",
549
- "model.visual.blocks.19.norm2.weight": "model-00001-of-00002.safetensors",
550
- "model.visual.blocks.2.attn.proj.bias": "model-00001-of-00002.safetensors",
551
- "model.visual.blocks.2.attn.proj.weight": "model-00001-of-00002.safetensors",
552
- "model.visual.blocks.2.attn.qkv.bias": "model-00001-of-00002.safetensors",
553
- "model.visual.blocks.2.attn.qkv.weight": "model-00001-of-00002.safetensors",
554
- "model.visual.blocks.2.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
555
- "model.visual.blocks.2.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
556
- "model.visual.blocks.2.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
557
- "model.visual.blocks.2.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
558
- "model.visual.blocks.2.norm1.bias": "model-00001-of-00002.safetensors",
559
- "model.visual.blocks.2.norm1.weight": "model-00001-of-00002.safetensors",
560
- "model.visual.blocks.2.norm2.bias": "model-00001-of-00002.safetensors",
561
- "model.visual.blocks.2.norm2.weight": "model-00001-of-00002.safetensors",
562
- "model.visual.blocks.20.attn.proj.bias": "model-00001-of-00002.safetensors",
563
- "model.visual.blocks.20.attn.proj.weight": "model-00001-of-00002.safetensors",
564
- "model.visual.blocks.20.attn.qkv.bias": "model-00001-of-00002.safetensors",
565
- "model.visual.blocks.20.attn.qkv.weight": "model-00001-of-00002.safetensors",
566
- "model.visual.blocks.20.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
567
- "model.visual.blocks.20.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
568
- "model.visual.blocks.20.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
569
- "model.visual.blocks.20.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
570
- "model.visual.blocks.20.norm1.bias": "model-00001-of-00002.safetensors",
571
- "model.visual.blocks.20.norm1.weight": "model-00001-of-00002.safetensors",
572
- "model.visual.blocks.20.norm2.bias": "model-00001-of-00002.safetensors",
573
- "model.visual.blocks.20.norm2.weight": "model-00001-of-00002.safetensors",
574
- "model.visual.blocks.21.attn.proj.bias": "model-00001-of-00002.safetensors",
575
- "model.visual.blocks.21.attn.proj.weight": "model-00001-of-00002.safetensors",
576
- "model.visual.blocks.21.attn.qkv.bias": "model-00001-of-00002.safetensors",
577
- "model.visual.blocks.21.attn.qkv.weight": "model-00001-of-00002.safetensors",
578
- "model.visual.blocks.21.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
579
- "model.visual.blocks.21.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
580
- "model.visual.blocks.21.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
581
- "model.visual.blocks.21.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
582
- "model.visual.blocks.21.norm1.bias": "model-00001-of-00002.safetensors",
583
- "model.visual.blocks.21.norm1.weight": "model-00001-of-00002.safetensors",
584
- "model.visual.blocks.21.norm2.bias": "model-00001-of-00002.safetensors",
585
- "model.visual.blocks.21.norm2.weight": "model-00001-of-00002.safetensors",
586
- "model.visual.blocks.22.attn.proj.bias": "model-00001-of-00002.safetensors",
587
- "model.visual.blocks.22.attn.proj.weight": "model-00001-of-00002.safetensors",
588
- "model.visual.blocks.22.attn.qkv.bias": "model-00001-of-00002.safetensors",
589
- "model.visual.blocks.22.attn.qkv.weight": "model-00001-of-00002.safetensors",
590
- "model.visual.blocks.22.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
591
- "model.visual.blocks.22.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
592
- "model.visual.blocks.22.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
593
- "model.visual.blocks.22.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
594
- "model.visual.blocks.22.norm1.bias": "model-00001-of-00002.safetensors",
595
- "model.visual.blocks.22.norm1.weight": "model-00001-of-00002.safetensors",
596
- "model.visual.blocks.22.norm2.bias": "model-00001-of-00002.safetensors",
597
- "model.visual.blocks.22.norm2.weight": "model-00001-of-00002.safetensors",
598
- "model.visual.blocks.23.attn.proj.bias": "model-00001-of-00002.safetensors",
599
- "model.visual.blocks.23.attn.proj.weight": "model-00001-of-00002.safetensors",
600
- "model.visual.blocks.23.attn.qkv.bias": "model-00001-of-00002.safetensors",
601
- "model.visual.blocks.23.attn.qkv.weight": "model-00001-of-00002.safetensors",
602
- "model.visual.blocks.23.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
603
- "model.visual.blocks.23.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
604
- "model.visual.blocks.23.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
605
- "model.visual.blocks.23.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
606
- "model.visual.blocks.23.norm1.bias": "model-00001-of-00002.safetensors",
607
- "model.visual.blocks.23.norm1.weight": "model-00001-of-00002.safetensors",
608
- "model.visual.blocks.23.norm2.bias": "model-00001-of-00002.safetensors",
609
- "model.visual.blocks.23.norm2.weight": "model-00001-of-00002.safetensors",
610
- "model.visual.blocks.3.attn.proj.bias": "model-00001-of-00002.safetensors",
611
- "model.visual.blocks.3.attn.proj.weight": "model-00001-of-00002.safetensors",
612
- "model.visual.blocks.3.attn.qkv.bias": "model-00001-of-00002.safetensors",
613
- "model.visual.blocks.3.attn.qkv.weight": "model-00001-of-00002.safetensors",
614
- "model.visual.blocks.3.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
615
- "model.visual.blocks.3.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
616
- "model.visual.blocks.3.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
617
- "model.visual.blocks.3.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
618
- "model.visual.blocks.3.norm1.bias": "model-00001-of-00002.safetensors",
619
- "model.visual.blocks.3.norm1.weight": "model-00001-of-00002.safetensors",
620
- "model.visual.blocks.3.norm2.bias": "model-00001-of-00002.safetensors",
621
- "model.visual.blocks.3.norm2.weight": "model-00001-of-00002.safetensors",
622
- "model.visual.blocks.4.attn.proj.bias": "model-00001-of-00002.safetensors",
623
- "model.visual.blocks.4.attn.proj.weight": "model-00001-of-00002.safetensors",
624
- "model.visual.blocks.4.attn.qkv.bias": "model-00001-of-00002.safetensors",
625
- "model.visual.blocks.4.attn.qkv.weight": "model-00001-of-00002.safetensors",
626
- "model.visual.blocks.4.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
627
- "model.visual.blocks.4.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
628
- "model.visual.blocks.4.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
629
- "model.visual.blocks.4.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
630
- "model.visual.blocks.4.norm1.bias": "model-00001-of-00002.safetensors",
631
- "model.visual.blocks.4.norm1.weight": "model-00001-of-00002.safetensors",
632
- "model.visual.blocks.4.norm2.bias": "model-00001-of-00002.safetensors",
633
- "model.visual.blocks.4.norm2.weight": "model-00001-of-00002.safetensors",
634
- "model.visual.blocks.5.attn.proj.bias": "model-00001-of-00002.safetensors",
635
- "model.visual.blocks.5.attn.proj.weight": "model-00001-of-00002.safetensors",
636
- "model.visual.blocks.5.attn.qkv.bias": "model-00001-of-00002.safetensors",
637
- "model.visual.blocks.5.attn.qkv.weight": "model-00001-of-00002.safetensors",
638
- "model.visual.blocks.5.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
639
- "model.visual.blocks.5.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
640
- "model.visual.blocks.5.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
641
- "model.visual.blocks.5.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
642
- "model.visual.blocks.5.norm1.bias": "model-00001-of-00002.safetensors",
643
- "model.visual.blocks.5.norm1.weight": "model-00001-of-00002.safetensors",
644
- "model.visual.blocks.5.norm2.bias": "model-00001-of-00002.safetensors",
645
- "model.visual.blocks.5.norm2.weight": "model-00001-of-00002.safetensors",
646
- "model.visual.blocks.6.attn.proj.bias": "model-00001-of-00002.safetensors",
647
- "model.visual.blocks.6.attn.proj.weight": "model-00001-of-00002.safetensors",
648
- "model.visual.blocks.6.attn.qkv.bias": "model-00001-of-00002.safetensors",
649
- "model.visual.blocks.6.attn.qkv.weight": "model-00001-of-00002.safetensors",
650
- "model.visual.blocks.6.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
651
- "model.visual.blocks.6.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
652
- "model.visual.blocks.6.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
653
- "model.visual.blocks.6.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
654
- "model.visual.blocks.6.norm1.bias": "model-00001-of-00002.safetensors",
655
- "model.visual.blocks.6.norm1.weight": "model-00001-of-00002.safetensors",
656
- "model.visual.blocks.6.norm2.bias": "model-00001-of-00002.safetensors",
657
- "model.visual.blocks.6.norm2.weight": "model-00001-of-00002.safetensors",
658
- "model.visual.blocks.7.attn.proj.bias": "model-00001-of-00002.safetensors",
659
- "model.visual.blocks.7.attn.proj.weight": "model-00001-of-00002.safetensors",
660
- "model.visual.blocks.7.attn.qkv.bias": "model-00001-of-00002.safetensors",
661
- "model.visual.blocks.7.attn.qkv.weight": "model-00001-of-00002.safetensors",
662
- "model.visual.blocks.7.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
663
- "model.visual.blocks.7.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
664
- "model.visual.blocks.7.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
665
- "model.visual.blocks.7.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
666
- "model.visual.blocks.7.norm1.bias": "model-00001-of-00002.safetensors",
667
- "model.visual.blocks.7.norm1.weight": "model-00001-of-00002.safetensors",
668
- "model.visual.blocks.7.norm2.bias": "model-00001-of-00002.safetensors",
669
- "model.visual.blocks.7.norm2.weight": "model-00001-of-00002.safetensors",
670
- "model.visual.blocks.8.attn.proj.bias": "model-00001-of-00002.safetensors",
671
- "model.visual.blocks.8.attn.proj.weight": "model-00001-of-00002.safetensors",
672
- "model.visual.blocks.8.attn.qkv.bias": "model-00001-of-00002.safetensors",
673
- "model.visual.blocks.8.attn.qkv.weight": "model-00001-of-00002.safetensors",
674
- "model.visual.blocks.8.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
675
- "model.visual.blocks.8.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
676
- "model.visual.blocks.8.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
677
- "model.visual.blocks.8.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
678
- "model.visual.blocks.8.norm1.bias": "model-00001-of-00002.safetensors",
679
- "model.visual.blocks.8.norm1.weight": "model-00001-of-00002.safetensors",
680
- "model.visual.blocks.8.norm2.bias": "model-00001-of-00002.safetensors",
681
- "model.visual.blocks.8.norm2.weight": "model-00001-of-00002.safetensors",
682
- "model.visual.blocks.9.attn.proj.bias": "model-00001-of-00002.safetensors",
683
- "model.visual.blocks.9.attn.proj.weight": "model-00001-of-00002.safetensors",
684
- "model.visual.blocks.9.attn.qkv.bias": "model-00001-of-00002.safetensors",
685
- "model.visual.blocks.9.attn.qkv.weight": "model-00001-of-00002.safetensors",
686
- "model.visual.blocks.9.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
687
- "model.visual.blocks.9.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
688
- "model.visual.blocks.9.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
689
- "model.visual.blocks.9.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
690
- "model.visual.blocks.9.norm1.bias": "model-00001-of-00002.safetensors",
691
- "model.visual.blocks.9.norm1.weight": "model-00001-of-00002.safetensors",
692
- "model.visual.blocks.9.norm2.bias": "model-00001-of-00002.safetensors",
693
- "model.visual.blocks.9.norm2.weight": "model-00001-of-00002.safetensors",
694
- "model.visual.deepstack_merger_list.0.linear_fc1.bias": "model-00001-of-00002.safetensors",
695
- "model.visual.deepstack_merger_list.0.linear_fc1.weight": "model-00001-of-00002.safetensors",
696
- "model.visual.deepstack_merger_list.0.linear_fc2.bias": "model-00001-of-00002.safetensors",
697
- "model.visual.deepstack_merger_list.0.linear_fc2.weight": "model-00001-of-00002.safetensors",
698
- "model.visual.deepstack_merger_list.0.norm.bias": "model-00001-of-00002.safetensors",
699
- "model.visual.deepstack_merger_list.0.norm.weight": "model-00001-of-00002.safetensors",
700
- "model.visual.deepstack_merger_list.1.linear_fc1.bias": "model-00001-of-00002.safetensors",
701
- "model.visual.deepstack_merger_list.1.linear_fc1.weight": "model-00001-of-00002.safetensors",
702
- "model.visual.deepstack_merger_list.1.linear_fc2.bias": "model-00001-of-00002.safetensors",
703
- "model.visual.deepstack_merger_list.1.linear_fc2.weight": "model-00001-of-00002.safetensors",
704
- "model.visual.deepstack_merger_list.1.norm.bias": "model-00001-of-00002.safetensors",
705
- "model.visual.deepstack_merger_list.1.norm.weight": "model-00001-of-00002.safetensors",
706
- "model.visual.deepstack_merger_list.2.linear_fc1.bias": "model-00001-of-00002.safetensors",
707
- "model.visual.deepstack_merger_list.2.linear_fc1.weight": "model-00001-of-00002.safetensors",
708
- "model.visual.deepstack_merger_list.2.linear_fc2.bias": "model-00001-of-00002.safetensors",
709
- "model.visual.deepstack_merger_list.2.linear_fc2.weight": "model-00001-of-00002.safetensors",
710
- "model.visual.deepstack_merger_list.2.norm.bias": "model-00001-of-00002.safetensors",
711
- "model.visual.deepstack_merger_list.2.norm.weight": "model-00001-of-00002.safetensors",
712
- "model.visual.merger.linear_fc1.bias": "model-00001-of-00002.safetensors",
713
- "model.visual.merger.linear_fc1.weight": "model-00001-of-00002.safetensors",
714
- "model.visual.merger.linear_fc2.bias": "model-00001-of-00002.safetensors",
715
- "model.visual.merger.linear_fc2.weight": "model-00001-of-00002.safetensors",
716
- "model.visual.merger.norm.bias": "model-00001-of-00002.safetensors",
717
- "model.visual.merger.norm.weight": "model-00001-of-00002.safetensors",
718
- "model.visual.patch_embed.proj.bias": "model-00001-of-00002.safetensors",
719
- "model.visual.patch_embed.proj.weight": "model-00001-of-00002.safetensors",
720
- "model.visual.pos_embed.weight": "model-00001-of-00002.safetensors"
721
- }
722
- }
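Note: the weight_map above is the standard safetensors sharding index, mapping each parameter name to the shard file that stores it. A minimal sketch of reassembling such a checkpoint from the index (assuming the json and safetensors packages; safetensors.torch.load_file returns a dict of tensors):

    import json
    from safetensors.torch import load_file

    # The index maps "parameter name" -> "shard file name".
    with open("model.safetensors.index.json") as f:
        index = json.load(f)

    state_dict = {}
    # Load each shard exactly once and merge its tensors.
    for shard in sorted(set(index["weight_map"].values())):
        state_dict.update(load_file(shard))

    # Every parameter listed in the index should now be present.
    assert set(index["weight_map"]) <= set(state_dict)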
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/preprocessor_config.json DELETED
@@ -1,21 +0,0 @@
- {
- "size": {
- "longest_edge": 16777216,
- "shortest_edge": 65536
- },
- "patch_size": 16,
- "temporal_patch_size": 2,
- "merge_size": 2,
- "image_mean": [
- 0.5,
- 0.5,
- 0.5
- ],
- "image_std": [
- 0.5,
- 0.5,
- 0.5
- ],
- "processor_class": "Qwen3VLProcessor",
- "image_processor_type": "Qwen2VLImageProcessorFast"
- }
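Note: with patch_size 16 and merge_size 2, image sides are effectively handled in 32-pixel units, and the size block bounds total image area between shortest_edge (65536 px) and longest_edge (16777216 px). A sketch of the resize arithmetic this implies, modeled on the Qwen2-VL "smart resize"; the exact rounding inside Qwen2VLImageProcessorFast may differ:

    import math

    def smart_resize(height, width, factor=32, min_pixels=65536, max_pixels=16777216):
        # Snap both sides to multiples of patch_size * merge_size = 32.
        h = max(factor, round(height / factor) * factor)
        w = max(factor, round(width / factor) * factor)
        if h * w > max_pixels:    # shrink to fit the pixel budget
            beta = math.sqrt(height * width / max_pixels)
            h = math.floor(height / beta / factor) * factor
            w = math.floor(width / beta / factor) * factor
        elif h * w < min_pixels:  # grow to reach the minimum area
            beta = math.sqrt(min_pixels / (height * width))
            h = math.ceil(height * beta / factor) * factor
            w = math.ceil(width * beta / factor) * factor
        return h, w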
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/special_tokens_map.json DELETED
@@ -1,31 +0,0 @@
- {
- "additional_special_tokens": [
- "<|im_start|>",
- "<|im_end|>",
- "<|object_ref_start|>",
- "<|object_ref_end|>",
- "<|box_start|>",
- "<|box_end|>",
- "<|quad_start|>",
- "<|quad_end|>",
- "<|vision_start|>",
- "<|vision_end|>",
- "<|vision_pad|>",
- "<|image_pad|>",
- "<|video_pad|>"
- ],
- "eos_token": {
- "content": "<|im_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "pad_token": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- }
- }
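Note: a quick sanity check of this eos/pad wiring, assuming an intact local copy of the checkpoint (the path below is hypothetical) and transformers' AutoTokenizer:

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("path/to/checkpoint")
    print(tok.eos_token, tok.eos_token_id)  # <|im_end|> 151645
    print(tok.pad_token, tok.pad_token_id)  # <|endoftext|> 151643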
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/tokenizer.json DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
- size 11422654
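Note: this is a Git LFS pointer (spec v1), not the tokenizer itself; the actual tokenizer.json (11422654 bytes, identified by the sha256 oid) lives in LFS storage. A minimal hedged parser for such pointer files:

    def parse_lfs_pointer(text: str) -> dict:
        # Each pointer line is "key value"; v1 pointers carry version, oid, size.
        fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
        algo, digest = fields["oid"].split(":", 1)
        return {"version": fields["version"], "algo": algo,
                "digest": digest, "size": int(fields["size"])}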
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/tokenizer_config.json DELETED
@@ -1,240 +0,0 @@
- {
- "add_bos_token": false,
- "add_prefix_space": false,
- "added_tokens_decoder": {
- "151643": {
- "content": "<|endoftext|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151644": {
- "content": "<|im_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151645": {
- "content": "<|im_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151646": {
- "content": "<|object_ref_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151647": {
- "content": "<|object_ref_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151648": {
- "content": "<|box_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151649": {
- "content": "<|box_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151650": {
- "content": "<|quad_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151651": {
- "content": "<|quad_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151652": {
- "content": "<|vision_start|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151653": {
- "content": "<|vision_end|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151654": {
- "content": "<|vision_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151655": {
- "content": "<|image_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151656": {
- "content": "<|video_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": true
- },
- "151657": {
- "content": "<tool_call>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151658": {
- "content": "</tool_call>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151659": {
- "content": "<|fim_prefix|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151660": {
- "content": "<|fim_middle|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151661": {
- "content": "<|fim_suffix|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151662": {
- "content": "<|fim_pad|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151663": {
- "content": "<|repo_name|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151664": {
- "content": "<|file_sep|>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151665": {
- "content": "<tool_response>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151666": {
- "content": "</tool_response>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151667": {
- "content": "<think>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- },
- "151668": {
- "content": "</think>",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false,
- "special": false
- }
- },
- "additional_special_tokens": [
- "<|im_start|>",
- "<|im_end|>",
- "<|object_ref_start|>",
- "<|object_ref_end|>",
- "<|box_start|>",
- "<|box_end|>",
- "<|quad_start|>",
- "<|quad_end|>",
- "<|vision_start|>",
- "<|vision_end|>",
- "<|vision_pad|>",
- "<|image_pad|>",
- "<|video_pad|>"
- ],
- "bos_token": null,
- "clean_up_tokenization_spaces": false,
- "eos_token": "<|im_end|>",
- "errors": "replace",
- "extra_special_tokens": {},
- "model_max_length": 262144,
- "pad_token": "<|endoftext|>",
- "processor_class": "Qwen3VLProcessor",
- "split_special_tokens": false,
- "tokenizer_class": "Qwen2Tokenizer",
- "unk_token": null
- }
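Note: in added_tokens_decoder above, ids 151657-151668 (the tool_call/tool_response, FIM, and think markers) are registered with "special": false, so unlike the chat-control tokens they survive decoding even with skip_special_tokens=True. A small illustration (the path is hypothetical):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("path/to/checkpoint")
    ids = tok.encode("<think>plan</think><|im_end|>")
    # <think>/</think> are non-special and are kept; <|im_end|> is special and is stripped.
    print(tok.decode(ids, skip_special_tokens=True))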
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/trainer_state.json DELETED
@@ -1,3534 +0,0 @@
- {
- "best_global_step": null,
- "best_metric": null,
- "best_model_checkpoint": null,
- "epoch": 0.12748597654258031,
- "eval_steps": 10000.0,
- "global_step": 500,
- "is_hyper_param_search": false,
- "is_local_process_zero": true,
- "is_world_process_zero": true,
- "log_history": [
- {
- "epoch": 0.00025497195308516065,
- "grad_norm": 15.415260314941406,
- "learning_rate": 5.076142131979696e-08,
- "loss": 1.1610933542251587,
- "step": 1
- },
- {
- "epoch": 0.0005099439061703213,
- "grad_norm": 15.637948989868164,
- "learning_rate": 1.0152284263959391e-07,
- "loss": 1.1993461847305298,
- "step": 2
- },
- {
- "epoch": 0.0007649158592554819,
- "grad_norm": 15.91840648651123,
- "learning_rate": 1.5228426395939088e-07,
- "loss": 1.1827669143676758,
- "step": 3
- },
- {
- "epoch": 0.0010198878123406426,
- "grad_norm": 15.478428840637207,
- "learning_rate": 2.0304568527918783e-07,
- "loss": 1.176124930381775,
- "step": 4
- },
- {
- "epoch": 0.0012748597654258032,
- "grad_norm": 15.785908699035645,
- "learning_rate": 2.538071065989848e-07,
- "loss": 1.1965456008911133,
- "step": 5
- },
- {
- "epoch": 0.0015298317185109638,
- "grad_norm": 15.270750999450684,
- "learning_rate": 3.0456852791878176e-07,
- "loss": 1.1675349473953247,
- "step": 6
- },
- {
- "epoch": 0.0017848036715961244,
- "grad_norm": 15.401095390319824,
- "learning_rate": 3.553299492385787e-07,
- "loss": 1.2026066780090332,
- "step": 7
- },
- {
- "epoch": 0.002039775624681285,
- "grad_norm": 14.897509574890137,
- "learning_rate": 4.0609137055837566e-07,
- "loss": 1.1604423522949219,
- "step": 8
- },
- {
- "epoch": 0.0022947475777664456,
- "grad_norm": 15.150520324707031,
- "learning_rate": 4.568527918781726e-07,
- "loss": 1.1517817974090576,
- "step": 9
- },
- {
- "epoch": 0.0025497195308516064,
- "grad_norm": 15.229615211486816,
- "learning_rate": 5.076142131979696e-07,
- "loss": 1.152367115020752,
- "step": 10
- },
- {
- "epoch": 0.0028046914839367667,
- "grad_norm": 12.473063468933105,
- "learning_rate": 5.583756345177665e-07,
- "loss": 1.0538256168365479,
- "step": 11
- },
- {
- "epoch": 0.0030596634370219276,
- "grad_norm": 12.416116714477539,
- "learning_rate": 6.091370558375635e-07,
- "loss": 1.0510058403015137,
- "step": 12
- },
- {
- "epoch": 0.0033146353901070884,
- "grad_norm": 11.904985427856445,
- "learning_rate": 6.598984771573605e-07,
- "loss": 1.0342445373535156,
- "step": 13
- },
- {
- "epoch": 0.0035696073431922487,
- "grad_norm": 7.985447883605957,
- "learning_rate": 7.106598984771574e-07,
- "loss": 0.909122109413147,
- "step": 14
- },
- {
- "epoch": 0.0038245792962774095,
- "grad_norm": 7.608842372894287,
- "learning_rate": 7.614213197969544e-07,
- "loss": 0.900871217250824,
- "step": 15
- },
- {
- "epoch": 0.00407955124936257,
- "grad_norm": 7.21879243850708,
- "learning_rate": 8.121827411167513e-07,
- "loss": 0.8900021314620972,
- "step": 16
- },
- {
- "epoch": 0.004334523202447731,
- "grad_norm": 6.9251909255981445,
- "learning_rate": 8.629441624365482e-07,
- "loss": 0.867794394493103,
- "step": 17
- },
- {
- "epoch": 0.004589495155532891,
- "grad_norm": 6.087664604187012,
- "learning_rate": 9.137055837563452e-07,
- "loss": 0.8714677095413208,
- "step": 18
- },
- {
- "epoch": 0.004844467108618052,
- "grad_norm": 4.437993049621582,
- "learning_rate": 9.644670050761422e-07,
- "loss": 0.7944602370262146,
- "step": 19
- },
- {
- "epoch": 0.005099439061703213,
- "grad_norm": 3.9098119735717773,
- "learning_rate": 1.0152284263959392e-06,
- "loss": 0.7696177959442139,
- "step": 20
- },
- {
- "epoch": 0.0053544110147883735,
- "grad_norm": 5.06033992767334,
- "learning_rate": 1.0659898477157362e-06,
- "loss": 0.7556478381156921,
- "step": 21
- },
- {
- "epoch": 0.0056093829678735335,
- "grad_norm": 3.4315176010131836,
- "learning_rate": 1.116751269035533e-06,
- "loss": 0.766305148601532,
- "step": 22
- },
- {
- "epoch": 0.005864354920958694,
- "grad_norm": 4.819195747375488,
- "learning_rate": 1.16751269035533e-06,
- "loss": 0.7652184367179871,
- "step": 23
- },
- {
- "epoch": 0.006119326874043855,
- "grad_norm": 3.1732325553894043,
- "learning_rate": 1.218274111675127e-06,
- "loss": 0.7531844973564148,
- "step": 24
- },
- {
- "epoch": 0.006374298827129016,
- "grad_norm": 3.403918504714966,
- "learning_rate": 1.2690355329949238e-06,
- "loss": 0.7432605624198914,
- "step": 25
- },
- {
- "epoch": 0.006629270780214177,
- "grad_norm": 6.011890888214111,
- "learning_rate": 1.319796954314721e-06,
- "loss": 0.7286916971206665,
- "step": 26
- },
- {
- "epoch": 0.006884242733299337,
- "grad_norm": 3.864213466644287,
- "learning_rate": 1.3705583756345178e-06,
- "loss": 0.7215209007263184,
- "step": 27
- },
- {
- "epoch": 0.0071392146863844975,
- "grad_norm": 4.094939231872559,
- "learning_rate": 1.4213197969543148e-06,
- "loss": 0.7281020879745483,
- "step": 28
- },
- {
- "epoch": 0.007394186639469658,
- "grad_norm": 3.790905475616455,
- "learning_rate": 1.4720812182741118e-06,
- "loss": 0.7287170886993408,
- "step": 29
- },
- {
- "epoch": 0.007649158592554819,
- "grad_norm": 4.714667797088623,
- "learning_rate": 1.5228426395939088e-06,
- "loss": 0.730525016784668,
- "step": 30
- },
- {
- "epoch": 0.00790413054563998,
- "grad_norm": 3.4293529987335205,
- "learning_rate": 1.5736040609137056e-06,
- "loss": 0.7161718606948853,
- "step": 31
- },
- {
- "epoch": 0.00815910249872514,
- "grad_norm": 2.985097885131836,
- "learning_rate": 1.6243654822335026e-06,
- "loss": 0.70013427734375,
- "step": 32
- },
- {
- "epoch": 0.008414074451810302,
- "grad_norm": 3.2844507694244385,
- "learning_rate": 1.6751269035532996e-06,
- "loss": 0.7102205157279968,
- "step": 33
- },
- {
- "epoch": 0.008669046404895462,
245
- "grad_norm": 2.9922728538513184,
246
- "learning_rate": 1.7258883248730964e-06,
247
- "loss": 0.6986203193664551,
248
- "step": 34
249
- },
250
- {
251
- "epoch": 0.008924018357980621,
252
- "grad_norm": 3.494112253189087,
253
- "learning_rate": 1.7766497461928936e-06,
254
- "loss": 0.705012321472168,
255
- "step": 35
256
- },
257
- {
258
- "epoch": 0.009178990311065782,
259
- "grad_norm": 2.92978572845459,
260
- "learning_rate": 1.8274111675126904e-06,
261
- "loss": 0.6996530294418335,
262
- "step": 36
263
- },
264
- {
265
- "epoch": 0.009433962264150943,
266
- "grad_norm": 4.036802768707275,
267
- "learning_rate": 1.8781725888324874e-06,
268
- "loss": 0.7008537650108337,
269
- "step": 37
270
- },
271
- {
272
- "epoch": 0.009688934217236104,
273
- "grad_norm": 7.8610358238220215,
274
- "learning_rate": 1.9289340101522844e-06,
275
- "loss": 0.6845177412033081,
276
- "step": 38
277
- },
278
- {
279
- "epoch": 0.009943906170321265,
280
- "grad_norm": 3.7339322566986084,
281
- "learning_rate": 1.9796954314720814e-06,
282
- "loss": 0.6841769218444824,
283
- "step": 39
284
- },
285
- {
286
- "epoch": 0.010198878123406425,
287
- "grad_norm": 2.830705165863037,
288
- "learning_rate": 2.0304568527918785e-06,
289
- "loss": 0.699475884437561,
290
- "step": 40
291
- },
292
- {
293
- "epoch": 0.010453850076491586,
294
- "grad_norm": 12.568126678466797,
295
- "learning_rate": 2.0812182741116755e-06,
296
- "loss": 0.6735811233520508,
297
- "step": 41
298
- },
299
- {
300
- "epoch": 0.010708822029576747,
301
- "grad_norm": 3.7829558849334717,
302
- "learning_rate": 2.1319796954314725e-06,
303
- "loss": 0.6893086433410645,
304
- "step": 42
305
- },
306
- {
307
- "epoch": 0.010963793982661908,
308
- "grad_norm": 4.368283271789551,
309
- "learning_rate": 2.182741116751269e-06,
310
- "loss": 0.6826390027999878,
311
- "step": 43
312
- },
313
- {
314
- "epoch": 0.011218765935747067,
315
- "grad_norm": 3.3035032749176025,
316
- "learning_rate": 2.233502538071066e-06,
317
- "loss": 0.668271541595459,
318
- "step": 44
319
- },
320
- {
321
- "epoch": 0.011473737888832228,
322
- "grad_norm": 3.9567983150482178,
323
- "learning_rate": 2.284263959390863e-06,
324
- "loss": 0.6784211993217468,
325
- "step": 45
326
- },
327
- {
328
- "epoch": 0.011728709841917389,
329
- "grad_norm": 3.4999377727508545,
330
- "learning_rate": 2.33502538071066e-06,
331
- "loss": 0.6874396204948425,
332
- "step": 46
333
- },
334
- {
335
- "epoch": 0.01198368179500255,
336
- "grad_norm": 3.2631001472473145,
337
- "learning_rate": 2.385786802030457e-06,
338
- "loss": 0.6772322654724121,
339
- "step": 47
340
- },
341
- {
342
- "epoch": 0.01223865374808771,
343
- "grad_norm": 20.580337524414062,
344
- "learning_rate": 2.436548223350254e-06,
345
- "loss": 0.6612566709518433,
346
- "step": 48
347
- },
348
- {
349
- "epoch": 0.012493625701172871,
350
- "grad_norm": 8.252683639526367,
351
- "learning_rate": 2.487309644670051e-06,
352
- "loss": 0.6630977392196655,
353
- "step": 49
354
- },
355
- {
356
- "epoch": 0.012748597654258032,
357
- "grad_norm": 4.531806468963623,
358
- "learning_rate": 2.5380710659898476e-06,
359
- "loss": 0.6681995391845703,
360
- "step": 50
361
- },
362
- {
363
- "epoch": 0.013003569607343193,
364
- "grad_norm": 3.459275960922241,
365
- "learning_rate": 2.588832487309645e-06,
366
- "loss": 0.6613768339157104,
367
- "step": 51
368
- },
369
- {
370
- "epoch": 0.013258541560428353,
371
- "grad_norm": 5.5584940910339355,
372
- "learning_rate": 2.639593908629442e-06,
373
- "loss": 0.6634055376052856,
374
- "step": 52
375
- },
376
- {
377
- "epoch": 0.013513513513513514,
378
- "grad_norm": 5.6429524421691895,
379
- "learning_rate": 2.6903553299492387e-06,
380
- "loss": 0.6786404848098755,
381
- "step": 53
382
- },
383
- {
384
- "epoch": 0.013768485466598673,
385
- "grad_norm": 4.319962978363037,
386
- "learning_rate": 2.7411167512690357e-06,
387
- "loss": 0.654949426651001,
388
- "step": 54
389
- },
390
- {
391
- "epoch": 0.014023457419683834,
392
- "grad_norm": 6.052556037902832,
393
- "learning_rate": 2.7918781725888327e-06,
394
- "loss": 0.6762086749076843,
395
- "step": 55
396
- },
397
- {
398
- "epoch": 0.014278429372768995,
399
- "grad_norm": 5.070164680480957,
400
- "learning_rate": 2.8426395939086297e-06,
401
- "loss": 0.6505463123321533,
402
- "step": 56
403
- },
404
- {
405
- "epoch": 0.014533401325854156,
406
- "grad_norm": 3.3184974193573,
407
- "learning_rate": 2.8934010152284262e-06,
408
- "loss": 0.6489129662513733,
409
- "step": 57
410
- },
411
- {
412
- "epoch": 0.014788373278939317,
413
- "grad_norm": 5.315630912780762,
414
- "learning_rate": 2.9441624365482237e-06,
415
- "loss": 0.6476598978042603,
416
- "step": 58
417
- },
418
- {
419
- "epoch": 0.015043345232024477,
420
- "grad_norm": 3.659990072250366,
421
- "learning_rate": 2.9949238578680207e-06,
422
- "loss": 0.6501700282096863,
423
- "step": 59
424
- },
425
- {
426
- "epoch": 0.015298317185109638,
427
- "grad_norm": 3.4091506004333496,
428
- "learning_rate": 3.0456852791878177e-06,
429
- "loss": 0.6517801880836487,
430
- "step": 60
431
- },
432
- {
433
- "epoch": 0.015553289138194799,
434
- "grad_norm": 3.8126204013824463,
435
- "learning_rate": 3.0964467005076143e-06,
436
- "loss": 0.6425204277038574,
437
- "step": 61
438
- },
439
- {
440
- "epoch": 0.01580826109127996,
441
- "grad_norm": 3.711880922317505,
442
- "learning_rate": 3.1472081218274113e-06,
443
- "loss": 0.638529360294342,
444
- "step": 62
445
- },
446
- {
447
- "epoch": 0.01606323304436512,
448
- "grad_norm": 7.707905292510986,
449
- "learning_rate": 3.1979695431472087e-06,
450
- "loss": 0.6470184922218323,
451
- "step": 63
452
- },
453
- {
454
- "epoch": 0.01631820499745028,
455
- "grad_norm": 7.1005072593688965,
456
- "learning_rate": 3.2487309644670053e-06,
457
- "loss": 0.6359443664550781,
458
- "step": 64
459
- },
460
- {
461
- "epoch": 0.01657317695053544,
462
- "grad_norm": 6.728579521179199,
463
- "learning_rate": 3.2994923857868023e-06,
464
- "loss": 0.6561862230300903,
465
- "step": 65
466
- },
467
- {
468
- "epoch": 0.016828148903620603,
469
- "grad_norm": 4.356357574462891,
470
- "learning_rate": 3.3502538071065993e-06,
471
- "loss": 0.6496514081954956,
472
- "step": 66
473
- },
474
- {
475
- "epoch": 0.017083120856705762,
476
- "grad_norm": 5.432864189147949,
477
- "learning_rate": 3.4010152284263963e-06,
478
- "loss": 0.6436284184455872,
479
- "step": 67
480
- },
481
- {
482
- "epoch": 0.017338092809790925,
483
- "grad_norm": 5.1800408363342285,
484
- "learning_rate": 3.451776649746193e-06,
485
- "loss": 0.6394556760787964,
486
- "step": 68
487
- },
488
- {
489
- "epoch": 0.017593064762876084,
490
- "grad_norm": 9.765804290771484,
491
- "learning_rate": 3.5025380710659903e-06,
492
- "loss": 0.6331816911697388,
493
- "step": 69
494
- },
495
- {
496
- "epoch": 0.017848036715961243,
497
- "grad_norm": 4.826832294464111,
498
- "learning_rate": 3.5532994923857873e-06,
499
- "loss": 0.6390204429626465,
500
- "step": 70
501
- },
502
- {
503
- "epoch": 0.018103008669046405,
504
- "grad_norm": 5.472368240356445,
505
- "learning_rate": 3.6040609137055843e-06,
506
- "loss": 0.6334278583526611,
507
- "step": 71
508
- },
509
- {
510
- "epoch": 0.018357980622131564,
511
- "grad_norm": 5.457934856414795,
512
- "learning_rate": 3.654822335025381e-06,
513
- "loss": 0.6240629553794861,
514
- "step": 72
515
- },
516
- {
517
- "epoch": 0.018612952575216727,
518
- "grad_norm": 6.780310153961182,
519
- "learning_rate": 3.705583756345178e-06,
520
- "loss": 0.63517826795578,
521
- "step": 73
522
- },
523
- {
524
- "epoch": 0.018867924528301886,
525
- "grad_norm": 3.7166168689727783,
526
- "learning_rate": 3.756345177664975e-06,
527
- "loss": 0.627025306224823,
528
- "step": 74
529
- },
530
- {
531
- "epoch": 0.01912289648138705,
532
- "grad_norm": 4.887142181396484,
533
- "learning_rate": 3.8071065989847715e-06,
534
- "loss": 0.6309980154037476,
535
- "step": 75
536
- },
537
- {
538
- "epoch": 0.019377868434472208,
539
- "grad_norm": 6.183432579040527,
540
- "learning_rate": 3.857868020304569e-06,
541
- "loss": 0.6254815459251404,
542
- "step": 76
543
- },
544
- {
545
- "epoch": 0.01963284038755737,
546
- "grad_norm": 4.948214530944824,
547
- "learning_rate": 3.9086294416243655e-06,
548
- "loss": 0.6244087219238281,
549
- "step": 77
550
- },
551
- {
552
- "epoch": 0.01988781234064253,
553
- "grad_norm": 5.138155460357666,
554
- "learning_rate": 3.959390862944163e-06,
555
- "loss": 0.6282638311386108,
556
- "step": 78
557
- },
558
- {
559
- "epoch": 0.02014278429372769,
560
- "grad_norm": 4.594997882843018,
561
- "learning_rate": 4.0101522842639595e-06,
562
- "loss": 0.6211766004562378,
563
- "step": 79
564
- },
565
- {
566
- "epoch": 0.02039775624681285,
567
- "grad_norm": 5.657145023345947,
568
- "learning_rate": 4.060913705583757e-06,
569
- "loss": 0.6292088627815247,
570
- "step": 80
571
- },
572
- {
573
- "epoch": 0.02065272819989801,
574
- "grad_norm": 7.0994343757629395,
575
- "learning_rate": 4.1116751269035535e-06,
576
- "loss": 0.6266737580299377,
577
- "step": 81
578
- },
579
- {
580
- "epoch": 0.020907700152983173,
581
- "grad_norm": 4.467260360717773,
582
- "learning_rate": 4.162436548223351e-06,
583
- "loss": 0.6084794998168945,
584
- "step": 82
585
- },
586
- {
587
- "epoch": 0.02116267210606833,
588
- "grad_norm": 4.804591655731201,
589
- "learning_rate": 4.2131979695431475e-06,
590
- "loss": 0.6341028213500977,
591
- "step": 83
592
- },
593
- {
594
- "epoch": 0.021417644059153494,
595
- "grad_norm": 4.987437725067139,
596
- "learning_rate": 4.263959390862945e-06,
597
- "loss": 0.6339101791381836,
598
- "step": 84
599
- },
600
- {
601
- "epoch": 0.021672616012238653,
602
- "grad_norm": 6.195011138916016,
603
- "learning_rate": 4.3147208121827415e-06,
604
- "loss": 0.6330238580703735,
605
- "step": 85
606
- },
607
- {
608
- "epoch": 0.021927587965323816,
609
- "grad_norm": 5.589010715484619,
610
- "learning_rate": 4.365482233502538e-06,
611
- "loss": 0.6271764636039734,
612
- "step": 86
613
- },
614
- {
615
- "epoch": 0.022182559918408975,
616
- "grad_norm": 10.114912033081055,
617
- "learning_rate": 4.4162436548223355e-06,
618
- "loss": 0.638175904750824,
619
- "step": 87
620
- },
621
- {
622
- "epoch": 0.022437531871494134,
623
- "grad_norm": 7.669949054718018,
624
- "learning_rate": 4.467005076142132e-06,
625
- "loss": 0.624021053314209,
626
- "step": 88
627
- },
628
- {
629
- "epoch": 0.022692503824579296,
630
- "grad_norm": 8.180469512939453,
631
- "learning_rate": 4.5177664974619295e-06,
632
- "loss": 0.6178176403045654,
633
- "step": 89
634
- },
635
- {
636
- "epoch": 0.022947475777664456,
637
- "grad_norm": 5.518604755401611,
638
- "learning_rate": 4.568527918781726e-06,
639
- "loss": 0.6195391416549683,
640
- "step": 90
641
- },
642
- {
643
- "epoch": 0.023202447730749618,
644
- "grad_norm": 5.258211612701416,
645
- "learning_rate": 4.6192893401015235e-06,
646
- "loss": 0.6231352686882019,
647
- "step": 91
648
- },
649
- {
650
- "epoch": 0.023457419683834777,
651
- "grad_norm": 3.893946647644043,
652
- "learning_rate": 4.67005076142132e-06,
653
- "loss": 0.6193073987960815,
654
- "step": 92
655
- },
656
- {
657
- "epoch": 0.02371239163691994,
658
- "grad_norm": 5.802011489868164,
659
- "learning_rate": 4.7208121827411175e-06,
660
- "loss": 0.6172696948051453,
661
- "step": 93
662
- },
663
- {
664
- "epoch": 0.0239673635900051,
665
- "grad_norm": 5.073032855987549,
666
- "learning_rate": 4.771573604060914e-06,
667
- "loss": 0.6240885853767395,
668
- "step": 94
669
- },
670
- {
671
- "epoch": 0.02422233554309026,
672
- "grad_norm": 5.177947998046875,
673
- "learning_rate": 4.822335025380711e-06,
674
- "loss": 0.6277825236320496,
675
- "step": 95
676
- },
677
- {
678
- "epoch": 0.02447730749617542,
679
- "grad_norm": 5.730579376220703,
680
- "learning_rate": 4.873096446700508e-06,
681
- "loss": 0.6149629950523376,
682
- "step": 96
683
- },
684
- {
685
- "epoch": 0.024732279449260583,
686
- "grad_norm": 6.5996551513671875,
687
- "learning_rate": 4.923857868020305e-06,
688
- "loss": 0.6126458644866943,
689
- "step": 97
690
- },
691
- {
692
- "epoch": 0.024987251402345742,
693
- "grad_norm": 13.988277435302734,
694
- "learning_rate": 4.974619289340102e-06,
695
- "loss": 0.6055952310562134,
696
- "step": 98
697
- },
698
- {
699
- "epoch": 0.0252422233554309,
700
- "grad_norm": 5.046289443969727,
701
- "learning_rate": 5.025380710659899e-06,
702
- "loss": 0.6105412244796753,
703
- "step": 99
704
- },
705
- {
706
- "epoch": 0.025497195308516064,
707
- "grad_norm": 5.086366176605225,
708
- "learning_rate": 5.076142131979695e-06,
709
- "loss": 0.6258758902549744,
710
- "step": 100
711
- },
712
- {
713
- "epoch": 0.025752167261601223,
714
- "grad_norm": 7.642775058746338,
715
- "learning_rate": 5.126903553299493e-06,
716
- "loss": 0.6233373880386353,
717
- "step": 101
718
- },
719
- {
720
- "epoch": 0.026007139214686385,
721
- "grad_norm": 7.112648963928223,
722
- "learning_rate": 5.17766497461929e-06,
723
- "loss": 0.6140401363372803,
724
- "step": 102
725
- },
726
- {
727
- "epoch": 0.026262111167771544,
728
- "grad_norm": 5.693024158477783,
729
- "learning_rate": 5.228426395939087e-06,
730
- "loss": 0.6101027727127075,
731
- "step": 103
732
- },
733
- {
734
- "epoch": 0.026517083120856707,
735
- "grad_norm": 4.558701515197754,
736
- "learning_rate": 5.279187817258884e-06,
737
- "loss": 0.6059219837188721,
738
- "step": 104
739
- },
740
- {
741
- "epoch": 0.026772055073941866,
742
- "grad_norm": 4.841275691986084,
743
- "learning_rate": 5.329949238578681e-06,
744
- "loss": 0.643490731716156,
745
- "step": 105
746
- },
747
- {
748
- "epoch": 0.02702702702702703,
749
- "grad_norm": 5.6230363845825195,
750
- "learning_rate": 5.380710659898477e-06,
751
- "loss": 0.6232346296310425,
752
- "step": 106
753
- },
754
- {
755
- "epoch": 0.027281998980112188,
756
- "grad_norm": 4.623135089874268,
757
- "learning_rate": 5.431472081218274e-06,
758
- "loss": 0.612013578414917,
759
- "step": 107
760
- },
761
- {
762
- "epoch": 0.027536970933197347,
763
- "grad_norm": 9.732264518737793,
764
- "learning_rate": 5.482233502538071e-06,
765
- "loss": 0.5976157188415527,
766
- "step": 108
767
- },
768
- {
769
- "epoch": 0.02779194288628251,
770
- "grad_norm": 6.726062774658203,
771
- "learning_rate": 5.532994923857869e-06,
772
- "loss": 0.6107317209243774,
773
- "step": 109
774
- },
775
- {
776
- "epoch": 0.02804691483936767,
777
- "grad_norm": 7.107174396514893,
778
- "learning_rate": 5.583756345177665e-06,
779
- "loss": 0.6086596846580505,
780
- "step": 110
781
- },
782
- {
783
- "epoch": 0.02830188679245283,
784
- "grad_norm": 6.202098846435547,
785
- "learning_rate": 5.634517766497463e-06,
786
- "loss": 0.6037451028823853,
787
- "step": 111
788
- },
789
- {
790
- "epoch": 0.02855685874553799,
791
- "grad_norm": 7.4830851554870605,
792
- "learning_rate": 5.685279187817259e-06,
793
- "loss": 0.6132713556289673,
794
- "step": 112
795
- },
796
- {
797
- "epoch": 0.028811830698623152,
798
- "grad_norm": 3.917173147201538,
799
- "learning_rate": 5.736040609137057e-06,
800
- "loss": 0.6098322868347168,
801
- "step": 113
802
- },
803
- {
804
- "epoch": 0.02906680265170831,
805
- "grad_norm": 7.616724014282227,
806
- "learning_rate": 5.7868020304568525e-06,
807
- "loss": 0.6145272254943848,
808
- "step": 114
809
- },
810
- {
811
- "epoch": 0.029321774604793474,
812
- "grad_norm": 4.091442108154297,
813
- "learning_rate": 5.83756345177665e-06,
814
- "loss": 0.6086658239364624,
815
- "step": 115
816
- },
817
- {
818
- "epoch": 0.029576746557878633,
819
- "grad_norm": 6.2335124015808105,
820
- "learning_rate": 5.888324873096447e-06,
821
- "loss": 0.6011253595352173,
822
- "step": 116
823
- },
824
- {
825
- "epoch": 0.029831718510963796,
826
- "grad_norm": 5.550269603729248,
827
- "learning_rate": 5.939086294416244e-06,
828
- "loss": 0.6046299338340759,
829
- "step": 117
830
- },
831
- {
832
- "epoch": 0.030086690464048955,
833
- "grad_norm": 7.084855079650879,
834
- "learning_rate": 5.989847715736041e-06,
835
- "loss": 0.6061999201774597,
836
- "step": 118
837
- },
838
- {
839
- "epoch": 0.030341662417134114,
840
- "grad_norm": 10.455710411071777,
841
- "learning_rate": 6.040609137055839e-06,
842
- "loss": 0.6251656413078308,
843
- "step": 119
844
- },
845
- {
846
- "epoch": 0.030596634370219276,
847
- "grad_norm": 8.010490417480469,
848
- "learning_rate": 6.091370558375635e-06,
849
- "loss": 0.6025846004486084,
850
- "step": 120
851
- },
852
- {
853
- "epoch": 0.030851606323304435,
854
- "grad_norm": 7.11653470993042,
855
- "learning_rate": 6.142131979695432e-06,
856
- "loss": 0.6091961860656738,
857
- "step": 121
858
- },
859
- {
860
- "epoch": 0.031106578276389598,
861
- "grad_norm": 4.968122959136963,
862
- "learning_rate": 6.1928934010152285e-06,
863
- "loss": 0.6093355417251587,
864
- "step": 122
865
- },
866
- {
867
- "epoch": 0.03136155022947476,
868
- "grad_norm": 10.246641159057617,
869
- "learning_rate": 6.243654822335026e-06,
870
- "loss": 0.6043275594711304,
871
- "step": 123
872
- },
873
- {
874
- "epoch": 0.03161652218255992,
875
- "grad_norm": 7.461697578430176,
876
- "learning_rate": 6.2944162436548225e-06,
877
- "loss": 0.5993828773498535,
878
- "step": 124
879
- },
880
- {
881
- "epoch": 0.03187149413564508,
882
- "grad_norm": 13.019267082214355,
883
- "learning_rate": 6.34517766497462e-06,
884
- "loss": 0.6082757711410522,
885
- "step": 125
886
- },
887
- {
888
- "epoch": 0.03212646608873024,
889
- "grad_norm": 5.950469493865967,
890
- "learning_rate": 6.395939086294417e-06,
891
- "loss": 0.6167939901351929,
892
- "step": 126
893
- },
894
- {
895
- "epoch": 0.0323814380418154,
896
- "grad_norm": 5.954137325286865,
897
- "learning_rate": 6.446700507614214e-06,
898
- "loss": 0.597322940826416,
899
- "step": 127
900
- },
901
- {
902
- "epoch": 0.03263640999490056,
903
- "grad_norm": 7.5997443199157715,
904
- "learning_rate": 6.4974619289340105e-06,
905
- "loss": 0.6099976301193237,
906
- "step": 128
907
- },
908
- {
909
- "epoch": 0.03289138194798572,
910
- "grad_norm": 8.977588653564453,
911
- "learning_rate": 6.548223350253807e-06,
912
- "loss": 0.6011739373207092,
913
- "step": 129
914
- },
915
- {
916
- "epoch": 0.03314635390107088,
917
- "grad_norm": 17.733882904052734,
918
- "learning_rate": 6.5989847715736045e-06,
919
- "loss": 0.6014074087142944,
920
- "step": 130
921
- },
922
- {
923
- "epoch": 0.03340132585415604,
924
- "grad_norm": 5.899418830871582,
925
- "learning_rate": 6.649746192893401e-06,
926
- "loss": 0.6007720828056335,
927
- "step": 131
928
- },
929
- {
930
- "epoch": 0.033656297807241206,
931
- "grad_norm": 8.74870777130127,
932
- "learning_rate": 6.7005076142131985e-06,
933
- "loss": 0.6171366572380066,
934
- "step": 132
935
- },
936
- {
937
- "epoch": 0.033911269760326365,
938
- "grad_norm": 4.3337788581848145,
939
- "learning_rate": 6.751269035532996e-06,
940
- "loss": 0.614408552646637,
941
- "step": 133
942
- },
943
- {
944
- "epoch": 0.034166241713411524,
945
- "grad_norm": 8.11375904083252,
946
- "learning_rate": 6.8020304568527926e-06,
947
- "loss": 0.5979299545288086,
948
- "step": 134
949
- },
950
- {
951
- "epoch": 0.03442121366649668,
952
- "grad_norm": 7.533430099487305,
953
- "learning_rate": 6.852791878172589e-06,
954
- "loss": 0.616156280040741,
955
- "step": 135
956
- },
957
- {
958
- "epoch": 0.03467618561958185,
959
- "grad_norm": 8.658021926879883,
960
- "learning_rate": 6.903553299492386e-06,
961
- "loss": 0.5952839255332947,
962
- "step": 136
963
- },
964
- {
965
- "epoch": 0.03493115757266701,
966
- "grad_norm": 4.664230823516846,
967
- "learning_rate": 6.954314720812183e-06,
968
- "loss": 0.6029950380325317,
969
- "step": 137
970
- },
971
- {
972
- "epoch": 0.03518612952575217,
973
- "grad_norm": 6.2738237380981445,
974
- "learning_rate": 7.0050761421319806e-06,
975
- "loss": 0.6155085563659668,
976
- "step": 138
977
- },
978
- {
979
- "epoch": 0.03544110147883733,
980
- "grad_norm": 6.849208354949951,
981
- "learning_rate": 7.055837563451777e-06,
982
- "loss": 0.6097654104232788,
983
- "step": 139
984
- },
985
- {
986
- "epoch": 0.035696073431922486,
987
- "grad_norm": 5.106319427490234,
988
- "learning_rate": 7.106598984771575e-06,
989
- "loss": 0.6000968217849731,
990
- "step": 140
991
- },
992
- {
993
- "epoch": 0.03595104538500765,
994
- "grad_norm": 4.99149751663208,
995
- "learning_rate": 7.157360406091371e-06,
996
- "loss": 0.5965438485145569,
997
- "step": 141
998
- },
999
- {
1000
- "epoch": 0.03620601733809281,
1001
- "grad_norm": 4.150666236877441,
1002
- "learning_rate": 7.208121827411169e-06,
1003
- "loss": 0.6034414768218994,
1004
- "step": 142
1005
- },
1006
- {
1007
- "epoch": 0.03646098929117797,
1008
- "grad_norm": 4.380922794342041,
1009
- "learning_rate": 7.258883248730964e-06,
1010
- "loss": 0.5983131527900696,
1011
- "step": 143
1012
- },
1013
- {
1014
- "epoch": 0.03671596124426313,
1015
- "grad_norm": 4.286844730377197,
1016
- "learning_rate": 7.309644670050762e-06,
1017
- "loss": 0.6075978875160217,
1018
- "step": 144
1019
- },
1020
- {
1021
- "epoch": 0.036970933197348295,
1022
- "grad_norm": 4.61534309387207,
1023
- "learning_rate": 7.360406091370559e-06,
1024
- "loss": 0.6174564957618713,
1025
- "step": 145
1026
- },
1027
- {
1028
- "epoch": 0.037225905150433454,
1029
- "grad_norm": 7.377719402313232,
1030
- "learning_rate": 7.411167512690356e-06,
1031
- "loss": 0.6060669422149658,
1032
- "step": 146
1033
- },
1034
- {
1035
- "epoch": 0.03748087710351861,
1036
- "grad_norm": 4.636322975158691,
1037
- "learning_rate": 7.461928934010153e-06,
1038
- "loss": 0.5887423753738403,
1039
- "step": 147
1040
- },
1041
- {
1042
- "epoch": 0.03773584905660377,
1043
- "grad_norm": 3.755432367324829,
1044
- "learning_rate": 7.51269035532995e-06,
1045
- "loss": 0.5913705229759216,
1046
- "step": 148
1047
- },
1048
- {
1049
- "epoch": 0.03799082100968893,
1050
- "grad_norm": 5.489928722381592,
1051
- "learning_rate": 7.563451776649747e-06,
1052
- "loss": 0.6052175760269165,
1053
- "step": 149
1054
- },
1055
- {
1056
- "epoch": 0.0382457929627741,
1057
- "grad_norm": 5.9897027015686035,
1058
- "learning_rate": 7.614213197969543e-06,
1059
- "loss": 0.5902068018913269,
1060
- "step": 150
1061
- },
1062
- {
1063
- "epoch": 0.038500764915859256,
1064
- "grad_norm": 8.362554550170898,
1065
- "learning_rate": 7.664974619289341e-06,
1066
- "loss": 0.5992493033409119,
1067
- "step": 151
1068
- },
1069
- {
1070
- "epoch": 0.038755736868944415,
1071
- "grad_norm": 5.351855754852295,
1072
- "learning_rate": 7.715736040609138e-06,
1073
- "loss": 0.6087163090705872,
1074
- "step": 152
1075
- },
1076
- {
1077
- "epoch": 0.039010708822029574,
1078
- "grad_norm": 5.387552261352539,
1079
- "learning_rate": 7.766497461928934e-06,
1080
- "loss": 0.6069482564926147,
1081
- "step": 153
1082
- },
1083
- {
1084
- "epoch": 0.03926568077511474,
1085
- "grad_norm": 5.964528560638428,
1086
- "learning_rate": 7.817258883248731e-06,
1087
- "loss": 0.5993782877922058,
1088
- "step": 154
1089
- },
1090
- {
1091
- "epoch": 0.0395206527281999,
1092
- "grad_norm": 10.254500389099121,
1093
- "learning_rate": 7.86802030456853e-06,
1094
- "loss": 0.6070284247398376,
1095
- "step": 155
1096
- },
1097
- {
1098
- "epoch": 0.03977562468128506,
1099
- "grad_norm": 7.0304341316223145,
1100
- "learning_rate": 7.918781725888326e-06,
1101
- "loss": 0.6082167625427246,
1102
- "step": 156
1103
- },
1104
- {
1105
- "epoch": 0.04003059663437022,
1106
- "grad_norm": 5.818995475769043,
1107
- "learning_rate": 7.969543147208122e-06,
1108
- "loss": 0.6076909303665161,
1109
- "step": 157
1110
- },
1111
- {
1112
- "epoch": 0.04028556858745538,
1113
- "grad_norm": 29.615108489990234,
1114
- "learning_rate": 8.020304568527919e-06,
1115
- "loss": 0.6017455458641052,
1116
- "step": 158
1117
- },
1118
- {
1119
- "epoch": 0.04054054054054054,
1120
- "grad_norm": 3.735245704650879,
1121
- "learning_rate": 8.071065989847716e-06,
1122
- "loss": 0.6166393160820007,
1123
- "step": 159
1124
- },
1125
- {
1126
- "epoch": 0.0407955124936257,
1127
- "grad_norm": 5.8766703605651855,
1128
- "learning_rate": 8.121827411167514e-06,
1129
- "loss": 0.5874968767166138,
1130
- "step": 160
1131
- },
1132
- {
1133
- "epoch": 0.04105048444671086,
1134
- "grad_norm": 3.799036979675293,
1135
- "learning_rate": 8.17258883248731e-06,
1136
- "loss": 0.6069945693016052,
1137
- "step": 161
1138
- },
1139
- {
1140
- "epoch": 0.04130545639979602,
1141
- "grad_norm": 4.457949161529541,
1142
- "learning_rate": 8.223350253807107e-06,
1143
- "loss": 0.6015027761459351,
1144
- "step": 162
1145
- },
1146
- {
1147
- "epoch": 0.041560428352881186,
1148
- "grad_norm": 9.537139892578125,
1149
- "learning_rate": 8.274111675126905e-06,
1150
- "loss": 0.5889110565185547,
1151
- "step": 163
1152
- },
1153
- {
1154
- "epoch": 0.041815400305966345,
1155
- "grad_norm": 4.916431903839111,
1156
- "learning_rate": 8.324873096446702e-06,
1157
- "loss": 0.6008636951446533,
1158
- "step": 164
1159
- },
1160
- {
1161
- "epoch": 0.042070372259051504,
1162
- "grad_norm": 3.8718481063842773,
1163
- "learning_rate": 8.375634517766498e-06,
1164
- "loss": 0.5849786996841431,
1165
- "step": 165
1166
- },
1167
- {
1168
- "epoch": 0.04232534421213666,
1169
- "grad_norm": 4.909853458404541,
1170
- "learning_rate": 8.426395939086295e-06,
1171
- "loss": 0.5965617299079895,
1172
- "step": 166
1173
- },
1174
- {
1175
- "epoch": 0.04258031616522182,
1176
- "grad_norm": 5.692605495452881,
1177
- "learning_rate": 8.477157360406092e-06,
1178
- "loss": 0.6011509299278259,
1179
- "step": 167
1180
- },
1181
- {
1182
- "epoch": 0.04283528811830699,
1183
- "grad_norm": 5.997374057769775,
1184
- "learning_rate": 8.52791878172589e-06,
1185
- "loss": 0.5989026427268982,
1186
- "step": 168
1187
- },
1188
- {
1189
- "epoch": 0.04309026007139215,
1190
- "grad_norm": 4.384658336639404,
1191
- "learning_rate": 8.578680203045686e-06,
1192
- "loss": 0.6050680875778198,
1193
- "step": 169
1194
- },
1195
- {
1196
- "epoch": 0.043345232024477306,
1197
- "grad_norm": 7.642217636108398,
1198
- "learning_rate": 8.629441624365483e-06,
1199
- "loss": 0.6081173419952393,
1200
- "step": 170
1201
- },
1202
- {
1203
- "epoch": 0.043600203977562466,
1204
- "grad_norm": 3.865013360977173,
1205
- "learning_rate": 8.68020304568528e-06,
1206
- "loss": 0.5977111458778381,
1207
- "step": 171
1208
- },
1209
- {
1210
- "epoch": 0.04385517593064763,
1211
- "grad_norm": 3.2660181522369385,
1212
- "learning_rate": 8.730964467005076e-06,
1213
- "loss": 0.6077733039855957,
1214
- "step": 172
1215
- },
1216
- {
1217
- "epoch": 0.04411014788373279,
1218
- "grad_norm": 6.390474796295166,
1219
- "learning_rate": 8.781725888324873e-06,
1220
- "loss": 0.5887953639030457,
1221
- "step": 173
1222
- },
1223
- {
1224
- "epoch": 0.04436511983681795,
1225
- "grad_norm": 3.376460313796997,
1226
- "learning_rate": 8.832487309644671e-06,
1227
- "loss": 0.6023260951042175,
1228
- "step": 174
1229
- },
1230
- {
1231
- "epoch": 0.04462009178990311,
1232
- "grad_norm": 11.863870620727539,
1233
- "learning_rate": 8.883248730964468e-06,
1234
- "loss": 0.581910252571106,
1235
- "step": 175
1236
- },
1237
- {
1238
- "epoch": 0.04487506374298827,
1239
- "grad_norm": 3.767972707748413,
1240
- "learning_rate": 8.934010152284264e-06,
1241
- "loss": 0.5987359285354614,
1242
- "step": 176
1243
- },
1244
- {
1245
- "epoch": 0.045130035696073434,
1246
- "grad_norm": 4.307549953460693,
1247
- "learning_rate": 8.984771573604062e-06,
1248
- "loss": 0.5949980020523071,
1249
- "step": 177
1250
- },
1251
- {
1252
- "epoch": 0.04538500764915859,
1253
- "grad_norm": 3.665287494659424,
1254
- "learning_rate": 9.035532994923859e-06,
1255
- "loss": 0.5925557017326355,
1256
- "step": 178
1257
- },
1258
- {
1259
- "epoch": 0.04563997960224375,
1260
- "grad_norm": 3.1785333156585693,
1261
- "learning_rate": 9.086294416243656e-06,
1262
- "loss": 0.6000990271568298,
1263
- "step": 179
1264
- },
1265
- {
1266
- "epoch": 0.04589495155532891,
1267
- "grad_norm": 7.793868541717529,
1268
- "learning_rate": 9.137055837563452e-06,
1269
- "loss": 0.588386058807373,
1270
- "step": 180
1271
- },
1272
- {
1273
- "epoch": 0.04614992350841408,
1274
- "grad_norm": 3.8479087352752686,
1275
- "learning_rate": 9.187817258883249e-06,
1276
- "loss": 0.6026387214660645,
1277
- "step": 181
1278
- },
1279
- {
1280
- "epoch": 0.046404895461499236,
1281
- "grad_norm": 4.099995136260986,
1282
- "learning_rate": 9.238578680203047e-06,
1283
- "loss": 0.6077677607536316,
1284
- "step": 182
1285
- },
1286
- {
1287
- "epoch": 0.046659867414584395,
1288
- "grad_norm": 4.00807523727417,
1289
- "learning_rate": 9.289340101522844e-06,
1290
- "loss": 0.5968591570854187,
1291
- "step": 183
1292
- },
1293
- {
1294
- "epoch": 0.046914839367669554,
1295
- "grad_norm": 3.527108669281006,
1296
- "learning_rate": 9.34010152284264e-06,
1297
- "loss": 0.6021297574043274,
1298
- "step": 184
1299
- },
1300
- {
1301
- "epoch": 0.04716981132075472,
1302
- "grad_norm": 3.72705078125,
1303
- "learning_rate": 9.390862944162438e-06,
1304
- "loss": 0.6009560823440552,
1305
- "step": 185
1306
- },
1307
- {
1308
- "epoch": 0.04742478327383988,
1309
- "grad_norm": 3.00913143157959,
1310
- "learning_rate": 9.441624365482235e-06,
1311
- "loss": 0.61063551902771,
1312
- "step": 186
1313
- },
1314
- {
1315
- "epoch": 0.04767975522692504,
1316
- "grad_norm": 3.3247106075286865,
1317
- "learning_rate": 9.492385786802032e-06,
1318
- "loss": 0.5803219676017761,
1319
- "step": 187
1320
- },
1321
- {
1322
- "epoch": 0.0479347271800102,
1323
- "grad_norm": 4.032495498657227,
1324
- "learning_rate": 9.543147208121828e-06,
1325
- "loss": 0.5938379168510437,
1326
- "step": 188
1327
- },
1328
- {
1329
- "epoch": 0.04818969913309536,
1330
- "grad_norm": 2.833737373352051,
1331
- "learning_rate": 9.593908629441625e-06,
1332
- "loss": 0.597266674041748,
1333
- "step": 189
1334
- },
1335
- {
1336
- "epoch": 0.04844467108618052,
1337
- "grad_norm": 4.320077419281006,
1338
- "learning_rate": 9.644670050761421e-06,
1339
- "loss": 0.6046103835105896,
1340
- "step": 190
1341
- },
1342
- {
1343
- "epoch": 0.04869964303926568,
1344
- "grad_norm": 4.185924530029297,
1345
- "learning_rate": 9.69543147208122e-06,
1346
- "loss": 0.6062490344047546,
1347
- "step": 191
1348
- },
1349
- {
1350
- "epoch": 0.04895461499235084,
1351
- "grad_norm": 5.10711669921875,
1352
- "learning_rate": 9.746192893401016e-06,
1353
- "loss": 0.6057544946670532,
1354
- "step": 192
1355
- },
1356
- {
1357
- "epoch": 0.049209586945436,
1358
- "grad_norm": 3.324521780014038,
1359
- "learning_rate": 9.796954314720813e-06,
1360
- "loss": 0.5799339413642883,
1361
- "step": 193
1362
- },
1363
- {
1364
- "epoch": 0.049464558898521166,
1365
- "grad_norm": 3.1842339038848877,
1366
- "learning_rate": 9.84771573604061e-06,
1367
- "loss": 0.6013438105583191,
1368
- "step": 194
1369
- },
1370
- {
1371
- "epoch": 0.049719530851606325,
1372
- "grad_norm": 2.3341190814971924,
1373
- "learning_rate": 9.898477157360406e-06,
1374
- "loss": 0.59135502576828,
1375
- "step": 195
1376
- },
1377
- {
1378
- "epoch": 0.049974502804691484,
1379
- "grad_norm": 2.7749569416046143,
1380
- "learning_rate": 9.949238578680204e-06,
1381
- "loss": 0.5952393412590027,
1382
- "step": 196
1383
- },
1384
- {
1385
- "epoch": 0.05022947475777664,
1386
- "grad_norm": 3.0547854900360107,
1387
- "learning_rate": 1e-05,
1388
- "loss": 0.5911858081817627,
1389
- "step": 197
1390
- },
1391
- {
1392
- "epoch": 0.0504844467108618,
1393
- "grad_norm": 3.5078237056732178,
1394
- "learning_rate": 9.999998221773107e-06,
1395
- "loss": 0.5847702026367188,
1396
- "step": 198
1397
- },
1398
- {
1399
- "epoch": 0.05073941866394697,
1400
- "grad_norm": 3.5228848457336426,
1401
- "learning_rate": 9.999992887093691e-06,
1402
- "loss": 0.6045140624046326,
1403
- "step": 199
1404
- },
1405
- {
1406
- "epoch": 0.05099439061703213,
1407
- "grad_norm": 2.6987292766571045,
1408
- "learning_rate": 9.999983995965547e-06,
1409
- "loss": 0.604773223400116,
1410
- "step": 200
1411
- },
1412
- {
1413
- "epoch": 0.051249362570117286,
1414
- "grad_norm": 2.957871198654175,
1415
- "learning_rate": 9.999971548395e-06,
1416
- "loss": 0.5873357057571411,
1417
- "step": 201
1418
- },
1419
- {
1420
- "epoch": 0.051504334523202445,
1421
- "grad_norm": 3.11552357673645,
1422
- "learning_rate": 9.999955544390902e-06,
1423
- "loss": 0.5930721759796143,
1424
- "step": 202
1425
- },
1426
- {
1427
- "epoch": 0.05175930647628761,
1428
- "grad_norm": 2.396313428878784,
1429
- "learning_rate": 9.999935983964639e-06,
1430
- "loss": 0.5927582383155823,
1431
- "step": 203
1432
- },
1433
- {
1434
- "epoch": 0.05201427842937277,
1435
- "grad_norm": 2.3981285095214844,
1436
- "learning_rate": 9.999912867130124e-06,
1437
- "loss": 0.5999355316162109,
1438
- "step": 204
1439
- },
1440
- {
1441
- "epoch": 0.05226925038245793,
1442
- "grad_norm": 2.930799722671509,
1443
- "learning_rate": 9.999886193903796e-06,
1444
- "loss": 0.5816469192504883,
1445
- "step": 205
1446
- },
1447
- {
1448
- "epoch": 0.05252422233554309,
1449
- "grad_norm": 3.259716272354126,
1450
- "learning_rate": 9.999855964304633e-06,
1451
- "loss": 0.5927892327308655,
1452
- "step": 206
1453
- },
1454
- {
1455
- "epoch": 0.05277919428862825,
1456
- "grad_norm": 2.8784027099609375,
1457
- "learning_rate": 9.999822178354131e-06,
1458
- "loss": 0.59906005859375,
1459
- "step": 207
1460
- },
1461
- {
1462
- "epoch": 0.053034166241713414,
1463
- "grad_norm": 5.081634998321533,
1464
- "learning_rate": 9.999784836076325e-06,
1465
- "loss": 0.5926652550697327,
1466
- "step": 208
1467
- },
1468
- {
1469
- "epoch": 0.05328913819479857,
1470
- "grad_norm": 2.9275574684143066,
1471
- "learning_rate": 9.999743937497778e-06,
1472
- "loss": 0.6022918224334717,
1473
- "step": 209
1474
- },
1475
- {
1476
- "epoch": 0.05354411014788373,
1477
- "grad_norm": 3.017028570175171,
1478
- "learning_rate": 9.999699482647578e-06,
1479
- "loss": 0.5827761888504028,
1480
- "step": 210
1481
- },
1482
- {
1483
- "epoch": 0.05379908210096889,
1484
- "grad_norm": 2.75286602973938,
1485
- "learning_rate": 9.999651471557346e-06,
1486
- "loss": 0.5874055027961731,
1487
- "step": 211
1488
- },
1489
- {
1490
- "epoch": 0.05405405405405406,
1491
- "grad_norm": 15.714319229125977,
1492
- "learning_rate": 9.99959990426123e-06,
1493
- "loss": 0.6002755165100098,
1494
- "step": 212
1495
- },
1496
- {
1497
- "epoch": 0.054309026007139216,
1498
- "grad_norm": 4.887640476226807,
1499
- "learning_rate": 9.999544780795913e-06,
1500
- "loss": 0.5951391458511353,
1501
- "step": 213
1502
- },
1503
- {
1504
- "epoch": 0.054563997960224375,
1505
- "grad_norm": 4.7820844650268555,
1506
- "learning_rate": 9.999486101200603e-06,
1507
- "loss": 0.5926157832145691,
1508
- "step": 214
1509
- },
1510
- {
1511
- "epoch": 0.054818969913309534,
1512
- "grad_norm": 3.6625335216522217,
1513
- "learning_rate": 9.999423865517037e-06,
1514
- "loss": 0.5840062499046326,
1515
- "step": 215
1516
- },
1517
- {
1518
- "epoch": 0.05507394186639469,
1519
- "grad_norm": 2.7337467670440674,
1520
- "learning_rate": 9.999358073789481e-06,
1521
- "loss": 0.5910326242446899,
1522
- "step": 216
1523
- },
1524
- {
1525
- "epoch": 0.05532891381947986,
1526
- "grad_norm": 2.746720790863037,
1527
- "learning_rate": 9.999288726064735e-06,
1528
- "loss": 0.5885810852050781,
1529
- "step": 217
1530
- },
1531
- {
1532
- "epoch": 0.05558388577256502,
1533
- "grad_norm": 2.9541351795196533,
1534
- "learning_rate": 9.999215822392125e-06,
1535
- "loss": 0.5852217674255371,
1536
- "step": 218
1537
- },
1538
- {
1539
- "epoch": 0.05583885772565018,
1540
- "grad_norm": 3.7349913120269775,
1541
- "learning_rate": 9.999139362823507e-06,
1542
- "loss": 0.5871468782424927,
1543
- "step": 219
1544
- },
1545
- {
1546
- "epoch": 0.05609382967873534,
1547
- "grad_norm": 2.559229612350464,
1548
- "learning_rate": 9.999059347413262e-06,
1549
- "loss": 0.5802211761474609,
1550
- "step": 220
1551
- },
1552
- {
1553
- "epoch": 0.0563488016318205,
1554
- "grad_norm": 3.2847959995269775,
1555
- "learning_rate": 9.99897577621831e-06,
1556
- "loss": 0.5856022834777832,
1557
- "step": 221
1558
- },
1559
- {
1560
- "epoch": 0.05660377358490566,
1561
- "grad_norm": 4.062271595001221,
1562
- "learning_rate": 9.99888864929809e-06,
1563
- "loss": 0.5967508554458618,
1564
- "step": 222
1565
- },
1566
- {
1567
- "epoch": 0.05685874553799082,
1568
- "grad_norm": 4.350281238555908,
1569
- "learning_rate": 9.99879796671458e-06,
1570
- "loss": 0.5894291400909424,
1571
- "step": 223
1572
- },
1573
- {
1574
- "epoch": 0.05711371749107598,
1575
- "grad_norm": 2.316976547241211,
1576
- "learning_rate": 9.998703728532273e-06,
1577
- "loss": 0.5789750218391418,
1578
- "step": 224
1579
- },
1580
- {
1581
- "epoch": 0.05736868944416114,
1582
- "grad_norm": 2.7369654178619385,
1583
- "learning_rate": 9.998605934818208e-06,
1584
- "loss": 0.5857868790626526,
1585
- "step": 225
1586
- },
1587
- {
1588
- "epoch": 0.057623661397246305,
1589
- "grad_norm": 3.066457748413086,
1590
- "learning_rate": 9.998504585641941e-06,
1591
- "loss": 0.5850772857666016,
1592
- "step": 226
1593
- },
1594
- {
1595
- "epoch": 0.057878633350331464,
1596
- "grad_norm": 3.2670044898986816,
1597
- "learning_rate": 9.998399681075562e-06,
1598
- "loss": 0.5871639251708984,
1599
- "step": 227
1600
- },
1601
- {
1602
- "epoch": 0.05813360530341662,
1603
- "grad_norm": 3.2137832641601562,
1604
- "learning_rate": 9.998291221193685e-06,
1605
- "loss": 0.5901861190795898,
1606
- "step": 228
1607
- },
1608
- {
1609
- "epoch": 0.05838857725650178,
1610
- "grad_norm": 4.253380298614502,
1611
- "learning_rate": 9.998179206073461e-06,
1612
- "loss": 0.5958875417709351,
1613
- "step": 229
1614
- },
1615
- {
1616
- "epoch": 0.05864354920958695,
1617
- "grad_norm": 2.9854331016540527,
1618
- "learning_rate": 9.998063635794566e-06,
1619
- "loss": 0.5908941626548767,
1620
- "step": 230
1621
- },
1622
- {
1623
- "epoch": 0.05889852116267211,
1624
- "grad_norm": 2.664669990539551,
1625
- "learning_rate": 9.9979445104392e-06,
1626
- "loss": 0.574123740196228,
1627
- "step": 231
1628
- },
1629
- {
1630
- "epoch": 0.059153493115757266,
1631
- "grad_norm": 2.4796957969665527,
1632
- "learning_rate": 9.997821830092095e-06,
1633
- "loss": 0.5850093364715576,
1634
- "step": 232
1635
- },
1636
- {
1637
- "epoch": 0.059408465068842425,
1638
- "grad_norm": 2.0589680671691895,
1639
- "learning_rate": 9.99769559484052e-06,
1640
- "loss": 0.5854530334472656,
1641
- "step": 233
1642
- },
1643
- {
1644
- "epoch": 0.05966343702192759,
1645
- "grad_norm": 1.7592800855636597,
1646
- "learning_rate": 9.997565804774257e-06,
1647
- "loss": 0.5807881951332092,
1648
- "step": 234
1649
- },
1650
- {
1651
- "epoch": 0.05991840897501275,
1652
- "grad_norm": 1.856603980064392,
1653
- "learning_rate": 9.997432459985627e-06,
1654
- "loss": 0.5874162912368774,
1655
- "step": 235
1656
- },
1657
- {
1658
- "epoch": 0.06017338092809791,
1659
- "grad_norm": 1.8138952255249023,
1660
- "learning_rate": 9.997295560569477e-06,
1661
- "loss": 0.5984858274459839,
1662
- "step": 236
1663
- },
1664
- {
1665
- "epoch": 0.06042835288118307,
1666
- "grad_norm": 1.8532931804656982,
1667
- "learning_rate": 9.997155106623184e-06,
1668
- "loss": 0.586401104927063,
1669
- "step": 237
1670
- },
1671
- {
1672
- "epoch": 0.06068332483426823,
1673
- "grad_norm": 2.2513363361358643,
1674
- "learning_rate": 9.99701109824665e-06,
1675
- "loss": 0.5933905839920044,
1676
- "step": 238
1677
- },
1678
- {
1679
- "epoch": 0.060938296787353394,
1680
- "grad_norm": 2.3893911838531494,
1681
- "learning_rate": 9.996863535542306e-06,
1682
- "loss": 0.5808255672454834,
1683
- "step": 239
1684
- },
1685
- {
1686
- "epoch": 0.06119326874043855,
1687
- "grad_norm": 2.2882018089294434,
1688
- "learning_rate": 9.996712418615116e-06,
1689
- "loss": 0.5888140201568604,
1690
- "step": 240
1691
- },
1692
- {
1693
- "epoch": 0.06144824069352371,
1694
- "grad_norm": 2.062934637069702,
1695
- "learning_rate": 9.996557747572562e-06,
1696
- "loss": 0.589434027671814,
1697
- "step": 241
1698
- },
1699
- {
1700
- "epoch": 0.06170321264660887,
1701
- "grad_norm": 2.081850051879883,
1702
- "learning_rate": 9.996399522524664e-06,
1703
- "loss": 0.5899481773376465,
1704
- "step": 242
1705
- },
1706
- {
1707
- "epoch": 0.06195818459969404,
1708
- "grad_norm": 2.06208872795105,
1709
- "learning_rate": 9.996237743583965e-06,
1710
- "loss": 0.5857259631156921,
1711
- "step": 243
1712
- },
1713
- {
1714
- "epoch": 0.062213156552779196,
1715
- "grad_norm": 1.8559035062789917,
1716
- "learning_rate": 9.996072410865538e-06,
1717
- "loss": 0.5844748020172119,
1718
- "step": 244
1719
- },
1720
- {
1721
- "epoch": 0.062468128505864355,
1722
- "grad_norm": 1.9922292232513428,
1723
- "learning_rate": 9.99590352448698e-06,
1724
- "loss": 0.5953148603439331,
1725
- "step": 245
1726
- },
1727
- {
1728
- "epoch": 0.06272310045894952,
1729
- "grad_norm": 2.6719133853912354,
1730
- "learning_rate": 9.995731084568421e-06,
1731
- "loss": 0.5793883204460144,
1732
- "step": 246
1733
- },
1734
- {
1735
- "epoch": 0.06297807241203468,
1736
- "grad_norm": 1.8634369373321533,
1737
- "learning_rate": 9.995555091232516e-06,
1738
- "loss": 0.5814319849014282,
1739
- "step": 247
1740
- },
1741
- {
1742
- "epoch": 0.06323304436511984,
1743
- "grad_norm": 1.8876395225524902,
1744
- "learning_rate": 9.995375544604447e-06,
1745
- "loss": 0.572151780128479,
1746
- "step": 248
1747
- },
1748
- {
1749
- "epoch": 0.063488016318205,
1750
- "grad_norm": 1.8171889781951904,
1751
- "learning_rate": 9.99519244481192e-06,
1752
- "loss": 0.5862112045288086,
1753
- "step": 249
1754
- },
1755
- {
1756
- "epoch": 0.06374298827129016,
1757
- "grad_norm": 2.4239213466644287,
1758
- "learning_rate": 9.995005791985178e-06,
1759
- "loss": 0.579311728477478,
1760
- "step": 250
1761
- },
1762
- {
1763
- "epoch": 0.06399796022437532,
1764
- "grad_norm": 2.0270440578460693,
1765
- "learning_rate": 9.99481558625698e-06,
1766
- "loss": 0.5841118097305298,
1767
- "step": 251
1768
- },
1769
- {
1770
- "epoch": 0.06425293217746048,
1771
- "grad_norm": 1.6712284088134766,
1772
- "learning_rate": 9.994621827762624e-06,
1773
- "loss": 0.5896538496017456,
1774
- "step": 252
1775
- },
1776
- {
1777
- "epoch": 0.06450790413054563,
1778
- "grad_norm": 1.8382529020309448,
1779
- "learning_rate": 9.994424516639924e-06,
1780
- "loss": 0.5876548290252686,
1781
- "step": 253
1782
- },
1783
- {
1784
- "epoch": 0.0647628760836308,
1785
- "grad_norm": 2.464630603790283,
1786
- "learning_rate": 9.994223653029225e-06,
1787
- "loss": 0.56418776512146,
1788
- "step": 254
1789
- },
1790
- {
1791
- "epoch": 0.06501784803671597,
1792
- "grad_norm": 5.462632656097412,
1793
- "learning_rate": 9.994019237073402e-06,
1794
- "loss": 0.5843555927276611,
1795
- "step": 255
1796
- },
1797
- {
1798
- "epoch": 0.06527281998980113,
1799
- "grad_norm": 2.105456829071045,
1800
- "learning_rate": 9.993811268917854e-06,
1801
- "loss": 0.5954089164733887,
1802
- "step": 256
1803
- },
1804
- {
1805
- "epoch": 0.06552779194288628,
1806
- "grad_norm": 2.2847952842712402,
1807
- "learning_rate": 9.993599748710505e-06,
1808
- "loss": 0.58359694480896,
1809
- "step": 257
1810
- },
1811
- {
1812
- "epoch": 0.06578276389597144,
1813
- "grad_norm": 1.971805453300476,
1814
- "learning_rate": 9.99338467660181e-06,
1815
- "loss": 0.5963630676269531,
1816
- "step": 258
1817
- },
1818
- {
1819
- "epoch": 0.0660377358490566,
1820
- "grad_norm": 2.609140396118164,
1821
- "learning_rate": 9.993166052744745e-06,
1822
- "loss": 0.5824004411697388,
1823
- "step": 259
1824
- },
1825
- {
1826
- "epoch": 0.06629270780214176,
1827
- "grad_norm": 5.869304180145264,
1828
- "learning_rate": 9.992943877294817e-06,
1829
- "loss": 0.5748361349105835,
1830
- "step": 260
1831
- },
1832
- {
1833
- "epoch": 0.06654767975522692,
1834
- "grad_norm": 4.773970603942871,
1835
- "learning_rate": 9.992718150410054e-06,
1836
- "loss": 0.5913362503051758,
1837
- "step": 261
1838
- },
1839
- {
1840
- "epoch": 0.06680265170831208,
1841
- "grad_norm": 2.3107783794403076,
1842
- "learning_rate": 9.992488872251019e-06,
1843
- "loss": 0.5675320625305176,
1844
- "step": 262
1845
- },
1846
- {
1847
- "epoch": 0.06705762366139725,
1848
- "grad_norm": 3.1674349308013916,
1849
- "learning_rate": 9.992256042980792e-06,
1850
- "loss": 0.5810559988021851,
1851
- "step": 263
1852
- },
1853
- {
1854
- "epoch": 0.06731259561448241,
1855
- "grad_norm": 2.4077420234680176,
1856
- "learning_rate": 9.992019662764982e-06,
1857
- "loss": 0.586556613445282,
1858
- "step": 264
1859
- },
1860
- {
1861
- "epoch": 0.06756756756756757,
1862
- "grad_norm": 2.182682514190674,
1863
- "learning_rate": 9.991779731771727e-06,
1864
- "loss": 0.5913389325141907,
1865
- "step": 265
1866
- },
1867
- {
1868
- "epoch": 0.06782253952065273,
1869
- "grad_norm": 2.2735114097595215,
1870
- "learning_rate": 9.991536250171683e-06,
1871
- "loss": 0.5736482739448547,
1872
- "step": 266
1873
- },
1874
- {
1875
- "epoch": 0.06807751147373789,
1876
- "grad_norm": 2.0414175987243652,
1877
- "learning_rate": 9.991289218138042e-06,
1878
- "loss": 0.5860854983329773,
1879
- "step": 267
1880
- },
1881
- {
1882
- "epoch": 0.06833248342682305,
1883
- "grad_norm": 2.2997677326202393,
1884
- "learning_rate": 9.99103863584651e-06,
1885
- "loss": 0.5764031410217285,
1886
- "step": 268
1887
- },
1888
- {
1889
- "epoch": 0.06858745537990821,
1890
- "grad_norm": 2.6660847663879395,
1891
- "learning_rate": 9.990784503475327e-06,
1892
- "loss": 0.5681920051574707,
1893
- "step": 269
1894
- },
1895
- {
1896
- "epoch": 0.06884242733299337,
1897
- "grad_norm": 3.4867351055145264,
1898
- "learning_rate": 9.990526821205256e-06,
1899
- "loss": 0.5747722387313843,
1900
- "step": 270
1901
- },
1902
- {
1903
- "epoch": 0.06909739928607853,
1904
- "grad_norm": 2.430447816848755,
1905
- "learning_rate": 9.990265589219578e-06,
1906
- "loss": 0.5789008140563965,
1907
- "step": 271
1908
- },
1909
- {
1910
- "epoch": 0.0693523712391637,
1911
- "grad_norm": 2.5677506923675537,
1912
- "learning_rate": 9.990000807704114e-06,
1913
- "loss": 0.5765048265457153,
1914
- "step": 272
1915
- },
1916
- {
1917
- "epoch": 0.06960734319224886,
1918
- "grad_norm": 2.7650086879730225,
1919
- "learning_rate": 9.989732476847194e-06,
1920
- "loss": 0.575411319732666,
1921
- "step": 273
1922
- },
1923
- {
1924
- "epoch": 0.06986231514533402,
1925
- "grad_norm": 1.8177183866500854,
1926
- "learning_rate": 9.989460596839681e-06,
1927
- "loss": 0.5772970914840698,
1928
- "step": 274
1929
- },
1930
- {
1931
- "epoch": 0.07011728709841918,
1932
- "grad_norm": 4.586569786071777,
1933
- "learning_rate": 9.98918516787496e-06,
1934
- "loss": 0.581605076789856,
1935
- "step": 275
1936
- },
1937
- {
1938
- "epoch": 0.07037225905150434,
1939
- "grad_norm": 3.1498584747314453,
1940
- "learning_rate": 9.988906190148944e-06,
1941
- "loss": 0.5844799876213074,
1942
- "step": 276
1943
- },
1944
- {
1945
- "epoch": 0.0706272310045895,
1946
- "grad_norm": 4.407031536102295,
1947
- "learning_rate": 9.988623663860064e-06,
1948
- "loss": 0.5937627553939819,
1949
- "step": 277
1950
- },
1951
- {
1952
- "epoch": 0.07088220295767465,
1953
- "grad_norm": 2.287809371948242,
1954
- "learning_rate": 9.988337589209281e-06,
1955
- "loss": 0.5812350511550903,
1956
- "step": 278
1957
- },
1958
- {
1959
- "epoch": 0.07113717491075981,
1960
- "grad_norm": 3.0038914680480957,
1961
- "learning_rate": 9.988047966400074e-06,
1962
- "loss": 0.5812468528747559,
1963
- "step": 279
1964
- },
1965
- {
1966
- "epoch": 0.07139214686384497,
1967
- "grad_norm": 2.2336087226867676,
1968
- "learning_rate": 9.987754795638451e-06,
1969
- "loss": 0.5780528783798218,
1970
- "step": 280
1971
- },
1972
- {
1973
- "epoch": 0.07164711881693014,
1974
- "grad_norm": 2.0841615200042725,
1975
- "learning_rate": 9.987458077132943e-06,
1976
- "loss": 0.5742005705833435,
1977
- "step": 281
1978
- },
1979
- {
1980
- "epoch": 0.0719020907700153,
1981
- "grad_norm": 2.2732021808624268,
1982
- "learning_rate": 9.9871578110946e-06,
1983
- "loss": 0.5748374462127686,
1984
- "step": 282
1985
- },
1986
- {
1987
- "epoch": 0.07215706272310046,
1988
- "grad_norm": 2.061392307281494,
1989
- "learning_rate": 9.986853997737e-06,
1990
- "loss": 0.5818248987197876,
1991
- "step": 283
1992
- },
1993
- {
1994
- "epoch": 0.07241203467618562,
1995
- "grad_norm": 2.114393949508667,
1996
- "learning_rate": 9.986546637276245e-06,
1997
- "loss": 0.5788212418556213,
1998
- "step": 284
1999
- },
2000
- {
2001
- "epoch": 0.07266700662927078,
2002
- "grad_norm": 2.8290114402770996,
2003
- "learning_rate": 9.986235729930954e-06,
2004
- "loss": 0.5592154264450073,
2005
- "step": 285
2006
- },
2007
- {
2008
- "epoch": 0.07292197858235594,
2009
- "grad_norm": 2.5851333141326904,
2010
- "learning_rate": 9.985921275922275e-06,
2011
- "loss": 0.5769827961921692,
2012
- "step": 286
2013
- },
2014
- {
2015
- "epoch": 0.0731769505354411,
2016
- "grad_norm": 3.3514719009399414,
2017
- "learning_rate": 9.985603275473874e-06,
2018
- "loss": 0.5761103630065918,
2019
- "step": 287
2020
- },
2021
- {
2022
- "epoch": 0.07343192248852626,
2023
- "grad_norm": 2.6545956134796143,
2024
- "learning_rate": 9.985281728811943e-06,
2025
- "loss": 0.5879877805709839,
2026
- "step": 288
2027
- },
2028
- {
2029
- "epoch": 0.07368689444161142,
2030
- "grad_norm": 3.3587162494659424,
2031
- "learning_rate": 9.984956636165194e-06,
2032
- "loss": 0.5736678838729858,
2033
- "step": 289
2034
- },
2035
- {
2036
- "epoch": 0.07394186639469659,
2037
- "grad_norm": 4.441765785217285,
2038
- "learning_rate": 9.984627997764866e-06,
2039
- "loss": 0.5688061118125916,
2040
- "step": 290
2041
- },
2042
- {
2043
- "epoch": 0.07419683834778175,
2044
- "grad_norm": 3.293834924697876,
2045
- "learning_rate": 9.984295813844714e-06,
2046
- "loss": 0.5717458128929138,
2047
- "step": 291
2048
- },
2049
- {
2050
- "epoch": 0.07445181030086691,
2051
- "grad_norm": 5.738956928253174,
2052
- "learning_rate": 9.983960084641014e-06,
2053
- "loss": 0.5693488121032715,
2054
- "step": 292
2055
- },
2056
- {
2057
- "epoch": 0.07470678225395207,
2058
- "grad_norm": 2.3275930881500244,
2059
- "learning_rate": 9.983620810392574e-06,
2060
- "loss": 0.5773096084594727,
2061
- "step": 293
2062
- },
2063
- {
2064
- "epoch": 0.07496175420703723,
2065
- "grad_norm": 20.73316192626953,
2066
- "learning_rate": 9.983277991340709e-06,
2067
- "loss": 0.5704731345176697,
2068
- "step": 294
2069
- },
2070
- {
2071
- "epoch": 0.07521672616012239,
2072
- "grad_norm": 2.5347342491149902,
2073
- "learning_rate": 9.98293162772927e-06,
2074
- "loss": 0.5738517045974731,
2075
- "step": 295
2076
- },
2077
- {
2078
- "epoch": 0.07547169811320754,
2079
- "grad_norm": 2.9725821018218994,
2080
- "learning_rate": 9.98258171980462e-06,
2081
- "loss": 0.5762280225753784,
2082
- "step": 296
2083
- },
2084
- {
2085
- "epoch": 0.0757266700662927,
2086
- "grad_norm": 2.1470696926116943,
2087
- "learning_rate": 9.982228267815644e-06,
2088
- "loss": 0.5746186375617981,
2089
- "step": 297
2090
- },
2091
- {
2092
- "epoch": 0.07598164201937786,
2093
- "grad_norm": 3.3097052574157715,
2094
- "learning_rate": 9.981871272013747e-06,
2095
- "loss": 0.571082353591919,
2096
- "step": 298
2097
- },
2098
- {
2099
- "epoch": 0.07623661397246304,
2100
- "grad_norm": 2.7459795475006104,
2101
- "learning_rate": 9.981510732652862e-06,
2102
- "loss": 0.5818139910697937,
2103
- "step": 299
2104
- },
2105
- {
2106
- "epoch": 0.0764915859255482,
2107
- "grad_norm": 18.801685333251953,
2108
- "learning_rate": 9.981146649989435e-06,
2109
- "loss": 0.5743539333343506,
2110
- "step": 300
2111
- },
2112
- {
2113
- "epoch": 0.07674655787863335,
2114
- "grad_norm": 2.2822065353393555,
2115
- "learning_rate": 9.980779024282434e-06,
2116
- "loss": 0.5851568579673767,
2117
- "step": 301
2118
- },
2119
- {
2120
- "epoch": 0.07700152983171851,
2121
- "grad_norm": 4.970335006713867,
2122
- "learning_rate": 9.980407855793348e-06,
2123
- "loss": 0.567683756351471,
2124
- "step": 302
2125
- },
2126
- {
2127
- "epoch": 0.07725650178480367,
2128
- "grad_norm": 2.375314950942993,
2129
- "learning_rate": 9.980033144786186e-06,
2130
- "loss": 0.5673696994781494,
2131
- "step": 303
2132
- },
2133
- {
2134
- "epoch": 0.07751147373788883,
2135
- "grad_norm": 3.8067612648010254,
2136
- "learning_rate": 9.979654891527476e-06,
2137
- "loss": 0.5791069865226746,
2138
- "step": 304
2139
- },
2140
- {
2141
- "epoch": 0.07776644569097399,
2142
- "grad_norm": 2.5518550872802734,
2143
- "learning_rate": 9.979273096286268e-06,
2144
- "loss": 0.5585265159606934,
2145
- "step": 305
2146
- },
2147
- {
2148
- "epoch": 0.07802141764405915,
2149
- "grad_norm": 5.341617584228516,
2150
- "learning_rate": 9.978887759334125e-06,
2151
- "loss": 0.5657647848129272,
2152
- "step": 306
2153
- },
2154
- {
2155
- "epoch": 0.07827638959714431,
2156
- "grad_norm": 2.304340124130249,
2157
- "learning_rate": 9.978498880945138e-06,
2158
- "loss": 0.5609466433525085,
2159
- "step": 307
2160
- },
2161
- {
2162
- "epoch": 0.07853136155022948,
2163
- "grad_norm": 4.493840217590332,
2164
- "learning_rate": 9.978106461395912e-06,
2165
- "loss": 0.5843234062194824,
2166
- "step": 308
2167
- },
2168
- {
2169
- "epoch": 0.07878633350331464,
2170
- "grad_norm": 3.806673526763916,
2171
- "learning_rate": 9.97771050096557e-06,
2172
- "loss": 0.5767306089401245,
2173
- "step": 309
2174
- },
2175
- {
2176
- "epoch": 0.0790413054563998,
2177
- "grad_norm": 4.547685623168945,
2178
- "learning_rate": 9.977310999935756e-06,
2179
- "loss": 0.5610491037368774,
2180
- "step": 310
2181
- },
2182
- {
2183
- "epoch": 0.07929627740948496,
2184
- "grad_norm": 2.6244962215423584,
2185
- "learning_rate": 9.976907958590629e-06,
2186
- "loss": 0.563437819480896,
2187
- "step": 311
2188
- },
2189
- {
2190
- "epoch": 0.07955124936257012,
2191
- "grad_norm": 2.8524208068847656,
2192
- "learning_rate": 9.976501377216871e-06,
2193
- "loss": 0.5713075995445251,
2194
- "step": 312
2195
- },
2196
- {
2197
- "epoch": 0.07980622131565528,
2198
- "grad_norm": 3.662938117980957,
2199
- "learning_rate": 9.97609125610368e-06,
2200
- "loss": 0.5592581629753113,
2201
- "step": 313
2202
- },
2203
- {
2204
- "epoch": 0.08006119326874044,
2205
- "grad_norm": 2.5013678073883057,
2206
- "learning_rate": 9.97567759554277e-06,
2207
- "loss": 0.571389377117157,
2208
- "step": 314
2209
- },
2210
- {
2211
- "epoch": 0.0803161652218256,
2212
- "grad_norm": 3.6687936782836914,
2213
- "learning_rate": 9.975260395828376e-06,
2214
- "loss": 0.575724720954895,
2215
- "step": 315
2216
- },
2217
- {
2218
- "epoch": 0.08057113717491075,
2219
- "grad_norm": 2.705265760421753,
2220
- "learning_rate": 9.974839657257245e-06,
2221
- "loss": 0.5768415331840515,
2222
- "step": 316
2223
- },
2224
- {
2225
- "epoch": 0.08082610912799593,
2226
- "grad_norm": 3.3716695308685303,
2227
- "learning_rate": 9.974415380128646e-06,
2228
- "loss": 0.5781106948852539,
2229
- "step": 317
2230
- },
2231
- {
2232
- "epoch": 0.08108108108108109,
2233
- "grad_norm": 2.1914381980895996,
2234
- "learning_rate": 9.973987564744363e-06,
2235
- "loss": 0.5832979679107666,
2236
- "step": 318
2237
- },
2238
- {
2239
- "epoch": 0.08133605303416624,
2240
- "grad_norm": 2.789609432220459,
2241
- "learning_rate": 9.973556211408699e-06,
2242
- "loss": 0.5669206380844116,
2243
- "step": 319
2244
- },
2245
- {
2246
- "epoch": 0.0815910249872514,
2247
- "grad_norm": 2.268644332885742,
2248
- "learning_rate": 9.97312132042847e-06,
2249
- "loss": 0.5540966391563416,
2250
- "step": 320
2251
- },
2252
- {
2253
- "epoch": 0.08184599694033656,
2254
- "grad_norm": 5.483611583709717,
2255
- "learning_rate": 9.972682892113009e-06,
2256
- "loss": 0.5671651363372803,
2257
- "step": 321
2258
- },
2259
- {
2260
- "epoch": 0.08210096889342172,
2261
- "grad_norm": 2.727654457092285,
2262
- "learning_rate": 9.972240926774167e-06,
2263
- "loss": 0.5680763125419617,
2264
- "step": 322
2265
- },
2266
- {
2267
- "epoch": 0.08235594084650688,
2268
- "grad_norm": 3.385113000869751,
2269
- "learning_rate": 9.97179542472631e-06,
2270
- "loss": 0.5710451006889343,
2271
- "step": 323
2272
- },
2273
- {
2274
- "epoch": 0.08261091279959204,
2275
- "grad_norm": 2.97533917427063,
2276
- "learning_rate": 9.971346386286323e-06,
2277
- "loss": 0.5777047872543335,
2278
- "step": 324
2279
- },
2280
- {
2281
- "epoch": 0.0828658847526772,
2282
- "grad_norm": 2.553302526473999,
2283
- "learning_rate": 9.970893811773597e-06,
2284
- "loss": 0.5666846036911011,
2285
- "step": 325
2286
- },
2287
- {
2288
- "epoch": 0.08312085670576237,
2289
- "grad_norm": 2.8742709159851074,
2290
- "learning_rate": 9.970437701510047e-06,
2291
- "loss": 0.5720920562744141,
2292
- "step": 326
2293
- },
2294
- {
2295
- "epoch": 0.08337582865884753,
2296
- "grad_norm": 5.297033786773682,
2297
- "learning_rate": 9.969978055820099e-06,
2298
- "loss": 0.5806701183319092,
2299
- "step": 327
2300
- },
2301
- {
2302
- "epoch": 0.08363080061193269,
2303
- "grad_norm": 5.130199432373047,
2304
- "learning_rate": 9.969514875030695e-06,
2305
- "loss": 0.5766419768333435,
2306
- "step": 328
2307
- },
2308
- {
2309
- "epoch": 0.08388577256501785,
2310
- "grad_norm": 5.532764911651611,
2311
- "learning_rate": 9.969048159471291e-06,
2312
- "loss": 0.5834259390830994,
2313
- "step": 329
2314
- },
2315
- {
2316
- "epoch": 0.08414074451810301,
2317
- "grad_norm": 2.856757640838623,
2318
- "learning_rate": 9.96857790947386e-06,
2319
- "loss": 0.5710228085517883,
2320
- "step": 330
2321
- },
2322
- {
2323
- "epoch": 0.08439571647118817,
2324
- "grad_norm": 2.8127498626708984,
2325
- "learning_rate": 9.968104125372883e-06,
2326
- "loss": 0.5596293210983276,
2327
- "step": 331
2328
- },
2329
- {
2330
- "epoch": 0.08465068842427333,
2331
- "grad_norm": 3.476436138153076,
2332
- "learning_rate": 9.967626807505359e-06,
2333
- "loss": 0.5636457800865173,
2334
- "step": 332
2335
- },
2336
- {
2337
- "epoch": 0.08490566037735849,
2338
- "grad_norm": 3.897481679916382,
2339
- "learning_rate": 9.967145956210801e-06,
2340
- "loss": 0.579216480255127,
2341
- "step": 333
2342
- },
2343
- {
2344
- "epoch": 0.08516063233044364,
2345
- "grad_norm": 4.0459818840026855,
2346
- "learning_rate": 9.966661571831235e-06,
2347
- "loss": 0.5644032955169678,
2348
- "step": 334
2349
- },
2350
- {
2351
- "epoch": 0.08541560428352882,
2352
- "grad_norm": 2.3585076332092285,
2353
- "learning_rate": 9.966173654711197e-06,
2354
- "loss": 0.5744975805282593,
2355
- "step": 335
2356
- },
2357
- {
2358
- "epoch": 0.08567057623661398,
2359
- "grad_norm": 4.57792854309082,
2360
- "learning_rate": 9.965682205197737e-06,
2361
- "loss": 0.5631759762763977,
2362
- "step": 336
2363
- },
2364
- {
2365
- "epoch": 0.08592554818969914,
2366
- "grad_norm": 2.656183958053589,
2367
- "learning_rate": 9.965187223640422e-06,
2368
- "loss": 0.5689008235931396,
2369
- "step": 337
2370
- },
2371
- {
2372
- "epoch": 0.0861805201427843,
2373
- "grad_norm": 2.8403210639953613,
2374
- "learning_rate": 9.964688710391325e-06,
2375
- "loss": 0.5751459002494812,
2376
- "step": 338
2377
- },
2378
- {
2379
- "epoch": 0.08643549209586945,
2380
- "grad_norm": 2.4729552268981934,
2381
- "learning_rate": 9.964186665805034e-06,
2382
- "loss": 0.5721454620361328,
2383
- "step": 339
2384
- },
2385
- {
2386
- "epoch": 0.08669046404895461,
2387
- "grad_norm": 2.7285823822021484,
2388
- "learning_rate": 9.96368109023865e-06,
2389
- "loss": 0.5654371380805969,
2390
- "step": 340
2391
- },
2392
- {
2393
- "epoch": 0.08694543600203977,
2394
- "grad_norm": 2.1327974796295166,
2395
- "learning_rate": 9.963171984051786e-06,
2396
- "loss": 0.5701696276664734,
2397
- "step": 341
2398
- },
2399
- {
2400
- "epoch": 0.08720040795512493,
2401
- "grad_norm": 2.0096259117126465,
2402
- "learning_rate": 9.96265934760656e-06,
2403
- "loss": 0.5614428520202637,
2404
- "step": 342
2405
- },
2406
- {
2407
- "epoch": 0.08745537990821009,
2408
- "grad_norm": 2.1693127155303955,
2409
- "learning_rate": 9.962143181267607e-06,
2410
- "loss": 0.559285581111908,
2411
- "step": 343
2412
- },
2413
- {
2414
- "epoch": 0.08771035186129526,
2415
- "grad_norm": 21.574602127075195,
2416
- "learning_rate": 9.961623485402074e-06,
2417
- "loss": 0.5767900347709656,
2418
- "step": 344
2419
- },
2420
- {
2421
- "epoch": 0.08796532381438042,
2422
- "grad_norm": 3.6397855281829834,
2423
- "learning_rate": 9.961100260379612e-06,
2424
- "loss": 0.5654720067977905,
2425
- "step": 345
2426
- },
2427
- {
2428
- "epoch": 0.08822029576746558,
2429
- "grad_norm": 2.2064383029937744,
2430
- "learning_rate": 9.960573506572391e-06,
2431
- "loss": 0.5701004266738892,
2432
- "step": 346
2433
- },
2434
- {
2435
- "epoch": 0.08847526772055074,
2436
- "grad_norm": 8.230388641357422,
2437
- "learning_rate": 9.960043224355081e-06,
2438
- "loss": 0.5593109130859375,
2439
- "step": 347
2440
- },
2441
- {
2442
- "epoch": 0.0887302396736359,
2443
- "grad_norm": 2.8151967525482178,
2444
- "learning_rate": 9.959509414104868e-06,
2445
- "loss": 0.5906691551208496,
2446
- "step": 348
2447
- },
2448
- {
2449
- "epoch": 0.08898521162672106,
2450
- "grad_norm": 2.6584460735321045,
2451
- "learning_rate": 9.95897207620145e-06,
2452
- "loss": 0.5554200410842896,
2453
- "step": 349
2454
- },
2455
- {
2456
- "epoch": 0.08924018357980622,
2457
- "grad_norm": 2.230492353439331,
2458
- "learning_rate": 9.958431211027026e-06,
2459
- "loss": 0.5893738269805908,
2460
- "step": 350
2461
- },
2462
- {
2463
- "epoch": 0.08949515553289138,
2464
- "grad_norm": 2.6074323654174805,
2465
- "learning_rate": 9.95788681896631e-06,
2466
- "loss": 0.5782200694084167,
2467
- "step": 351
2468
- },
2469
- {
2470
- "epoch": 0.08975012748597654,
2471
- "grad_norm": 1.678154706954956,
2472
- "learning_rate": 9.957338900406525e-06,
2473
- "loss": 0.5617006421089172,
2474
- "step": 352
2475
- },
2476
- {
2477
- "epoch": 0.09000509943906171,
2478
- "grad_norm": 1.8922593593597412,
2479
- "learning_rate": 9.956787455737397e-06,
2480
- "loss": 0.5668175220489502,
2481
- "step": 353
2482
- },
2483
- {
2484
- "epoch": 0.09026007139214687,
2485
- "grad_norm": 2.508108615875244,
2486
- "learning_rate": 9.956232485351167e-06,
2487
- "loss": 0.5639225244522095,
2488
- "step": 354
2489
- },
2490
- {
2491
- "epoch": 0.09051504334523203,
2492
- "grad_norm": 3.1573591232299805,
2493
- "learning_rate": 9.955673989642578e-06,
2494
- "loss": 0.557616651058197,
2495
- "step": 355
2496
- },
2497
- {
2498
- "epoch": 0.09077001529831719,
2499
- "grad_norm": 2.6363086700439453,
2500
- "learning_rate": 9.955111969008884e-06,
2501
- "loss": 0.5727308988571167,
2502
- "step": 356
2503
- },
2504
- {
2505
- "epoch": 0.09102498725140235,
2506
- "grad_norm": 3.0296366214752197,
2507
- "learning_rate": 9.954546423849842e-06,
2508
- "loss": 0.5857377052307129,
2509
- "step": 357
2510
- },
2511
- {
2512
- "epoch": 0.0912799592044875,
2513
- "grad_norm": 2.211289644241333,
2514
- "learning_rate": 9.953977354567723e-06,
2515
- "loss": 0.5680918097496033,
2516
- "step": 358
2517
- },
2518
- {
2519
- "epoch": 0.09153493115757266,
2520
- "grad_norm": 2.4168450832366943,
2521
- "learning_rate": 9.953404761567299e-06,
2522
- "loss": 0.5729773044586182,
2523
- "step": 359
2524
- },
2525
- {
2526
- "epoch": 0.09178990311065782,
2527
- "grad_norm": 2.44561767578125,
2528
- "learning_rate": 9.952828645255849e-06,
2529
- "loss": 0.5714361667633057,
2530
- "step": 360
2531
- },
2532
- {
2533
- "epoch": 0.09204487506374298,
2534
- "grad_norm": 2.3818702697753906,
2535
- "learning_rate": 9.952249006043163e-06,
2536
- "loss": 0.5708685517311096,
2537
- "step": 361
2538
- },
2539
- {
2540
- "epoch": 0.09229984701682815,
2541
- "grad_norm": 2.3030588626861572,
2542
- "learning_rate": 9.95166584434153e-06,
2543
- "loss": 0.5894932746887207,
2544
- "step": 362
2545
- },
2546
- {
2547
- "epoch": 0.09255481896991331,
2548
- "grad_norm": 6.469595432281494,
2549
- "learning_rate": 9.951079160565747e-06,
2550
- "loss": 0.5663866400718689,
2551
- "step": 363
2552
- },
2553
- {
2554
- "epoch": 0.09280979092299847,
2555
- "grad_norm": 3.4183993339538574,
2556
- "learning_rate": 9.950488955133118e-06,
2557
- "loss": 0.5638302564620972,
2558
- "step": 364
2559
- },
2560
- {
2561
- "epoch": 0.09306476287608363,
2562
- "grad_norm": 2.77966570854187,
2563
- "learning_rate": 9.94989522846345e-06,
2564
- "loss": 0.5765042304992676,
2565
- "step": 365
2566
- },
2567
- {
2568
- "epoch": 0.09331973482916879,
2569
- "grad_norm": 3.0297207832336426,
2570
- "learning_rate": 9.949297980979056e-06,
2571
- "loss": 0.5630312561988831,
2572
- "step": 366
2573
- },
2574
- {
2575
- "epoch": 0.09357470678225395,
2576
- "grad_norm": 2.405850410461426,
2577
- "learning_rate": 9.948697213104754e-06,
2578
- "loss": 0.5837424993515015,
2579
- "step": 367
2580
- },
2581
- {
2582
- "epoch": 0.09382967873533911,
2583
- "grad_norm": 3.138538122177124,
2584
- "learning_rate": 9.94809292526786e-06,
2585
- "loss": 0.5750604271888733,
2586
- "step": 368
2587
- },
2588
- {
2589
- "epoch": 0.09408465068842427,
2590
- "grad_norm": 2.2397854328155518,
2591
- "learning_rate": 9.947485117898204e-06,
2592
- "loss": 0.5760766267776489,
2593
- "step": 369
2594
- },
2595
- {
2596
- "epoch": 0.09433962264150944,
2597
- "grad_norm": 3.172715425491333,
2598
- "learning_rate": 9.946873791428108e-06,
2599
- "loss": 0.5685843229293823,
2600
- "step": 370
2601
- },
2602
- {
2603
- "epoch": 0.0945945945945946,
2604
- "grad_norm": 1.991998553276062,
2605
- "learning_rate": 9.94625894629241e-06,
2606
- "loss": 0.5787516832351685,
2607
- "step": 371
2608
- },
2609
- {
2610
- "epoch": 0.09484956654767976,
2611
- "grad_norm": 2.286336898803711,
2612
- "learning_rate": 9.945640582928438e-06,
2613
- "loss": 0.5736852884292603,
2614
- "step": 372
2615
- },
2616
- {
2617
- "epoch": 0.09510453850076492,
2618
- "grad_norm": 3.257596492767334,
2619
- "learning_rate": 9.945018701776027e-06,
2620
- "loss": 0.5641451478004456,
2621
- "step": 373
2622
- },
2623
- {
2624
- "epoch": 0.09535951045385008,
2625
- "grad_norm": 1.9430432319641113,
2626
- "learning_rate": 9.944393303277523e-06,
2627
- "loss": 0.5590602159500122,
2628
- "step": 374
2629
- },
2630
- {
2631
- "epoch": 0.09561448240693524,
2632
- "grad_norm": 1.9836829900741577,
2633
- "learning_rate": 9.943764387877758e-06,
2634
- "loss": 0.5627750158309937,
2635
- "step": 375
2636
- },
2637
- {
2638
- "epoch": 0.0958694543600204,
2639
- "grad_norm": 3.3282463550567627,
2640
- "learning_rate": 9.943131956024078e-06,
2641
- "loss": 0.5622991323471069,
2642
- "step": 376
2643
- },
2644
- {
2645
- "epoch": 0.09612442631310555,
2646
- "grad_norm": 2.969663143157959,
2647
- "learning_rate": 9.942496008166325e-06,
2648
- "loss": 0.5768574476242065,
2649
- "step": 377
2650
- },
2651
- {
2652
- "epoch": 0.09637939826619071,
2653
- "grad_norm": 4.455136775970459,
2654
- "learning_rate": 9.941856544756843e-06,
2655
- "loss": 0.5683542490005493,
2656
- "step": 378
2657
- },
2658
- {
2659
- "epoch": 0.09663437021927589,
2660
- "grad_norm": 2.2980382442474365,
2661
- "learning_rate": 9.941213566250475e-06,
2662
- "loss": 0.5575302839279175,
2663
- "step": 379
2664
- },
2665
- {
2666
- "epoch": 0.09688934217236105,
2667
- "grad_norm": 2.662888526916504,
2668
- "learning_rate": 9.940567073104568e-06,
2669
- "loss": 0.5679932236671448,
2670
- "step": 380
2671
- },
2672
- {
2673
- "epoch": 0.0971443141254462,
2674
- "grad_norm": 5.441934585571289,
2675
- "learning_rate": 9.939917065778965e-06,
2676
- "loss": 0.5602290034294128,
2677
- "step": 381
2678
- },
2679
- {
2680
- "epoch": 0.09739928607853136,
2681
- "grad_norm": 2.7203729152679443,
2682
- "learning_rate": 9.93926354473601e-06,
2683
- "loss": 0.5430388450622559,
2684
- "step": 382
2685
- },
2686
- {
2687
- "epoch": 0.09765425803161652,
2688
- "grad_norm": 3.4668338298797607,
2689
- "learning_rate": 9.938606510440548e-06,
2690
- "loss": 0.5614120960235596,
2691
- "step": 383
2692
- },
2693
- {
2694
- "epoch": 0.09790922998470168,
2695
- "grad_norm": 2.841843605041504,
2696
- "learning_rate": 9.937945963359919e-06,
2697
- "loss": 0.5806660056114197,
2698
- "step": 384
2699
- },
2700
- {
2701
- "epoch": 0.09816420193778684,
2702
- "grad_norm": 2.8140835762023926,
2703
- "learning_rate": 9.937281903963968e-06,
2704
- "loss": 0.5649043321609497,
2705
- "step": 385
2706
- },
2707
- {
2708
- "epoch": 0.098419173890872,
2709
- "grad_norm": 1.9643291234970093,
2710
- "learning_rate": 9.93661433272503e-06,
2711
- "loss": 0.5647992491722107,
2712
- "step": 386
2713
- },
2714
- {
2715
- "epoch": 0.09867414584395716,
2716
- "grad_norm": 2.129157543182373,
2717
- "learning_rate": 9.935943250117945e-06,
2718
- "loss": 0.5809118151664734,
2719
- "step": 387
2720
- },
2721
- {
2722
- "epoch": 0.09892911779704233,
2723
- "grad_norm": 3.388709545135498,
2724
- "learning_rate": 9.935268656620048e-06,
2725
- "loss": 0.5615958571434021,
2726
- "step": 388
2727
- },
2728
- {
2729
- "epoch": 0.09918408975012749,
2730
- "grad_norm": 3.151108741760254,
2731
- "learning_rate": 9.934590552711167e-06,
2732
- "loss": 0.5755165815353394,
2733
- "step": 389
2734
- },
2735
- {
2736
- "epoch": 0.09943906170321265,
2737
- "grad_norm": 3.6108310222625732,
2738
- "learning_rate": 9.933908938873638e-06,
2739
- "loss": 0.5648122429847717,
2740
- "step": 390
2741
- },
2742
- {
2743
- "epoch": 0.09969403365629781,
2744
- "grad_norm": 4.878504753112793,
2745
- "learning_rate": 9.933223815592278e-06,
2746
- "loss": 0.5667652487754822,
2747
- "step": 391
2748
- },
2749
- {
2750
- "epoch": 0.09994900560938297,
2751
- "grad_norm": 3.92559814453125,
2752
- "learning_rate": 9.932535183354418e-06,
2753
- "loss": 0.5649821162223816,
2754
- "step": 392
2755
- },
2756
- {
2757
- "epoch": 0.10020397756246813,
2758
- "grad_norm": 4.201918601989746,
2759
- "learning_rate": 9.93184304264987e-06,
2760
- "loss": 0.5741337537765503,
2761
- "step": 393
2762
- },
2763
- {
2764
- "epoch": 0.10045894951555329,
2765
- "grad_norm": 4.567728519439697,
2766
- "learning_rate": 9.931147393970949e-06,
2767
- "loss": 0.5604985952377319,
2768
- "step": 394
2769
- },
2770
- {
2771
- "epoch": 0.10071392146863845,
2772
- "grad_norm": 4.432600498199463,
2773
- "learning_rate": 9.930448237812462e-06,
2774
- "loss": 0.5782884359359741,
2775
- "step": 395
2776
- },
2777
- {
2778
- "epoch": 0.1009688934217236,
2779
- "grad_norm": 7.759515762329102,
2780
- "learning_rate": 9.929745574671714e-06,
2781
- "loss": 0.58075350522995,
2782
- "step": 396
2783
- },
2784
- {
2785
- "epoch": 0.10122386537480878,
2786
- "grad_norm": 4.055099964141846,
2787
- "learning_rate": 9.929039405048502e-06,
2788
- "loss": 0.5670657157897949,
2789
- "step": 397
2790
- },
2791
- {
2792
- "epoch": 0.10147883732789394,
2793
- "grad_norm": 5.6480326652526855,
2794
- "learning_rate": 9.928329729445118e-06,
2795
- "loss": 0.5497553944587708,
2796
- "step": 398
2797
- },
2798
- {
2799
- "epoch": 0.1017338092809791,
2800
- "grad_norm": 5.648945331573486,
2801
- "learning_rate": 9.92761654836635e-06,
2802
- "loss": 0.5727936029434204,
2803
- "step": 399
2804
- },
2805
- {
2806
- "epoch": 0.10198878123406425,
2807
- "grad_norm": 8.808309555053711,
2808
- "learning_rate": 9.926899862319472e-06,
2809
- "loss": 0.5711077451705933,
2810
- "step": 400
2811
- },
2812
- {
2813
- "epoch": 0.10224375318714941,
2814
- "grad_norm": 4.987074375152588,
2815
- "learning_rate": 9.92617967181426e-06,
2816
- "loss": 0.5715692043304443,
2817
- "step": 401
2818
- },
2819
- {
2820
- "epoch": 0.10249872514023457,
2821
- "grad_norm": 10.667113304138184,
2822
- "learning_rate": 9.92545597736298e-06,
2823
- "loss": 0.579121470451355,
2824
- "step": 402
2825
- },
2826
- {
2827
- "epoch": 0.10275369709331973,
2828
- "grad_norm": 3.9461591243743896,
2829
- "learning_rate": 9.924728779480386e-06,
2830
- "loss": 0.5666054487228394,
2831
- "step": 403
2832
- },
2833
- {
2834
- "epoch": 0.10300866904640489,
2835
- "grad_norm": 4.880352020263672,
2836
- "learning_rate": 9.923998078683728e-06,
2837
- "loss": 0.5608178377151489,
2838
- "step": 404
2839
- },
2840
- {
2841
- "epoch": 0.10326364099949005,
2842
- "grad_norm": 2.8450400829315186,
2843
- "learning_rate": 9.923263875492745e-06,
2844
- "loss": 0.5613754987716675,
2845
- "step": 405
2846
- },
2847
- {
2848
- "epoch": 0.10351861295257522,
2849
- "grad_norm": 2.3046202659606934,
2850
- "learning_rate": 9.922526170429675e-06,
2851
- "loss": 0.5492349863052368,
2852
- "step": 406
2853
- },
2854
- {
2855
- "epoch": 0.10377358490566038,
2856
- "grad_norm": 3.4868693351745605,
2857
- "learning_rate": 9.921784964019234e-06,
2858
- "loss": 0.552125871181488,
2859
- "step": 407
2860
- },
2861
- {
2862
- "epoch": 0.10402855685874554,
2863
- "grad_norm": 2.966796875,
2864
- "learning_rate": 9.92104025678864e-06,
2865
- "loss": 0.5744848251342773,
2866
- "step": 408
2867
- },
2868
- {
2869
- "epoch": 0.1042835288118307,
2870
- "grad_norm": 3.2738051414489746,
2871
- "learning_rate": 9.920292049267592e-06,
2872
- "loss": 0.5550822615623474,
2873
- "step": 409
2874
- },
2875
- {
2876
- "epoch": 0.10453850076491586,
2877
- "grad_norm": 2.2583632469177246,
2878
- "learning_rate": 9.919540341988287e-06,
2879
- "loss": 0.5528494119644165,
2880
- "step": 410
2881
- },
2882
- {
2883
- "epoch": 0.10479347271800102,
2884
- "grad_norm": 1.9360980987548828,
2885
- "learning_rate": 9.918785135485405e-06,
2886
- "loss": 0.5590968728065491,
2887
- "step": 411
2888
- },
2889
- {
2890
- "epoch": 0.10504844467108618,
2891
- "grad_norm": 1.826249599456787,
2892
- "learning_rate": 9.918026430296119e-06,
2893
- "loss": 0.5546989440917969,
2894
- "step": 412
2895
- },
2896
- {
2897
- "epoch": 0.10530341662417134,
2898
- "grad_norm": 4.0869879722595215,
2899
- "learning_rate": 9.917264226960088e-06,
2900
- "loss": 0.5490781664848328,
2901
- "step": 413
2902
- },
2903
- {
2904
- "epoch": 0.1055583885772565,
2905
- "grad_norm": 2.0648045539855957,
2906
- "learning_rate": 9.916498526019461e-06,
2907
- "loss": 0.5518783330917358,
2908
- "step": 414
2909
- },
2910
- {
2911
- "epoch": 0.10581336053034167,
2912
- "grad_norm": 2.43099045753479,
2913
- "learning_rate": 9.915729328018874e-06,
2914
- "loss": 0.5636767745018005,
2915
- "step": 415
2916
- },
2917
- {
2918
- "epoch": 0.10606833248342683,
2919
- "grad_norm": 11.354096412658691,
2920
- "learning_rate": 9.914956633505449e-06,
2921
- "loss": 0.5413601398468018,
2922
- "step": 416
2923
- },
2924
- {
2925
- "epoch": 0.10632330443651199,
2926
- "grad_norm": 2.1528172492980957,
2927
- "learning_rate": 9.914180443028798e-06,
2928
- "loss": 0.5820633172988892,
2929
- "step": 417
2930
- },
2931
- {
2932
- "epoch": 0.10657827638959715,
2933
- "grad_norm": 1.944624423980713,
2934
- "learning_rate": 9.913400757141016e-06,
2935
- "loss": 0.569353461265564,
2936
- "step": 418
2937
- },
2938
- {
2939
- "epoch": 0.1068332483426823,
2940
- "grad_norm": 1.8753156661987305,
2941
- "learning_rate": 9.91261757639669e-06,
2942
- "loss": 0.5503973960876465,
2943
- "step": 419
2944
- },
2945
- {
2946
- "epoch": 0.10708822029576746,
2947
- "grad_norm": 4.639176368713379,
2948
- "learning_rate": 9.911830901352887e-06,
2949
- "loss": 0.5602145195007324,
2950
- "step": 420
2951
- },
2952
- {
2953
- "epoch": 0.10734319224885262,
2954
- "grad_norm": 2.0952956676483154,
2955
- "learning_rate": 9.91104073256916e-06,
2956
- "loss": 0.5641953945159912,
2957
- "step": 421
2958
- },
2959
- {
2960
- "epoch": 0.10759816420193778,
2961
- "grad_norm": 2.922534465789795,
2962
- "learning_rate": 9.91024707060755e-06,
2963
- "loss": 0.5575719475746155,
2964
- "step": 422
2965
- },
2966
- {
2967
- "epoch": 0.10785313615502294,
2968
- "grad_norm": 2.3282814025878906,
2969
- "learning_rate": 9.909449916032586e-06,
2970
- "loss": 0.5521407127380371,
2971
- "step": 423
2972
- },
2973
- {
2974
- "epoch": 0.10810810810810811,
2975
- "grad_norm": 3.6761107444763184,
2976
- "learning_rate": 9.90864926941127e-06,
2977
- "loss": 0.545198917388916,
2978
- "step": 424
2979
- },
2980
- {
2981
- "epoch": 0.10836308006119327,
2982
- "grad_norm": 3.0300190448760986,
2983
- "learning_rate": 9.907845131313097e-06,
2984
- "loss": 0.553956925868988,
2985
- "step": 425
2986
- },
2987
- {
2988
- "epoch": 0.10861805201427843,
2989
- "grad_norm": 2.0869300365448,
2990
- "learning_rate": 9.907037502310045e-06,
2991
- "loss": 0.5557433366775513,
2992
- "step": 426
2993
- },
2994
- {
2995
- "epoch": 0.10887302396736359,
2996
- "grad_norm": 8.301857948303223,
2997
- "learning_rate": 9.906226382976568e-06,
2998
- "loss": 0.5531321167945862,
2999
- "step": 427
3000
- },
3001
- {
3002
- "epoch": 0.10912799592044875,
3003
- "grad_norm": 1.706969141960144,
3004
- "learning_rate": 9.905411773889613e-06,
3005
- "loss": 0.5832129716873169,
3006
- "step": 428
3007
- },
3008
- {
3009
- "epoch": 0.10938296787353391,
3010
- "grad_norm": 4.29292106628418,
3011
- "learning_rate": 9.904593675628603e-06,
3012
- "loss": 0.5588259100914001,
3013
- "step": 429
3014
- },
3015
- {
3016
- "epoch": 0.10963793982661907,
3017
- "grad_norm": 1.8968133926391602,
3018
- "learning_rate": 9.903772088775441e-06,
3019
- "loss": 0.5617460608482361,
3020
- "step": 430
3021
- },
3022
- {
3023
- "epoch": 0.10989291177970423,
3024
- "grad_norm": 2.2897067070007324,
3025
- "learning_rate": 9.902947013914515e-06,
3026
- "loss": 0.56639564037323,
3027
- "step": 431
3028
- },
3029
- {
3030
- "epoch": 0.11014788373278939,
3031
- "grad_norm": 1.9766414165496826,
3032
- "learning_rate": 9.902118451632694e-06,
3033
- "loss": 0.5575860738754272,
3034
- "step": 432
3035
- },
3036
- {
3037
- "epoch": 0.11040285568587456,
3038
- "grad_norm": 1.9631874561309814,
3039
- "learning_rate": 9.901286402519328e-06,
3040
- "loss": 0.5484806895256042,
3041
- "step": 433
3042
- },
3043
- {
3044
- "epoch": 0.11065782763895972,
3045
- "grad_norm": 1.556809425354004,
3046
- "learning_rate": 9.900450867166244e-06,
3047
- "loss": 0.5607021450996399,
3048
- "step": 434
3049
- },
3050
- {
3051
- "epoch": 0.11091279959204488,
3052
- "grad_norm": 2.1358542442321777,
3053
- "learning_rate": 9.89961184616775e-06,
3054
- "loss": 0.5543813109397888,
3055
- "step": 435
3056
- },
3057
- {
3058
- "epoch": 0.11116777154513004,
3059
- "grad_norm": 1.9221220016479492,
3060
- "learning_rate": 9.898769340120635e-06,
3061
- "loss": 0.5601315498352051,
3062
- "step": 436
3063
- },
3064
- {
3065
- "epoch": 0.1114227434982152,
3066
- "grad_norm": 1.6563795804977417,
3067
- "learning_rate": 9.897923349624165e-06,
3068
- "loss": 0.5403985977172852,
3069
- "step": 437
3070
- },
3071
- {
3072
- "epoch": 0.11167771545130036,
3073
- "grad_norm": 1.8465486764907837,
3074
- "learning_rate": 9.897073875280088e-06,
3075
- "loss": 0.5579530000686646,
3076
- "step": 438
3077
- },
3078
- {
3079
- "epoch": 0.11193268740438551,
3080
- "grad_norm": 1.7812130451202393,
3081
- "learning_rate": 9.896220917692624e-06,
3082
- "loss": 0.5482833385467529,
3083
- "step": 439
3084
- },
3085
- {
3086
- "epoch": 0.11218765935747067,
3087
- "grad_norm": 1.6004902124404907,
3088
- "learning_rate": 9.895364477468474e-06,
3089
- "loss": 0.5627224445343018,
3090
- "step": 440
3091
- },
3092
- {
3093
- "epoch": 0.11244263131055583,
3094
- "grad_norm": 2.030223846435547,
3095
- "learning_rate": 9.894504555216818e-06,
3096
- "loss": 0.5554012656211853,
3097
- "step": 441
3098
- },
3099
- {
3100
- "epoch": 0.112697603263641,
3101
- "grad_norm": 2.268024444580078,
3102
- "learning_rate": 9.893641151549309e-06,
3103
- "loss": 0.5493656992912292,
3104
- "step": 442
3105
- },
3106
- {
3107
- "epoch": 0.11295257521672616,
3108
- "grad_norm": 17.941709518432617,
3109
- "learning_rate": 9.89277426708008e-06,
3110
- "loss": 0.5542835593223572,
3111
- "step": 443
3112
- },
3113
- {
3114
- "epoch": 0.11320754716981132,
3115
- "grad_norm": 1.722589373588562,
3116
- "learning_rate": 9.891903902425735e-06,
3117
- "loss": 0.5496931076049805,
3118
- "step": 444
3119
- },
3120
- {
3121
- "epoch": 0.11346251912289648,
3122
- "grad_norm": 2.5744998455047607,
3123
- "learning_rate": 9.891030058205359e-06,
3124
- "loss": 0.5635861158370972,
3125
- "step": 445
3126
- },
3127
- {
3128
- "epoch": 0.11371749107598164,
3129
- "grad_norm": 4.437044143676758,
3130
- "learning_rate": 9.890152735040508e-06,
3131
- "loss": 0.5610167980194092,
3132
- "step": 446
3133
- },
3134
- {
3135
- "epoch": 0.1139724630290668,
3136
- "grad_norm": 2.3134236335754395,
3137
- "learning_rate": 9.889271933555214e-06,
3138
- "loss": 0.5595861077308655,
3139
- "step": 447
3140
- },
3141
- {
3142
- "epoch": 0.11422743498215196,
3143
- "grad_norm": 2.3492934703826904,
3144
- "learning_rate": 9.888387654375982e-06,
3145
- "loss": 0.5650231242179871,
3146
- "step": 448
3147
- },
3148
- {
3149
- "epoch": 0.11448240693523712,
3150
- "grad_norm": 2.3988776206970215,
3151
- "learning_rate": 9.887499898131794e-06,
3152
- "loss": 0.5635617971420288,
3153
- "step": 449
3154
- },
3155
- {
3156
- "epoch": 0.11473737888832228,
3157
- "grad_norm": 1.8519774675369263,
3158
- "learning_rate": 9.886608665454103e-06,
3159
- "loss": 0.5491594076156616,
3160
- "step": 450
3161
- },
3162
- {
3163
- "epoch": 0.11499235084140745,
3164
- "grad_norm": 3.4155073165893555,
3165
- "learning_rate": 9.885713956976831e-06,
3166
- "loss": 0.5530607104301453,
3167
- "step": 451
3168
- },
3169
- {
3170
- "epoch": 0.11524732279449261,
3171
- "grad_norm": 2.399628162384033,
3172
- "learning_rate": 9.884815773336378e-06,
3173
- "loss": 0.556371808052063,
3174
- "step": 452
3175
- },
3176
- {
3177
- "epoch": 0.11550229474757777,
3178
- "grad_norm": 2.0557265281677246,
3179
- "learning_rate": 9.883914115171614e-06,
3180
- "loss": 0.5491721034049988,
3181
- "step": 453
3182
- },
3183
- {
3184
- "epoch": 0.11575726670066293,
3185
- "grad_norm": 1.9038746356964111,
3186
- "learning_rate": 9.883008983123881e-06,
3187
- "loss": 0.5566811561584473,
3188
- "step": 454
3189
- },
3190
- {
3191
- "epoch": 0.11601223865374809,
3192
- "grad_norm": 2.2605245113372803,
3193
- "learning_rate": 9.882100377836988e-06,
3194
- "loss": 0.5654865503311157,
3195
- "step": 455
3196
- },
3197
- {
3198
- "epoch": 0.11626721060683325,
3199
- "grad_norm": 1.8456274271011353,
3200
- "learning_rate": 9.88118829995722e-06,
3201
- "loss": 0.5562974214553833,
3202
- "step": 456
3203
- },
3204
- {
3205
- "epoch": 0.1165221825599184,
3206
- "grad_norm": 1.7836940288543701,
3207
- "learning_rate": 9.880272750133328e-06,
3208
- "loss": 0.566531777381897,
3209
- "step": 457
3210
- },
3211
- {
3212
- "epoch": 0.11677715451300356,
3213
- "grad_norm": 2.3698477745056152,
3214
- "learning_rate": 9.879353729016537e-06,
3215
- "loss": 0.557073175907135,
3216
- "step": 458
3217
- },
3218
- {
3219
- "epoch": 0.11703212646608872,
3220
- "grad_norm": 3.380236864089966,
3221
- "learning_rate": 9.878431237260535e-06,
3222
- "loss": 0.5533016920089722,
3223
- "step": 459
3224
- },
3225
- {
3226
- "epoch": 0.1172870984191739,
3227
- "grad_norm": 2.136101245880127,
3228
- "learning_rate": 9.877505275521485e-06,
3229
- "loss": 0.555318295955658,
3230
- "step": 460
3231
- },
3232
- {
3233
- "epoch": 0.11754207037225906,
3234
- "grad_norm": 4.435114860534668,
3235
- "learning_rate": 9.876575844458012e-06,
3236
- "loss": 0.5532917976379395,
3237
- "step": 461
3238
- },
3239
- {
3240
- "epoch": 0.11779704232534421,
3241
- "grad_norm": 2.546670913696289,
3242
- "learning_rate": 9.875642944731212e-06,
3243
- "loss": 0.5604023337364197,
3244
- "step": 462
3245
- },
3246
- {
3247
- "epoch": 0.11805201427842937,
3248
- "grad_norm": 3.9529716968536377,
3249
- "learning_rate": 9.87470657700465e-06,
3250
- "loss": 0.5603578090667725,
3251
- "step": 463
3252
- },
3253
- {
3254
- "epoch": 0.11830698623151453,
3255
- "grad_norm": 2.1415586471557617,
3256
- "learning_rate": 9.873766741944353e-06,
3257
- "loss": 0.5529968738555908,
3258
- "step": 464
3259
- },
3260
- {
3261
- "epoch": 0.11856195818459969,
3262
- "grad_norm": 2.085867166519165,
3263
- "learning_rate": 9.872823440218821e-06,
3264
- "loss": 0.5548291206359863,
3265
- "step": 465
3266
- },
3267
- {
3268
- "epoch": 0.11881693013768485,
3269
- "grad_norm": 3.555215358734131,
3270
- "learning_rate": 9.871876672499012e-06,
3271
- "loss": 0.5481231212615967,
3272
- "step": 466
3273
- },
3274
- {
3275
- "epoch": 0.11907190209077001,
3276
- "grad_norm": 2.137446641921997,
3277
- "learning_rate": 9.870926439458355e-06,
3278
- "loss": 0.5487592220306396,
3279
- "step": 467
3280
- },
3281
- {
3282
- "epoch": 0.11932687404385518,
3283
- "grad_norm": 2.8710625171661377,
3284
- "learning_rate": 9.86997274177274e-06,
3285
- "loss": 0.5602512955665588,
3286
- "step": 468
3287
- },
3288
- {
3289
- "epoch": 0.11958184599694034,
3290
- "grad_norm": 2.24103045463562,
3291
- "learning_rate": 9.869015580120527e-06,
3292
- "loss": 0.5430268049240112,
3293
- "step": 469
3294
- },
3295
- {
3296
- "epoch": 0.1198368179500255,
3297
- "grad_norm": 2.0165767669677734,
3298
- "learning_rate": 9.868054955182533e-06,
3299
- "loss": 0.5526243448257446,
3300
- "step": 470
3301
- },
3302
- {
3303
- "epoch": 0.12009178990311066,
3304
- "grad_norm": 2.7446656227111816,
3305
- "learning_rate": 9.867090867642042e-06,
3306
- "loss": 0.5715802311897278,
3307
- "step": 471
3308
- },
3309
- {
3310
- "epoch": 0.12034676185619582,
3311
- "grad_norm": 4.814818382263184,
3312
- "learning_rate": 9.866123318184803e-06,
3313
- "loss": 0.5400905609130859,
3314
- "step": 472
3315
- },
3316
- {
3317
- "epoch": 0.12060173380928098,
3318
- "grad_norm": 2.0347468852996826,
3319
- "learning_rate": 9.865152307499022e-06,
3320
- "loss": 0.5547370314598083,
3321
- "step": 473
3322
- },
3323
- {
3324
- "epoch": 0.12085670576236614,
3325
- "grad_norm": 1.9471986293792725,
3326
- "learning_rate": 9.864177836275371e-06,
3327
- "loss": 0.5558183193206787,
3328
- "step": 474
3329
- },
3330
- {
3331
- "epoch": 0.1211116777154513,
3332
- "grad_norm": 2.60526180267334,
3333
- "learning_rate": 9.863199905206983e-06,
3334
- "loss": 0.5537914037704468,
3335
- "step": 475
3336
- },
3337
- {
3338
- "epoch": 0.12136664966853646,
3339
- "grad_norm": 2.1302649974823,
3340
- "learning_rate": 9.862218514989452e-06,
3341
- "loss": 0.5470219850540161,
3342
- "step": 476
3343
- },
3344
- {
3345
- "epoch": 0.12162162162162163,
3346
- "grad_norm": 2.58955717086792,
3347
- "learning_rate": 9.861233666320828e-06,
3348
- "loss": 0.5556906461715698,
3349
- "step": 477
3350
- },
3351
- {
3352
- "epoch": 0.12187659357470679,
3353
- "grad_norm": 2.096116304397583,
3354
- "learning_rate": 9.86024535990163e-06,
3355
- "loss": 0.5494530200958252,
3356
- "step": 478
3357
- },
3358
- {
3359
- "epoch": 0.12213156552779195,
3360
- "grad_norm": 2.0807015895843506,
3361
- "learning_rate": 9.859253596434828e-06,
3362
- "loss": 0.5564595460891724,
3363
- "step": 479
3364
- },
3365
- {
3366
- "epoch": 0.1223865374808771,
3367
- "grad_norm": 3.05375599861145,
3368
- "learning_rate": 9.858258376625855e-06,
3369
- "loss": 0.5492424964904785,
3370
- "step": 480
3371
- },
3372
- {
3373
- "epoch": 0.12264150943396226,
3374
- "grad_norm": 2.2253363132476807,
3375
- "learning_rate": 9.8572597011826e-06,
3376
- "loss": 0.5405623912811279,
3377
- "step": 481
3378
- },
3379
- {
3380
- "epoch": 0.12289648138704742,
3381
- "grad_norm": 5.153561115264893,
3382
- "learning_rate": 9.856257570815415e-06,
3383
- "loss": 0.5545088052749634,
3384
- "step": 482
3385
- },
3386
- {
3387
- "epoch": 0.12315145334013258,
3388
- "grad_norm": 1.9642194509506226,
3389
- "learning_rate": 9.855251986237103e-06,
3390
- "loss": 0.5469231009483337,
3391
- "step": 483
3392
- },
3393
- {
3394
- "epoch": 0.12340642529321774,
3395
- "grad_norm": 4.149758815765381,
3396
- "learning_rate": 9.85424294816293e-06,
3397
- "loss": 0.5515980124473572,
3398
- "step": 484
3399
- },
3400
- {
3401
- "epoch": 0.1236613972463029,
3402
- "grad_norm": 3.9454345703125,
3403
- "learning_rate": 9.853230457310613e-06,
3404
- "loss": 0.5524808168411255,
3405
- "step": 485
3406
- },
3407
- {
3408
- "epoch": 0.12391636919938807,
3409
- "grad_norm": 1.893150806427002,
3410
- "learning_rate": 9.852214514400326e-06,
3411
- "loss": 0.5513901710510254,
3412
- "step": 486
3413
- },
3414
- {
3415
- "epoch": 0.12417134115247323,
3416
- "grad_norm": 2.6218254566192627,
3417
- "learning_rate": 9.851195120154701e-06,
3418
- "loss": 0.5557699799537659,
3419
- "step": 487
3420
- },
3421
- {
3422
- "epoch": 0.12442631310555839,
3423
- "grad_norm": 4.718894958496094,
3424
- "learning_rate": 9.850172275298828e-06,
3425
- "loss": 0.5626286268234253,
3426
- "step": 488
3427
- },
3428
- {
3429
- "epoch": 0.12468128505864355,
3430
- "grad_norm": 2.0461502075195312,
3431
- "learning_rate": 9.849145980560243e-06,
3432
- "loss": 0.5560814738273621,
3433
- "step": 489
3434
- },
3435
- {
3436
- "epoch": 0.12493625701172871,
3437
- "grad_norm": 4.197045803070068,
3438
- "learning_rate": 9.848116236668939e-06,
3439
- "loss": 0.5595450401306152,
3440
- "step": 490
3441
- },
3442
- {
3443
- "epoch": 0.12519122896481388,
3444
- "grad_norm": 2.1661648750305176,
3445
- "learning_rate": 9.847083044357367e-06,
3446
- "loss": 0.5466753840446472,
3447
- "step": 491
3448
- },
3449
- {
3450
- "epoch": 0.12544620091789904,
3451
- "grad_norm": 2.64660382270813,
3452
- "learning_rate": 9.846046404360423e-06,
3453
- "loss": 0.5495432615280151,
3454
- "step": 492
3455
- },
3456
- {
3457
- "epoch": 0.1257011728709842,
3458
- "grad_norm": 2.2083492279052734,
3459
- "learning_rate": 9.845006317415463e-06,
3460
- "loss": 0.5645055770874023,
3461
- "step": 493
3462
- },
3463
- {
3464
- "epoch": 0.12595614482406936,
3465
- "grad_norm": 2.4461467266082764,
3466
- "learning_rate": 9.843962784262289e-06,
3467
- "loss": 0.5569705963134766,
3468
- "step": 494
3469
- },
3470
- {
3471
- "epoch": 0.12621111677715452,
3472
- "grad_norm": 2.076951026916504,
3473
- "learning_rate": 9.842915805643156e-06,
3474
- "loss": 0.5476626753807068,
3475
- "step": 495
3476
- },
3477
- {
3478
- "epoch": 0.12646608873023968,
3479
- "grad_norm": 2.278542995452881,
3480
- "learning_rate": 9.841865382302773e-06,
3481
- "loss": 0.5460015535354614,
3482
- "step": 496
3483
- },
3484
- {
3485
- "epoch": 0.12672106068332484,
3486
- "grad_norm": 2.187434673309326,
3487
- "learning_rate": 9.840811514988294e-06,
3488
- "loss": 0.5466282367706299,
3489
- "step": 497
3490
- },
3491
- {
3492
- "epoch": 0.12697603263641,
3493
- "grad_norm": 2.5430383682250977,
3494
- "learning_rate": 9.839754204449328e-06,
3495
- "loss": 0.5423339605331421,
3496
- "step": 498
3497
- },
3498
- {
3499
- "epoch": 0.12723100458949516,
3500
- "grad_norm": 3.8312206268310547,
3501
- "learning_rate": 9.838693451437926e-06,
3502
- "loss": 0.5447465777397156,
3503
- "step": 499
3504
- },
3505
- {
3506
- "epoch": 0.12748597654258031,
3507
- "grad_norm": 2.2003931999206543,
3508
- "learning_rate": 9.837629256708595e-06,
3509
- "loss": 0.5435498952865601,
3510
- "step": 500
3511
- }
3512
- ],
3513
- "logging_steps": 1,
3514
- "max_steps": 3922,
3515
- "num_input_tokens_seen": 0,
3516
- "num_train_epochs": 1,
3517
- "save_steps": 500,
3518
- "stateful_callbacks": {
3519
- "TrainerControl": {
3520
- "args": {
3521
- "should_epoch_stop": false,
3522
- "should_evaluate": false,
3523
- "should_log": false,
3524
- "should_save": true,
3525
- "should_training_stop": false
3526
- },
3527
- "attributes": {}
3528
- }
3529
- },
3530
- "total_flos": 3.709996150610408e+19,
3531
- "train_batch_size": 4,
3532
- "trial_name": null,
3533
- "trial_params": null
3534
- }
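The deleted trainer_state.json above is a standard transformers Trainer state: a "log_history" array with one entry per optimizer step (this run logged every step, "logging_steps": 1), each carrying epoch, grad_norm, learning_rate and loss, plus run metadata such as max_steps, save_steps and total_flos. A minimal sketch of recovering the loss curve from such a file, assuming only the layout shown above (the checkpoint path is hypothetical):

import json

# Load a Trainer state file with the same layout as the one deleted above.
with open("checkpoint-500/trainer_state.json") as f:  # hypothetical path
    state = json.load(f)

# Keep the per-step entries that carry a training loss.
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]

print(f"{len(entries)} logged steps, final loss {losses[-1]:.4f}")

Note also that "train_batch_size": 4 is the per-device size; reaching the global batch of 384 in the run name would take 96 data-parallel replicas and/or gradient-accumulation steps combined.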
 
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:cb2b71716b815c69acae19745e5f5d002f10f9854295fcea3ac09170daa4d99b
3
- size 9297
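The three deleted lines are not the binary itself but a Git LFS pointer: the pointer-spec version, the SHA-256 of the stored object, and its size in bytes (9297). A minimal sketch of parsing such a pointer, relying only on the key/value-per-line format shown above:

# Parse a Git LFS pointer: one "key value" pair per line.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:cb2b71716b815c69acae19745e5f5d002f10f9854295fcea3ac09170daa4d99b\n"
    "size 9297\n"
)
print(pointer["oid"], pointer["size"])  # -> sha256:cb2b7171... 9297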
 
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/video_preprocessor_config.json DELETED
@@ -1,41 +0,0 @@
1
- {
2
- "crop_size": null,
3
- "data_format": "channels_first",
4
- "default_to_square": true,
5
- "device": null,
6
- "do_center_crop": null,
7
- "do_convert_rgb": true,
8
- "do_normalize": true,
9
- "do_rescale": true,
10
- "do_resize": true,
11
- "do_sample_frames": true,
12
- "fps": 2,
13
- "image_mean": [
14
- 0.5,
15
- 0.5,
16
- 0.5
17
- ],
18
- "image_std": [
19
- 0.5,
20
- 0.5,
21
- 0.5
22
- ],
23
- "input_data_format": null,
24
- "max_frames": 768,
25
- "merge_size": 2,
26
- "min_frames": 4,
27
- "num_frames": null,
28
- "pad_size": null,
29
- "patch_size": 16,
30
- "processor_class": "Qwen3VLProcessor",
31
- "resample": 3,
32
- "rescale_factor": 0.00392156862745098,
33
- "return_metadata": false,
34
- "size": {
35
- "longest_edge": 25165824,
36
- "shortest_edge": 4096
37
- },
38
- "temporal_patch_size": 2,
39
- "video_metadata": null,
40
- "video_processor_type": "Qwen3VLVideoProcessor"
41
- }
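This config drives Qwen3VLVideoProcessor: frames are sampled at 2 fps and clamped to 4-768 frames, normalized with mean/std 0.5, cut into 16x16 patches, with 2x2 patches merged per visual token and frames consumed in pairs ("temporal_patch_size": 2). Despite the edge-named keys, the "size" values read as per-frame pixel budgets (4096 to 25165824 = 24*1024*1024 pixels). A back-of-the-envelope sketch of the resulting token count; the processor's exact resize logic is not reproduced here, so treat the numbers as approximate:

patch_size = 16          # "patch_size"
merge_size = 2           # 2x2 spatial patches merge into one token
temporal_patch_size = 2  # frames are stacked in pairs

def approx_video_tokens(num_frames: int, height: int, width: int) -> int:
    # Assumes height and width already fit the 4096..25165824 pixel
    # bounds and divide evenly by patch_size * merge_size.
    grid_t = num_frames // temporal_patch_size
    grid_h = height // patch_size
    grid_w = width // patch_size
    return grid_t * (grid_h * grid_w) // (merge_size * merge_size)

print(approx_video_tokens(16, 448, 448))  # 8 * 784 / 4 = 1568 tokens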
 
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/vocab.json DELETED
The diff for this file is too large to render. See raw diff
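vocab.json and the companion merges.txt are the usual byte-level BPE layout: vocab.json is one large JSON object mapping token string to integer id, which is why the viewer declines to render its diff. A minimal sketch, assuming a local copy of the file:

import json

# vocab.json: {"token": id, ...} -- one entry per base BPE token;
# merges.txt from the same checkpoint holds the BPE merge ranks.
with open("vocab.json", encoding="utf-8") as f:  # hypothetical local path
    vocab = json.load(f)

print(len(vocab), "base tokens")
print(vocab.get("hello", -1))  # id of a token, or -1 if absent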
 
qwen3-vl4b-agentnet_filter_failure_ws4_lr2e-5_vit1e-5_aligner1e-5_bs384-step500/zero_to_fp32.py DELETED
@@ -1,760 +0,0 @@
1
- #!/usr/bin/env python
2
-
3
- # Copyright (c) Microsoft Corporation.
4
- # SPDX-License-Identifier: Apache-2.0
5
-
6
- # DeepSpeed Team
7
-
8
- # This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
9
- # copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
10
- # the future. Once extracted, the weights don't require DeepSpeed and can be used in any
11
- # application.
12
- #
13
- # example:
14
- # python zero_to_fp32.py . output_dir/
15
- # or
16
- # python zero_to_fp32.py . output_dir/ --safe_serialization
17
-
18
- import argparse
19
- import torch
20
- import glob
21
- import math
22
- import os
23
- import re
24
- import gc
25
- import json
26
- import numpy as np
27
- from tqdm import tqdm
28
- from collections import OrderedDict
29
- from dataclasses import dataclass
30
-
31
- # while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
32
- # DeepSpeed data structures it has to be available in the current python environment.
33
- from deepspeed.utils import logger
34
- from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
35
- FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
36
- FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
37
-
38
-
39
- @dataclass
40
- class zero_model_state:
41
- buffers: dict()
42
- param_shapes: dict()
43
- shared_params: list
44
- ds_version: int
45
- frozen_param_shapes: dict()
46
- frozen_param_fragments: dict()
47
-
48
-
49
- debug = 0
50
-
51
- # load to cpu
52
- device = torch.device('cpu')
53
-
54
-
55
- def atoi(text):
56
- return int(text) if text.isdigit() else text
57
-
58
-
59
- def natural_keys(text):
60
- '''
61
- alist.sort(key=natural_keys) sorts in human order
62
- http://nedbatchelder.com/blog/200712/human_sorting.html
63
- (See Toothy's implementation in the comments)
64
- '''
65
- return [atoi(c) for c in re.split(r'(\d+)', text)]
66
-
67
-
68
- def get_model_state_file(checkpoint_dir, zero_stage):
69
- if not os.path.isdir(checkpoint_dir):
70
- raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
71
-
72
- # there should be only one file
73
- if zero_stage <= 2:
74
- file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
75
- elif zero_stage == 3:
76
- file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
77
-
78
- if not os.path.exists(file):
79
- raise FileNotFoundError(f"can't find model states file at '{file}'")
80
-
81
- return file
82
-
83
-
84
- def get_checkpoint_files(checkpoint_dir, glob_pattern):
85
- # XXX: need to test that this simple glob rule works for multi-node setup too
86
- ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
87
-
88
- if len(ckpt_files) == 0:
89
- raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
90
-
91
- return ckpt_files
92
-
93
-
94
- def get_optim_files(checkpoint_dir):
95
- return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
96
-
97
-
98
- def get_model_state_files(checkpoint_dir):
99
- return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
100
-
101
-
102
- def parse_model_states(files):
103
- zero_model_states = []
104
- for file in files:
105
- state_dict = torch.load(file, map_location=device, weights_only=False)
106
-
107
- if BUFFER_NAMES not in state_dict:
108
- raise ValueError(f"{file} is not a model state checkpoint")
109
- buffer_names = state_dict[BUFFER_NAMES]
110
- if debug:
111
- print("Found buffers:", buffer_names)
112
-
113
- # recover just the buffers while restoring them to fp32 if they were saved in fp16
114
- buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
115
- param_shapes = state_dict[PARAM_SHAPES]
116
-
117
- # collect parameters that are included in param_shapes
118
- param_names = []
119
- for s in param_shapes:
120
- for name in s.keys():
121
- param_names.append(name)
122
-
123
- # update with frozen parameters
124
- frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
125
- if frozen_param_shapes is not None:
126
- if debug:
127
- print(f"Found frozen_param_shapes: {frozen_param_shapes}")
128
- param_names += list(frozen_param_shapes.keys())
129
-
130
- # handle shared params
131
- shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
132
-
133
- ds_version = state_dict.get(DS_VERSION, None)
134
-
135
- frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
136
-
137
- z_model_state = zero_model_state(buffers=buffers,
138
- param_shapes=param_shapes,
139
- shared_params=shared_params,
140
- ds_version=ds_version,
141
- frozen_param_shapes=frozen_param_shapes,
142
- frozen_param_fragments=frozen_param_fragments)
143
- zero_model_states.append(z_model_state)
144
-
145
- return zero_model_states
146
-
147
-
148
- def parse_optim_states(files, ds_checkpoint_dir):
149
- total_files = len(files)
150
- state_dicts = []
151
- for f in tqdm(files, desc='Loading checkpoint shards'):
152
- state_dict = torch.load(f, map_location=device, mmap=True, weights_only=False)
153
- # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
154
- # and also handle the case where it was already removed by another helper script
155
- state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
156
- state_dicts.append(state_dict)
157
-
158
- if ZERO_STAGE not in state_dicts[0][OPTIMIZER_STATE_DICT]:
159
- raise ValueError(f"{files[0]} is not a zero checkpoint")
160
- zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
161
- world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
162
-
163
- # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
164
- # parameters can be different from data parallelism for non-expert parameters. So we can just
165
- # use the max of the partition_count to get the dp world_size.
166
-
167
- if type(world_size) is list:
168
- world_size = max(world_size)
169
-
170
- if world_size != total_files:
171
- raise ValueError(
172
- f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
173
- "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
174
- )
175
-
176
- # the groups are named differently in each stage
177
- if zero_stage <= 2:
178
-         fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
-     elif zero_stage == 3:
-         fp32_groups_key = FP32_FLAT_GROUPS
-     else:
-         raise ValueError(f"unknown zero stage {zero_stage}")
-
-     fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
-     return zero_stage, world_size, fp32_flat_groups
-
-
- def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
-     """
-     Returns fp32 state_dict reconstructed from ds checkpoint
-
-     Args:
-         - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
-
-     """
-     print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
-
-     optim_files = get_optim_files(ds_checkpoint_dir)
-     zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
-     print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
-
-     model_files = get_model_state_files(ds_checkpoint_dir)
-
-     zero_model_states = parse_model_states(model_files)
-     print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
-
-     if zero_stage <= 2:
-         return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
-                                                           exclude_frozen_parameters)
-     elif zero_stage == 3:
-         return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
-                                                           exclude_frozen_parameters)
-
-
- def _zero2_merge_frozen_params(state_dict, zero_model_states):
-     if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
-         return
-
-     frozen_param_shapes = zero_model_states[0].frozen_param_shapes
-     frozen_param_fragments = zero_model_states[0].frozen_param_fragments
-
-     if debug:
-         num_elem = sum(s.numel() for s in frozen_param_shapes.values())
-         print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
-
-     wanted_params = len(frozen_param_shapes)
-     wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
-     avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
-     print(f'Frozen params: Have {avail_numel} numels to process.')
-     print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
-
-     total_params = 0
-     total_numel = 0
-     for name, shape in frozen_param_shapes.items():
-         total_params += 1
-         unpartitioned_numel = shape.numel()
-         total_numel += unpartitioned_numel
-
-         state_dict[name] = frozen_param_fragments[name]
-
-         if debug:
-             print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
-
-     print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
-
-
- def _has_callable(obj, fn):
-     attr = getattr(obj, fn, None)
-     return callable(attr)
-
-
- def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
-     param_shapes = zero_model_states[0].param_shapes
-
-     # Reconstruction protocol:
-     #
-     # XXX: document this
-
-     if debug:
-         for i in range(world_size):
-             for j in range(len(fp32_flat_groups[0])):
-                 print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
-
-     # XXX: memory usage doubles here (zero2)
-     num_param_groups = len(fp32_flat_groups[0])
-     merged_single_partition_of_fp32_groups = []
-     for i in range(num_param_groups):
-         merged_partitions = [sd[i] for sd in fp32_flat_groups]
-         full_single_fp32_vector = torch.cat(merged_partitions, 0)
-         merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
-     avail_numel = sum(
-         [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
-
-     if debug:
-         wanted_params = sum([len(shapes) for shapes in param_shapes])
-         wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
-         # not asserting if there is a mismatch due to possible padding
-         print(f"Have {avail_numel} numels to process.")
-         print(f"Need {wanted_numel} numels in {wanted_params} params.")
-
-     # params
-     # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
-     # out-of-core computing solution
-     total_numel = 0
-     total_params = 0
-     for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
-         offset = 0
-         avail_numel = full_single_fp32_vector.numel()
-         for name, shape in shapes.items():
-
-             unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
-             total_numel += unpartitioned_numel
-             total_params += 1
-
-             if debug:
-                 print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
-             state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
-             offset += unpartitioned_numel
-
-         # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
-         # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
-         # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
-         # live optimizer object, so we are checking that the numbers are within the right range
-         align_to = 2 * world_size
-
-         def zero2_align(x):
-             return align_to * math.ceil(x / align_to)
-
-         if debug:
-             print(f"original offset={offset}, avail_numel={avail_numel}")
-
-         offset = zero2_align(offset)
-         avail_numel = zero2_align(avail_numel)
-
-         if debug:
-             print(f"aligned offset={offset}, avail_numel={avail_numel}")
-
-         # Sanity check
-         if offset != avail_numel:
-             raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
-
-     print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
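To make the alignment arithmetic above concrete, here is a minimal standalone sketch (an illustration assuming world_size=4, not part of the deleted script): zero2_align rounds both counters up to a multiple of 2*world_size, so two counts that differ only by padding land on the same value.

import math

world_size = 4
align_to = 2 * world_size  # ZeRO-2 pads flat buffers to multiples of 2*world_size

def zero2_align(x):
    return align_to * math.ceil(x / align_to)

# 13 consumed numels and 14 available numels both round up to 16,
# so the sanity check passes even though padding makes them differ.
assert zero2_align(13) == zero2_align(14) == 16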
-
-
- def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
-                                                exclude_frozen_parameters):
-     state_dict = OrderedDict()
-
-     # buffers
-     buffers = zero_model_states[0].buffers
-     state_dict.update(buffers)
-     if debug:
-         print(f"added {len(buffers)} buffers")
-
-     if not exclude_frozen_parameters:
-         _zero2_merge_frozen_params(state_dict, zero_model_states)
-
-     _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
-
-     # recover shared parameters
-     for pair in zero_model_states[0].shared_params:
-         if pair[1] in state_dict:
-             state_dict[pair[0]] = state_dict[pair[1]]
-
-     return state_dict
-
-
- def zero3_partitioned_param_info(unpartitioned_numel, world_size):
-     remainder = unpartitioned_numel % world_size
-     padding_numel = (world_size - remainder) if remainder else 0
-     partitioned_numel = math.ceil(unpartitioned_numel / world_size)
-     return partitioned_numel, padding_numel
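For intuition, a small worked example of the partition arithmetic above (illustrative only, not from the original file): a parameter of 10 elements split across 4 ranks gets ceil(10/4) = 3 elements per rank, and since 10 % 4 = 2, the last rank carries 4 - 2 = 2 padding elements.

# Illustrative check of zero3_partitioned_param_info.
partitioned_numel, padding_numel = zero3_partitioned_param_info(10, world_size=4)
assert (partitioned_numel, padding_numel) == (3, 2)  # 4 ranks * 3 slots = 12, 2 are padding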
-
-
- def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
-     if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
-         return
-
-     if debug:
-         for i in range(world_size):
-             num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
-             print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
-
-     frozen_param_shapes = zero_model_states[0].frozen_param_shapes
-     wanted_params = len(frozen_param_shapes)
-     wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
-     avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
-     print(f'Frozen params: Have {avail_numel} numels to process.')
-     print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
-
-     total_params = 0
-     total_numel = 0
-     for name, shape in zero_model_states[0].frozen_param_shapes.items():
-         total_params += 1
-         unpartitioned_numel = shape.numel()
-         total_numel += unpartitioned_numel
-
-         param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
-         state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
-
-         partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
-
-         if debug:
-             print(
-                 f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
-             )
-
-     print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
-
-
- class GatheredTensor:
-     """
-     A pseudo tensor that collects partitioned weights.
-     It is more memory efficient when there are multiple groups.
-     """
-
-     def __init__(self, flat_groups, flat_groups_offset, offset, partitioned_numel, shape):
-         self.flat_groups = flat_groups
-         self.flat_groups_offset = flat_groups_offset
-         self.offset = offset
-         self.partitioned_numel = partitioned_numel
-         self.shape = shape
-         self.dtype = self.flat_groups[0][0].dtype
-
-     def contiguous(self):
-         """
-         Merge partitioned weights from flat_groups into a single tensor.
-         """
-         end_idx = self.offset + self.partitioned_numel
-         world_size = len(self.flat_groups)
-         pad_flat_param_chunks = []
-
-         for rank_i in range(world_size):
-             # for each rank, we need to collect weights from related group/groups
-             flat_groups_at_rank_i = self.flat_groups[rank_i]
-             start_group_id = None
-             end_group_id = None
-             for group_id in range(len(self.flat_groups_offset)):
-                 if self.flat_groups_offset[group_id] <= self.offset < self.flat_groups_offset[group_id + 1]:
-                     start_group_id = group_id
-                 if self.flat_groups_offset[group_id] < end_idx <= self.flat_groups_offset[group_id + 1]:
-                     end_group_id = group_id
-                     break
-             # collect weights from related group/groups
-             for group_id in range(start_group_id, end_group_id + 1):
-                 flat_tensor = flat_groups_at_rank_i[group_id]
-                 start_offset = self.offset - self.flat_groups_offset[group_id]
-                 end_offset = min(end_idx, self.flat_groups_offset[group_id + 1]) - self.flat_groups_offset[group_id]
-                 pad_flat_param_chunks.append(flat_tensor[start_offset:end_offset])
-
-         # collect weights from all ranks
-         pad_flat_param = torch.cat(pad_flat_param_chunks, dim=0)
-         param = pad_flat_param[:self.shape.numel()].view(self.shape).contiguous()
-         return param
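A minimal sketch of how GatheredTensor behaves (hypothetical tensors, single rank, one param group; an illustration rather than code from the original file): materialization is deferred until .contiguous() is called.

import torch

flat_groups = [[torch.arange(8, dtype=torch.float32)]]  # one rank, one flat group
flat_groups_offset = [0, 8]
# A 2x3 parameter stored at offset 1 of the flat buffer (partitioned_numel == numel here).
lazy = GatheredTensor(flat_groups, flat_groups_offset, offset=1, partitioned_numel=6,
                      shape=torch.Size([2, 3]))
dense = lazy.contiguous()  # only now is the slice copied into a dense tensor
assert dense.shape == (2, 3) and dense[0, 0] == 1.0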
-
-
- def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
-     param_shapes = zero_model_states[0].param_shapes
-     avail_numel = sum([flat_group.numel() for flat_group in fp32_flat_groups[0]]) * world_size
-
-     # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
-     # param, re-consolidating each param, while dealing with padding if any
-
-     # merge list of dicts, preserving order
-     param_shapes = {k: v for d in param_shapes for k, v in d.items()}
-
-     if debug:
-         for i in range(world_size):
-             print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
-
-         wanted_params = len(param_shapes)
-         wanted_numel = sum(shape.numel() for shape in param_shapes.values())
-         # not asserting if there is a mismatch due to possible padding
-         avail_numel = fp32_flat_groups[0].numel() * world_size
-         print(f"Trainable params: Have {avail_numel} numels to process.")
-         print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
-
-     # params
-     # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
-     # out-of-core computing solution
-     offset = 0
-     total_numel = 0
-     total_params = 0
-     flat_groups_offset = [0] + list(np.cumsum([flat_tensor.numel() for flat_tensor in fp32_flat_groups[0]]))
-     for name, shape in tqdm(param_shapes.items(), desc='Gathering sharded weights'):
-         unpartitioned_numel = shape.numel()
-         total_numel += unpartitioned_numel
-         total_params += 1
-         partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
-
-         if debug:
-             print(
-                 f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
-             )
-
-         # memory efficient tensor
-         tensor = GatheredTensor(fp32_flat_groups, flat_groups_offset, offset, partitioned_numel, shape)
-         state_dict[name] = tensor
-         offset += partitioned_numel
-
-     offset *= world_size
-
-     # Sanity check
-     if offset != avail_numel:
-         raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
-
-     print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
-
-
- def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
-                                                exclude_frozen_parameters):
-     state_dict = OrderedDict()
-
-     # buffers
-     buffers = zero_model_states[0].buffers
-     state_dict.update(buffers)
-     if debug:
-         print(f"added {len(buffers)} buffers")
-
-     if not exclude_frozen_parameters:
-         _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
-
-     _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
-
-     # recover shared parameters
-     for pair in zero_model_states[0].shared_params:
-         if pair[1] in state_dict:
-             state_dict[pair[0]] = state_dict[pair[1]]
-
-     return state_dict
-
-
- def to_torch_tensor(state_dict, return_empty_tensor=False):
-     """
-     Convert state_dict of GatheredTensor to torch tensor
-     """
-     torch_state_dict = {}
-     converted_tensors = {}
-     for name, tensor in state_dict.items():
-         tensor_id = id(tensor)
-         if tensor_id in converted_tensors:  # shared tensors
-             shared_tensor = torch_state_dict[converted_tensors[tensor_id]]
-             torch_state_dict[name] = shared_tensor
-         else:
-             converted_tensors[tensor_id] = name
-             if return_empty_tensor:
-                 torch_state_dict[name] = torch.empty(tensor.shape, dtype=tensor.dtype)
-             else:
-                 torch_state_dict[name] = tensor.contiguous()
-     return torch_state_dict
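Building on the hypothetical lazy tensor from the sketch above, the shared-tensor branch can be illustrated as follows (again an illustration, not from the original file): two names bound to the same GatheredTensor are materialized once and map to the same torch tensor object.

shared = {'embed.weight': lazy, 'lm_head.weight': lazy}  # tied weights share one object
dense_sd = to_torch_tensor(shared)
# The second name reuses the tensor converted for the first name.
assert dense_sd['embed.weight'] is dense_sd['lm_head.weight']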
-
-
- def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir,
-                                              tag=None,
-                                              exclude_frozen_parameters=False,
-                                              lazy_mode=False):
-     """
-     Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
-     ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
-     via a model hub.
-
-     Args:
-         - ``checkpoint_dir``: path to the desired checkpoint folder
-         - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
-         - ``exclude_frozen_parameters``: exclude frozen parameters
-         - ``lazy_mode``: get state_dict in lazy mode. It returns a dict of pseudo tensors instead of torch tensors, which is more memory efficient.
-           Convert the pseudo tensor to a torch tensor by ``.contiguous()``
-
-     Returns:
-         - pytorch ``state_dict``
-
-     A typical usage might be ::
-
-         from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
-         # do the training and checkpoint saving
-         state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
-         model = model.cpu() # move to cpu
-         model.load_state_dict(state_dict)
-         # submit to model hub or save the model to share with others
-
-     In this example the ``model`` will no longer be usable in the deepspeed context of the same
-     application, i.e. you will need to re-initialize the deepspeed engine, since
-     ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
-
-     If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
-
-     Note: the above usage may not work if your application doesn't have sufficient free CPU memory.
-     You may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
-     the checkpoint. Or you can load state_dict in lazy mode ::
-
-         from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
-         state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, lazy_mode=True) # not on cpu
-         for name, lazy_tensor in state_dict.items():
-             tensor = lazy_tensor.contiguous() # to cpu
-             print(name, tensor)
-             # del tensor to release memory if it is no longer in use
-     """
-     if tag is None:
-         latest_path = os.path.join(checkpoint_dir, 'latest')
-         if os.path.isfile(latest_path):
-             with open(latest_path, 'r') as fd:
-                 tag = fd.read().strip()
-         else:
-             raise ValueError(f"Unable to find 'latest' file at {latest_path}")
-
-     ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
-
-     if not os.path.isdir(ds_checkpoint_dir):
-         raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
-
-     state_dict = _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
-     if lazy_mode:
-         return state_dict
-     else:
-         return to_torch_tensor(state_dict)
-
-
- def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir,
-                                                output_dir,
-                                                max_shard_size="5GB",
-                                                safe_serialization=False,
-                                                tag=None,
-                                                exclude_frozen_parameters=False):
-     """
-     Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
-     loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
-
-     Args:
-         - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
-         - ``output_dir``: directory to the pytorch fp32 state_dict output files
-         - ``max_shard_size``: the maximum size for a checkpoint before being sharded, default value is 5GB
-         - ``safe_serialization``: whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).
-         - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
-         - ``exclude_frozen_parameters``: exclude frozen parameters
-     """
-
-     # Dependency pre-check
-     if safe_serialization:
-         try:
-             from safetensors.torch import save_file
-         except ImportError:
-             print('If you want to use `safe_serialization`, please `pip install safetensors`')
-             raise
-     if max_shard_size is not None:
-         try:
-             from huggingface_hub import split_torch_state_dict_into_shards
-         except ImportError:
-             print('If you want to use `max_shard_size`, please `pip install huggingface_hub`')
-             raise
-
-     # Convert zero checkpoint to state_dict
-     state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir,
-                                                           tag,
-                                                           exclude_frozen_parameters,
-                                                           lazy_mode=True)
-
-     # Shard the model if it is too big.
-     weights_name = "model.safetensors" if safe_serialization else "pytorch_model.bin"
-     if max_shard_size is not None:
-         filename_pattern = weights_name.replace(".bin", "{suffix}.bin").replace(".safetensors", "{suffix}.safetensors")
-         # a memory-efficient approach for sharding
-         empty_state_dict = to_torch_tensor(state_dict, return_empty_tensor=True)
-         state_dict_split = split_torch_state_dict_into_shards(empty_state_dict,
-                                                               filename_pattern=filename_pattern,
-                                                               max_shard_size=max_shard_size)
-     else:
-         from collections import namedtuple
-         StateDictSplit = namedtuple("StateDictSplit", ["is_sharded", "filename_to_tensors"])
-         state_dict_split = StateDictSplit(is_sharded=False,
-                                           filename_to_tensors={weights_name: list(state_dict.keys())})
-
-     # Save the model by shard
-     os.makedirs(output_dir, exist_ok=True)
-     filename_to_tensors = state_dict_split.filename_to_tensors.items()
-     for shard_file, tensors in tqdm(filename_to_tensors, desc="Saving checkpoint shards"):
-         shard_state_dict = {tensor_name: state_dict[tensor_name] for tensor_name in tensors}
-         shard_state_dict = to_torch_tensor(shard_state_dict)
-         output_path = os.path.join(output_dir, shard_file)
-         if safe_serialization:
-             save_file(shard_state_dict, output_path, metadata={"format": "pt"})
-         else:
-             torch.save(shard_state_dict, output_path)
-         # release the memory of current shard
-         for tensor_name in list(shard_state_dict.keys()):
-             del state_dict[tensor_name]
-             del shard_state_dict[tensor_name]
-         del shard_state_dict
-         gc.collect()
-
-     # Save index if sharded
-     if state_dict_split.is_sharded:
-         index = {
-             "metadata": state_dict_split.metadata,
-             "weight_map": state_dict_split.tensor_to_filename,
-         }
-         save_index_file = "model.safetensors.index.json" if safe_serialization else "pytorch_model.bin.index.json"
-         save_index_file = os.path.join(output_dir, save_index_file)
-         with open(save_index_file, "w", encoding="utf-8") as f:
-             content = json.dumps(index, indent=2, sort_keys=True) + "\n"
-             f.write(content)
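A hedged usage sketch for the conversion entry point above (directory names are placeholders, not paths from this repository): consolidate this checkpoint's ZeRO shards into safetensors shards of at most 5GB each.

# Example invocation with placeholder paths.
convert_zero_checkpoint_to_fp32_state_dict('path/to/checkpoint-500',  # contains the 'latest' tag file
                                           'path/to/fp32-output',
                                           max_shard_size='5GB',
                                           safe_serialization=True)  # writes model*.safetensors + index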
-
-
- def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
-     """
-     1. Put the provided model on the cpu
-     2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
-     3. Load it into the provided model
-
-     Args:
-         - ``model``: the model object to update
-         - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
-         - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
-
-     Returns:
-         - ``model``: modified model
-
-     Make sure you have plenty of CPU memory available before you call this function. If you don't
-     have enough, use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
-     conveniently placed for you in the checkpoint folder.
-
-     A typical usage might be ::
-
-         from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
-         model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
-         # submit to model hub or save the model to share with others
-
-     Note that once this has been run, the ``model`` will no longer be usable in the deepspeed context
-     of the same application, i.e. you will need to re-initialize the deepspeed engine, since
-     ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
-
-     """
-     logger.info("Extracting fp32 weights")
-     state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
-
-     logger.info("Overwriting model with fp32 weights")
-     model = model.cpu()
-     model.load_state_dict(state_dict, strict=False)
-
-     return model
-
-
- if __name__ == "__main__":
-     parser = argparse.ArgumentParser()
-     parser.add_argument("checkpoint_dir",
-                         type=str,
-                         help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
-     parser.add_argument("output_dir",
-                         type=str,
-                         help="directory to the pytorch fp32 state_dict output files "
-                         "(e.g. path/checkpoint-12-output/)")
-     parser.add_argument(
-         "--max_shard_size",
-         type=str,
-         default="5GB",
-         help="The maximum size for a checkpoint before being sharded. Each checkpoint shard will then be of a size "
-         "lower than this size. If expressed as a string, it needs to be digits followed by a unit (like `5MB`). "
-         "We default it to 5GB so that models can run easily on free-tier google colab instances "
-         "without CPU OOM issues.")
-     parser.add_argument(
-         "--safe_serialization",
-         default=False,
-         action='store_true',
-         help="Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).")
-     parser.add_argument("-t",
-                         "--tag",
-                         type=str,
-                         default=None,
-                         help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
-     parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
-     parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
-     args = parser.parse_args()
-
-     debug = args.debug
-
-     convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
-                                                args.output_dir,
-                                                max_shard_size=args.max_shard_size,
-                                                safe_serialization=args.safe_serialization,
-                                                tag=args.tag,
-                                                exclude_frozen_parameters=args.exclude_frozen_parameters)
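For reference, this script is normally run offline from inside the saved checkpoint folder; a representative invocation (with placeholder paths) would be ``python zero_to_fp32.py path/to/checkpoint-500 path/to/fp32-output --safe_serialization``.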