Upload folder using huggingface_hub
- .gitattributes +5 -0
- SFT_text_40k_3B/v2-20250623-201026/DynaMath_checkpoint-1100.json +0 -0
- SFT_text_40k_3B/v2-20250623-201026/LogicVista_checkpoint-1100.json +3 -0
- SFT_text_40k_3B/v2-20250623-201026/MATH-V_checkpoint-1100.json +3 -0
- SFT_text_40k_3B/v2-20250623-201026/MathVerse_checkpoint-1100.json +3 -0
- SFT_text_40k_3B/v2-20250623-201026/MathVista_checkpoint-1100.json +3 -0
- added_tokens.json +24 -0
- args.json +361 -0
- chat_template.json +3 -0
- config.json +52 -0
- generation_config.json +12 -0
- log.txt +347 -0
- log/20250710_174320_output.log +0 -0
- log/20250710_174348_output.log +0 -0
- log/20250710_174550_output.log +1 -0
- log/20250710_183548_output.log +0 -0
- merges.txt +0 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +832 -0
- preprocessor_config.json +19 -0
- special_tokens_map.json +31 -0
- tokenizer.json +3 -0
- tokenizer_config.json +209 -0
- trainer_state.json +2342 -0
- training_args.bin +3 -0
- vocab.json +0 -0
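The commit title refers to the huggingface_hub upload helper. As a rough sketch (the repo id and local folder below are placeholders, not taken from this commit), a commit with this default message is produced roughly like so:

# Minimal sketch of producing a commit like this one with huggingface_hub.
# repo_id and folder_path are hypothetical placeholders.
from huggingface_hub import upload_folder

upload_folder(
    folder_path="oly_output/SFT_text_40k_3B/v2-20250623-201026",  # local training output (placeholder)
    repo_id="your-username/your-model-repo",                      # placeholder target repo
    commit_message="Upload folder using huggingface_hub",
)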
.gitattributes
CHANGED
@@ -33,3 +33,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+SFT_text_40k_3B/v2-20250623-201026/LogicVista_checkpoint-1100.json filter=lfs diff=lfs merge=lfs -text
+SFT_text_40k_3B/v2-20250623-201026/MATH-V_checkpoint-1100.json filter=lfs diff=lfs merge=lfs -text
+SFT_text_40k_3B/v2-20250623-201026/MathVerse_checkpoint-1100.json filter=lfs diff=lfs merge=lfs -text
+SFT_text_40k_3B/v2-20250623-201026/MathVista_checkpoint-1100.json filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
SFT_text_40k_3B/v2-20250623-201026/DynaMath_checkpoint-1100.json
ADDED
The diff for this file is too large to render. See raw diff.
SFT_text_40k_3B/v2-20250623-201026/LogicVista_checkpoint-1100.json
ADDED
version https://git-lfs.github.com/spec/v1
oid sha256:fd1e368fa979ca80992f5bfdb97a647227f206a32ab9d71fcdde74586655633e
size 10978571
SFT_text_40k_3B/v2-20250623-201026/MATH-V_checkpoint-1100.json
ADDED
version https://git-lfs.github.com/spec/v1
oid sha256:7ff2ad33a43080b4998397d59b7d4c5822955bb52aedbbfc2e32579b74d26b6e
size 118930802
SFT_text_40k_3B/v2-20250623-201026/MathVerse_checkpoint-1100.json
ADDED
version https://git-lfs.github.com/spec/v1
oid sha256:a3d8a3ebfd9598d3a12ce9ffa4b8e7a70b75fa483e22028f470f69a87ce3b39a
size 112822153
SFT_text_40k_3B/v2-20250623-201026/MathVista_checkpoint-1100.json
ADDED
version https://git-lfs.github.com/spec/v1
oid sha256:b235225a771c580620d07ce4227c5771aed794d25d9cdaa93574b469658fc402
size 17129753
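The four checkpoint result files above are stored through Git LFS, so the diff only shows pointer files (version, oid, size). A minimal sketch of fetching the real JSON with huggingface_hub, assuming a placeholder repo id:

from huggingface_hub import hf_hub_download

# repo_id is a placeholder; the filename matches the path added in this commit.
path = hf_hub_download(
    repo_id="your-username/your-model-repo",
    filename="SFT_text_40k_3B/v2-20250623-201026/MathVista_checkpoint-1100.json",
)
print(path)  # local cache path of the ~17 MB results file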
added_tokens.json
ADDED
{
"</tool_call>": 151658,
"<tool_call>": 151657,
"<|box_end|>": 151649,
"<|box_start|>": 151648,
"<|endoftext|>": 151643,
"<|file_sep|>": 151664,
"<|fim_middle|>": 151660,
"<|fim_pad|>": 151662,
"<|fim_prefix|>": 151659,
"<|fim_suffix|>": 151661,
"<|im_end|>": 151645,
"<|im_start|>": 151644,
"<|image_pad|>": 151655,
"<|object_ref_end|>": 151647,
"<|object_ref_start|>": 151646,
"<|quad_end|>": 151651,
"<|quad_start|>": 151650,
"<|repo_name|>": 151663,
"<|video_pad|>": 151656,
"<|vision_end|>": 151653,
"<|vision_pad|>": 151654,
"<|vision_start|>": 151652
}
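added_tokens.json maps the Qwen2.5-VL special tokens to fixed vocabulary ids. A quick check with transformers, assuming the repo has been downloaded to a placeholder model_dir:

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("model_dir")  # placeholder local path
print(tok.convert_tokens_to_ids("<|im_start|>"))      # 151644
print(tok.convert_tokens_to_ids("<|image_pad|>"))     # 151655
print(tok.convert_tokens_to_ids("<|vision_start|>"))  # 151652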
args.json
ADDED
{
"model": "/group/40174/Zywoou/models/Qwen2.5-VL-3B-Instruct",
"model_type": "qwen2_5_vl",
"model_revision": null,
"task_type": "causal_lm",
"torch_dtype": "bfloat16",
"attn_impl": "flash_attn",
"num_labels": null,
"problem_type": null,
"rope_scaling": null,
"device_map": null,
"max_memory": {},
"local_repo_path": null,
"template": "qwen2_5_vl",
"system": "You FIRST think about the reasoning process as an internal monologue and then provide the final answer. The reasoning process MUST BE enclosed within <think> </think> tags. The final answer MUST BE put in \\\\boxed{}.\n",
"max_length": 32768,
"truncation_strategy": "delete",
"max_pixels": null,
"tools_prompt": "react_en",
"norm_bbox": null,
"response_prefix": null,
"padding_side": "right",
"loss_scale": "default",
"sequence_parallel_size": 1,
"use_chat_template": true,
"template_backend": "swift",
"dataset": [
"/group/40174/Zywoou/Dataset/OpenR1-Math-220k/level_32B/20250610_004144/original_items.jsonl"
],
"val_dataset": [],
"split_dataset_ratio": 0.01,
"data_seed": 42,
"dataset_num_proc": 1,
"dataset_shuffle": true,
"streaming": false,
"interleave_prob": null,
"stopping_strategy": "first_exhausted",
"shuffle_buffer_size": 1000,
"enable_cache": false,
"download_mode": "reuse_dataset_if_exists",
"columns": {},
"strict": false,
"remove_unused_columns": true,
"model_name": [
null,
null
],
"model_author": [
null,
null
],
"custom_dataset_info": [],
"quant_method": null,
"quant_bits": null,
"hqq_axis": null,
"bnb_4bit_compute_dtype": "bfloat16",
"bnb_4bit_quant_type": "nf4",
"bnb_4bit_use_double_quant": true,
"bnb_4bit_quant_storage": null,
"max_new_tokens": 64,
"temperature": 0.0,
"top_k": null,
"top_p": null,
"repetition_penalty": null,
"num_beams": 1,
"stream": false,
"stop_words": [],
"logprobs": false,
"top_logprobs": null,
"ckpt_dir": null,
"load_dataset_config": null,
"lora_modules": [],
"tuner_backend": "peft",
"train_type": "full",
"adapters": [],
"external_plugins": [],
"seed": 42,
"model_kwargs": {},
"load_args": false,
"load_data_args": false,
"use_hf": false,
"hub_token": null,
"custom_register_path": [],
"ignore_args_error": false,
"use_swift_lora": false,
"output_dir": "/group/40174/Zywoou/mm_math_reasoning/oly_output/SFT_text_40k_3B/v2-20250623-201026",
"overwrite_output_dir": false,
"do_train": false,
"do_eval": false,
"do_predict": false,
"eval_strategy": "steps",
"prediction_loss_only": false,
"per_device_train_batch_size": 1,
"per_device_eval_batch_size": 1,
"per_gpu_train_batch_size": null,
"per_gpu_eval_batch_size": null,
"gradient_accumulation_steps": 16,
"eval_accumulation_steps": null,
"eval_delay": 0,
"torch_empty_cache_steps": null,
"learning_rate": 2e-05,
"weight_decay": 0.1,
"adam_beta1": 0.9,
"adam_beta2": 0.95,
"adam_epsilon": 1e-08,
"max_grad_norm": 1.0,
"num_train_epochs": 5.0,
"max_steps": -1,
"lr_scheduler_type": "cosine",
"lr_scheduler_kwargs": null,
"warmup_ratio": 0.05,
"warmup_steps": 0,
"log_level": "passive",
"log_level_replica": "warning",
"log_on_each_node": true,
"logging_dir": "/group/40174/Zywoou/mm_math_reasoning/oly_output/SFT_text_40k_3B/v2-20250623-201026/runs",
"logging_strategy": "steps",
"logging_first_step": true,
"logging_steps": 5,
"logging_nan_inf_filter": true,
"save_strategy": "steps",
"save_steps": 100.0,
"save_total_limit": 20,
"save_safetensors": true,
"save_on_each_node": false,
"save_only_model": true,
"restore_callback_states_from_checkpoint": false,
"no_cuda": false,
"use_cpu": false,
"use_mps_device": false,
"jit_mode_eval": false,
"use_ipex": false,
"bf16": true,
"fp16": false,
"fp16_opt_level": "O1",
"half_precision_backend": "auto",
"bf16_full_eval": false,
"fp16_full_eval": false,
"tf32": null,
"local_rank": 0,
"ddp_backend": null,
"tpu_num_cores": null,
"tpu_metrics_debug": false,
"debug": null,
"dataloader_drop_last": false,
"eval_steps": 100.0,
"dataloader_num_workers": 8,
"dataloader_prefetch_factor": null,
"past_index": -1,
"run_name": null,
"disable_tqdm": null,
"label_names": null,
"load_best_model_at_end": false,
"metric_for_best_model": "loss",
"greater_is_better": false,
"ignore_data_skip": false,
"fsdp": "",
"fsdp_min_num_params": 0,
"fsdp_config": null,
"tp_size": 0,
"fsdp_transformer_layer_cls_to_wrap": null,
"accelerator_config": {
"dispatch_batches": false
},
"deepspeed": {
"fp16": {
"enabled": "auto",
"loss_scale": 0,
"loss_scale_window": 1000,
"initial_scale_power": 16,
"hysteresis": 2,
"min_loss_scale": 1
},
"bf16": {
"enabled": "auto"
},
"zero_optimization": {
"stage": 2,
"offload_optimizer": {
"device": "none",
"pin_memory": true
},
"allgather_partitions": true,
"allgather_bucket_size": 200000000.0,
"overlap_comm": false,
"reduce_scatter": true,
"reduce_bucket_size": 200000000.0,
"contiguous_gradients": true
},
"gradient_accumulation_steps": "auto",
"gradient_clipping": "auto",
"steps_per_print": 2000,
"train_batch_size": "auto",
"train_micro_batch_size_per_gpu": "auto",
"wall_clock_breakdown": false
},
"label_smoothing_factor": 0.0,
"optim": "adamw_torch",
"optim_args": null,
"adafactor": false,
"group_by_length": false,
"length_column_name": "length",
"report_to": [
"tensorboard"
],
"ddp_find_unused_parameters": null,
"ddp_bucket_cap_mb": null,
"ddp_broadcast_buffers": null,
"dataloader_pin_memory": true,
"dataloader_persistent_workers": false,
"skip_memory_metrics": true,
"use_legacy_prediction_loop": false,
"push_to_hub": false,
"resume_from_checkpoint": null,
"hub_model_id": null,
"hub_strategy": "every_save",
"hub_private_repo": null,
"hub_always_push": false,
"gradient_checkpointing": true,
"gradient_checkpointing_kwargs": null,
"include_inputs_for_metrics": false,
"include_for_metrics": [],
"eval_do_concat_batches": true,
"fp16_backend": "auto",
"evaluation_strategy": "steps",
"push_to_hub_model_id": null,
"push_to_hub_organization": null,
"push_to_hub_token": null,
"mp_parameters": "",
"auto_find_batch_size": false,
"full_determinism": false,
"torchdynamo": null,
"ray_scope": "last",
"ddp_timeout": 1800,
"torch_compile": false,
"torch_compile_backend": null,
"torch_compile_mode": null,
"dispatch_batches": null,
"split_batches": null,
"include_tokens_per_second": false,
"include_num_input_tokens_seen": false,
"neftune_noise_alpha": null,
"optim_target_modules": null,
"batch_eval_metrics": false,
"eval_on_start": false,
"use_liger_kernel": false,
"eval_use_gather_object": false,
"average_tokens_across_devices": false,
"sortish_sampler": false,
"predict_with_generate": false,
"generation_max_length": null,
"generation_num_beams": null,
"generation_config": null,
"check_model": true,
"acc_strategy": "token",
"train_dataloader_shuffle": true,
"metric_warmup_step": 0,
"fsdp_num": 1,
"acc_steps": 1,
"eval_use_evalscope": false,
"eval_datasets": [],
"eval_limit": null,
"eval_datasets_args": null,
"eval_generation_config": null,
"freeze_parameters": [
"visual",
"visual.merger"
],
"freeze_parameters_ratio": 0.0,
"trainable_parameters": [],
"freeze_llm": false,
"freeze_vit": true,
"freeze_aligner": true,
"target_modules": [
"all-linear"
],
"target_regex": null,
"modules_to_save": [],
"lora_rank": 8,
"lora_alpha": 32,
"lora_dropout": 0.05,
"lora_bias": "none",
"lora_dtype": null,
"lorap_lr_ratio": null,
"use_rslora": false,
"use_dora": false,
"lora_ga_batch_size": 2,
"lora_ga_iters": 2,
"lora_ga_max_length": 1024,
"lora_ga_direction": "ArB2r",
"lora_ga_scale": "stable",
"lora_ga_stable_gamma": 16,
"init_weights": true,
"fourier_n_frequency": 2000,
"fourier_scaling": 300.0,
"boft_block_size": 4,
"boft_block_num": 0,
"boft_n_butterfly_factor": 1,
"boft_dropout": 0.0,
"vera_rank": 256,
"vera_projection_prng_key": 0,
"vera_dropout": 0.0,
"vera_d_initial": 0.1,
"adapter_act": "gelu",
"adapter_length": 128,
"use_galore": false,
"galore_target_modules": null,
"galore_rank": 128,
"galore_update_proj_gap": 50,
"galore_scale": 1.0,
"galore_proj_type": "std",
"galore_optim_per_parameter": false,
"galore_with_embedding": false,
"galore_quantization": false,
"galore_proj_quant": false,
"galore_proj_bits": 4,
"galore_proj_group_size": 256,
"galore_cos_threshold": 0.4,
"galore_gamma_proj": 2,
"galore_queue_size": 5,
"adalora_target_r": 8,
"adalora_init_r": 12,
"adalora_tinit": 0,
"adalora_tfinal": 0,
"adalora_deltaT": 1,
"adalora_beta1": 0.85,
"adalora_beta2": 0.85,
"adalora_orth_reg_weight": 0.5,
"llamapro_num_new_blocks": 4,
"llamapro_num_groups": null,
"lisa_activated_layers": 0,
"lisa_step_interval": 20,
"reft_layer_key": null,
"reft_layers": null,
"reft_rank": 4,
"reft_intervention_type": "LoreftIntervention",
"reft_args": null,
"swanlab_token": null,
"swanlab_project": null,
"swanlab_workspace": null,
"swanlab_exp_name": null,
"swanlab_mode": "cloud",
"add_version": true,
"resume_only_model": false,
"create_checkpoint_symlink": false,
"packing": false,
"lazy_tokenize": true,
"loss_type": null,
"optimizer": null,
"metric": null,
"zero_hpz_partition_size": null,
"rank": 0,
"global_world_size": 8,
"local_world_size": 8,
"model_suffix": "Qwen2.5-VL-3B-Instruct",
"model_info": "ModelInfo(model_type='qwen2_5_vl', model_dir='/group/40174/Zywoou/models/Qwen2.5-VL-3B-Instruct', torch_dtype=torch.bfloat16, max_model_len=128000, quant_method=None, quant_bits=None, rope_scaling={'type': 'default', 'mrope_section': [16, 24, 24], 'rope_type': 'default'}, config=None, task_type='causal_lm', num_labels=None)",
"model_meta": "ModelMeta(model_type='qwen2_5_vl', model_groups=[ModelGroup(models=[Model(ms_model_id='Qwen/Qwen2.5-VL-3B-Instruct', hf_model_id='Qwen/Qwen2.5-VL-3B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-7B-Instruct', hf_model_id='Qwen/Qwen2.5-VL-7B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-32B-Instruct', hf_model_id='Qwen/Qwen2.5-VL-32B-Instruct', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-72B-Instruct', hf_model_id='Qwen/Qwen2.5-VL-72B-Instruct', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[]), ModelGroup(models=[Model(ms_model_id='Qwen/Qwen2.5-VL-3B-Instruct-AWQ', hf_model_id='Qwen/Qwen2.5-VL-3B-Instruct-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-7B-Instruct-AWQ', hf_model_id='Qwen/Qwen2.5-VL-7B-Instruct-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-32B-Instruct-AWQ', hf_model_id='Qwen/Qwen2.5-VL-32B-Instruct-AWQ', model_path=None, ms_revision=None, hf_revision=None), Model(ms_model_id='Qwen/Qwen2.5-VL-72B-Instruct-AWQ', hf_model_id='Qwen/Qwen2.5-VL-72B-Instruct-AWQ', model_path=None, ms_revision=None, hf_revision=None)], ignore_patterns=None, requires=None, tags=[])], template='qwen2_5_vl', get_function=<function get_model_tokenizer_qwen2_5_vl at 0x7f5cfacdad40>, model_arch='qwen2_vl', architectures=['Qwen2_5_VLForConditionalGeneration'], additional_saved_files=[], torch_dtype=None, is_multimodal=True, is_reward=False, task_type=None, ignore_patterns=['*.bin', '*.safetensors'], requires=['transformers>=4.49', 'qwen_vl_utils>=0.0.6', 'decord'], tags=[])",
"model_dir": "/group/40174/Zywoou/models/Qwen2.5-VL-3B-Instruct",
"hub": "<class 'swift.hub.hub.MSHub'>",
"training_args": "Seq2SeqTrainingArguments(output_dir='/group/40174/Zywoou/mm_math_reasoning/oly_output/SFT_text_40k_3B/v2-20250623-201026', overwrite_output_dir=False, do_train=False, do_eval=True, do_predict=False, eval_strategy=<IntervalStrategy.STEPS: 'steps'>, prediction_loss_only=False, per_device_train_batch_size=1, per_device_eval_batch_size=1, per_gpu_train_batch_size=None, per_gpu_eval_batch_size=None, gradient_accumulation_steps=16, eval_accumulation_steps=None, eval_delay=0, torch_empty_cache_steps=None, learning_rate=2e-05, weight_decay=0.1, adam_beta1=0.9, adam_beta2=0.95, adam_epsilon=1e-08, max_grad_norm=1.0, num_train_epochs=5.0, max_steps=-1, lr_scheduler_type=<SchedulerType.COSINE: 'cosine'>, lr_scheduler_kwargs=None, warmup_ratio=0.05, warmup_steps=0, log_level='passive', log_level_replica='warning', log_on_each_node=True, logging_dir='/group/40174/Zywoou/mm_math_reasoning/oly_output/SFT_text_40k_3B/v2-20250623-201026/runs', logging_strategy=<IntervalStrategy.STEPS: 'steps'>, logging_first_step=True, logging_steps=5, logging_nan_inf_filter=True, save_strategy=<SaveStrategy.STEPS: 'steps'>, save_steps=100, save_total_limit=20, save_safetensors=True, save_on_each_node=False, save_only_model=True, restore_callback_states_from_checkpoint=False, no_cuda=False, use_cpu=False, use_mps_device=False, seed=42, data_seed=42, jit_mode_eval=False, use_ipex=False, bf16=True, fp16=False, fp16_opt_level='O1', half_precision_backend='auto', bf16_full_eval=False, fp16_full_eval=False, tf32=None, local_rank=0, ddp_backend=None, tpu_num_cores=None, tpu_metrics_debug=False, debug=[], dataloader_drop_last=False, eval_steps=100, dataloader_num_workers=8, dataloader_prefetch_factor=10, past_index=-1, run_name='/group/40174/Zywoou/mm_math_reasoning/oly_output/SFT_text_40k_3B/v2-20250623-201026', disable_tqdm=False, remove_unused_columns=False, label_names=None, load_best_model_at_end=False, metric_for_best_model='loss', greater_is_better=False, ignore_data_skip=False, fsdp=[], fsdp_min_num_params=0, fsdp_config={'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}, tp_size=0, fsdp_transformer_layer_cls_to_wrap=None, accelerator_config=AcceleratorConfig(split_batches=False, dispatch_batches=False, even_batches=True, use_seedable_sampler=True, non_blocking=False, gradient_accumulation_kwargs=None, use_configured_state=False), deepspeed={'fp16': {'enabled': 'auto', 'loss_scale': 0, 'loss_scale_window': 1000, 'initial_scale_power': 16, 'hysteresis': 2, 'min_loss_scale': 1}, 'bf16': {'enabled': 'auto'}, 'zero_optimization': {'stage': 2, 'offload_optimizer': {'device': 'none', 'pin_memory': True}, 'allgather_partitions': True, 'allgather_bucket_size': 200000000.0, 'overlap_comm': False, 'reduce_scatter': True, 'reduce_bucket_size': 200000000.0, 'contiguous_gradients': True}, 'gradient_accumulation_steps': 'auto', 'gradient_clipping': 'auto', 'steps_per_print': 2000, 'train_batch_size': 'auto', 'train_micro_batch_size_per_gpu': 'auto', 'wall_clock_breakdown': False}, label_smoothing_factor=0.0, optim=<OptimizerNames.ADAMW_TORCH: 'adamw_torch'>, optim_args=None, adafactor=False, group_by_length=False, length_column_name='length', report_to=['tensorboard'], ddp_find_unused_parameters=None, ddp_bucket_cap_mb=None, ddp_broadcast_buffers=None, dataloader_pin_memory=True, dataloader_persistent_workers=False, skip_memory_metrics=True, use_legacy_prediction_loop=False, push_to_hub=False, resume_from_checkpoint=None, hub_model_id=None, hub_strategy=<HubStrategy.EVERY_SAVE: 
'every_save'>, hub_token=None, hub_private_repo=None, hub_always_push=False, gradient_checkpointing=True, gradient_checkpointing_kwargs=None, include_inputs_for_metrics=False, include_for_metrics=[], eval_do_concat_batches=True, fp16_backend='auto', evaluation_strategy='steps', push_to_hub_model_id=None, push_to_hub_organization=None, push_to_hub_token=None, mp_parameters='', auto_find_batch_size=False, full_determinism=False, torchdynamo=None, ray_scope='last', ddp_timeout=1800, torch_compile=False, torch_compile_backend=None, torch_compile_mode=None, dispatch_batches=None, split_batches=None, include_tokens_per_second=None, include_num_input_tokens_seen=None, neftune_noise_alpha=None, optim_target_modules=None, batch_eval_metrics=False, eval_on_start=False, use_liger_kernel=False, eval_use_gather_object=False, average_tokens_across_devices=None, sortish_sampler=False, predict_with_generate=False, generation_max_length=None, generation_num_beams=None, generation_config=None, check_model=True, acc_strategy='token', train_dataloader_shuffle=True, metric_warmup_step=0, fsdp_num=1, acc_steps=1, eval_use_evalscope=False, eval_datasets=[], eval_limit=None, eval_datasets_args=None, eval_generation_config=None, train_type='full', optimizer=None, local_repo_path=None, galore_config=None)"
}
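args.json records the full SFT configuration: full-parameter training of Qwen2.5-VL-3B-Instruct with the vision tower frozen (freeze_vit and freeze_aligner true, freeze_llm false), bf16, DeepSpeed ZeRO-2, and a cosine schedule at lr 2e-05 for 5 epochs. A small sketch deriving the effective batch size from the values above:

import json

args = json.load(open("args.json"))
effective_batch = (args["per_device_train_batch_size"]    # 1
                   * args["gradient_accumulation_steps"]  # 16
                   * args["global_world_size"])           # 8 GPUs
print(effective_batch)  # 128 samples per optimizer step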
chat_template.json
ADDED
{
"chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}"
}
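chat_template.json carries the Qwen2.5-VL Jinja chat template, which inserts <|vision_start|><|image_pad|><|vision_end|> for image content. A minimal rendering sketch, assuming the repo is downloaded to a placeholder model_dir:

from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained("model_dir")  # placeholder local path
messages = [
    {"role": "user", "content": [
        {"type": "image"},
        {"type": "text", "text": "Solve the problem shown in the figure."},
    ]},
]
prompt = processor.apply_chat_template(messages, add_generation_prompt=True)
print(prompt)  # ends with <|im_start|>assistant\n, with <|vision_start|><|image_pad|><|vision_end|> in the user turn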
config.json
ADDED
{
"_name_or_path": "/group/40174/Zywoou/models/Qwen2.5-VL-3B-Instruct",
"architectures": [
"Qwen2_5_VLForConditionalGeneration"
],
"attention_dropout": 0.0,
"bos_token_id": 151643,
"eos_token_id": 151645,
"hidden_act": "silu",
"hidden_size": 2048,
"image_token_id": 151655,
"initializer_range": 0.02,
"intermediate_size": 11008,
"max_position_embeddings": 128000,
"max_window_layers": 70,
"model_type": "qwen2_5_vl",
"num_attention_heads": 16,
"num_hidden_layers": 36,
"num_key_value_heads": 2,
"pad_token_id": 151643,
"rms_norm_eps": 1e-06,
"rope_scaling": {
"mrope_section": [
16,
24,
24
],
"rope_type": "default",
"type": "default"
},
"rope_theta": 1000000.0,
"sliding_window": 32768,
"tie_word_embeddings": true,
"torch_dtype": "bfloat16",
"transformers_version": "4.50.0.dev0",
"use_cache": false,
"use_sliding_window": false,
"video_token_id": 151656,
"vision_config": {
"hidden_size": 1280,
"in_chans": 3,
"model_type": "qwen2_5_vl",
"out_hidden_size": 2048,
"spatial_patch_size": 14,
"tokens_per_second": 2,
"torch_dtype": "bfloat16"
},
"vision_end_token_id": 151653,
"vision_start_token_id": 151652,
"vision_token_id": 151654,
"vocab_size": 151936
}
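config.json describes the 3B-scale Qwen2.5-VL language backbone (36 layers, hidden size 2048, GQA with 2 KV heads) plus its vision encoder. A loading sketch using the architecture class named above, assuming a placeholder local model_dir:

import torch
from transformers import Qwen2_5_VLForConditionalGeneration

model = Qwen2_5_VLForConditionalGeneration.from_pretrained(
    "model_dir",                 # placeholder local path
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16"
    device_map="auto",
)
print(model.config.num_hidden_layers, model.config.hidden_size)  # 36 2048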
generation_config.json
ADDED
{
"bos_token_id": 151643,
"do_sample": true,
"eos_token_id": [
151645,
151643
],
"pad_token_id": 151643,
"repetition_penalty": 1.05,
"temperature": 1e-06,
"transformers_version": "4.50.0.dev0"
}
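With do_sample true but temperature 1e-06, decoding is effectively greedy; repetition_penalty 1.05 lightly discourages loops. A sketch of reproducing these settings explicitly (the model and inputs in the commented line are assumed to exist elsewhere):

from transformers import GenerationConfig

gen_cfg = GenerationConfig(
    do_sample=True,
    temperature=1e-06,          # effectively greedy
    repetition_penalty=1.05,
    eos_token_id=[151645, 151643],
    pad_token_id=151643,
)
# outputs = model.generate(**inputs, generation_config=gen_cfg, max_new_tokens=2048)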
log.txt
ADDED
Using a slow image processor as `use_fast` is unset and a slow processor was saved with this model. `use_fast=True` will be the default behavior in v4.48, even if the model was saved with a slow processor. This will result in minor differences in outputs. You'll still be able to use a slow processor with `use_fast=False`.
2025-07-10 17:14:24,734 INFO worker.py:1812 -- Started a local Ray instance. View the dashboard at http://127.0.0.1:8266
SYSTEM_PROMPT: You FIRST think about the reasoning process as an internal monologue and then provide the final answer. The reasoning process MUST BE enclosed within <think> </think> tags. The final answer MUST BE put in \boxed{}.

  0%|          | 0/3940 [00:00<?, ?it/s]
  1%|▋         | 46/3940 [00:00<00:09, 432.53it/s]
[... tokenization progress-bar redraws omitted; the bar advances steadily at roughly 500-700 it/s ...]
 99%|█████████████████████████████████████████████████████████▎| 3897/3940 [00:06<00:00, 602.30it/s]
(infer pid=654457) Using a slow image processor as `use_fast` is unset and a slow processor was saved with this model. `use_fast=True` will be the default behavior in v4.48, even if the model was saved with a slow processor. This will result in minor differences in outputs. You'll still be able to use a slow processor with `use_fast=False`.
(infer pid=654457) It looks like you are trying to rescale already rescaled images. If the input images have pixel values between 0 and 1, set `do_rescale=False` to avoid rescaling them again.
(infer pid=654445) Using a slow image processor as `use_fast` is unset and a slow processor was saved with this model. `use_fast=True` will be the default behavior in v4.48, even if the model was saved with a slow processor. This will result in minor differences in outputs. You'll still be able to use a slow processor with `use_fast=False`. [repeated 3x across cluster]
(infer pid=654457) Token indices sequence length is longer than the specified maximum sequence length for this model (180224 > 131072). Running this sequence through the model will result in indexing errors
(infer pid=654445) It looks like you are trying to rescale already rescaled images. If the input images have pixel values between 0 and 1, set `do_rescale=False` to avoid rescaling them again. [repeated 3x across cluster]
(infer pid=654445) Token indices sequence length is longer than the specified maximum sequence length for this model (180224 > 131072). Running this sequence through the model will result in indexing errors [repeated 3x across cluster]
[... several hundred blank output lines prefixed with (infer pid=654443/654445/654457/654459) omitted ...]
log/20250710_174320_output.log
ADDED
|
File without changes
|
log/20250710_174348_output.log
ADDED
|
File without changes
|
log/20250710_174550_output.log
ADDED
|
@@ -0,0 +1 @@
|
| 1 |
+
/group/40174/Zywoou/mm_math_reasoning/OpenCompassEval/run.sh: line 29: IR#: command not found
|
log/20250710_183548_output.log
ADDED
|
The diff for this file is too large to render.
See raw diff
|
merges.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
model-00001-of-00002.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:61f803011f632c24788c461d35819e07e479c0311a923f540e568361b6c6d335
|
| 3 |
+
size 4997750760
|
model-00002-of-00002.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:780ae0e5e331e4007d6189b40a6b15870fe2739338037cdbb7c2b22740db9131
|
| 3 |
+
size 3133917248
|
model.safetensors.index.json
ADDED
|
@@ -0,0 +1,832 @@
|
| 1 |
+
{
|
| 2 |
+
"metadata": {
|
| 3 |
+
"total_size": 8131575808
|
| 4 |
+
},
|
| 5 |
+
"weight_map": {
|
| 6 |
+
"lm_head.weight": "model-00002-of-00002.safetensors",
|
| 7 |
+
"model.embed_tokens.weight": "model-00001-of-00002.safetensors",
|
| 8 |
+
"model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 9 |
+
"model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 10 |
+
"model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 11 |
+
"model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 12 |
+
"model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 13 |
+
"model.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 14 |
+
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 15 |
+
"model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 16 |
+
"model.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 17 |
+
"model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 18 |
+
"model.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 19 |
+
"model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 20 |
+
"model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 21 |
+
"model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 22 |
+
"model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 23 |
+
"model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 24 |
+
"model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 25 |
+
"model.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 26 |
+
"model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 27 |
+
"model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 28 |
+
"model.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 29 |
+
"model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 30 |
+
"model.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 31 |
+
"model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 32 |
+
"model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 33 |
+
"model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 34 |
+
"model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 35 |
+
"model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 36 |
+
"model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 37 |
+
"model.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 38 |
+
"model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 39 |
+
"model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 40 |
+
"model.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 41 |
+
"model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 42 |
+
"model.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 43 |
+
"model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 44 |
+
"model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 45 |
+
"model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 46 |
+
"model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 47 |
+
"model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 48 |
+
"model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 49 |
+
"model.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 50 |
+
"model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 51 |
+
"model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 52 |
+
"model.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 53 |
+
"model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 54 |
+
"model.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 55 |
+
"model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 56 |
+
"model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 57 |
+
"model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 58 |
+
"model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 59 |
+
"model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 60 |
+
"model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 61 |
+
"model.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 62 |
+
"model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 63 |
+
"model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 64 |
+
"model.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 65 |
+
"model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 66 |
+
"model.layers.12.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 67 |
+
"model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 68 |
+
"model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 69 |
+
"model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 70 |
+
"model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 71 |
+
"model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 72 |
+
"model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 73 |
+
"model.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 74 |
+
"model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 75 |
+
"model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 76 |
+
"model.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 77 |
+
"model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 78 |
+
"model.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 79 |
+
"model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 80 |
+
"model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 81 |
+
"model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 82 |
+
"model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 83 |
+
"model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 84 |
+
"model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 85 |
+
"model.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 86 |
+
"model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 87 |
+
"model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 88 |
+
"model.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 89 |
+
"model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 90 |
+
"model.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 91 |
+
"model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 92 |
+
"model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 93 |
+
"model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 94 |
+
"model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 95 |
+
"model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 96 |
+
"model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 97 |
+
"model.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 98 |
+
"model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 99 |
+
"model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 100 |
+
"model.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 101 |
+
"model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 102 |
+
"model.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 103 |
+
"model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 104 |
+
"model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 105 |
+
"model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 106 |
+
"model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 107 |
+
"model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 108 |
+
"model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 109 |
+
"model.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 110 |
+
"model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 111 |
+
"model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 112 |
+
"model.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 113 |
+
"model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 114 |
+
"model.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 115 |
+
"model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 116 |
+
"model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 117 |
+
"model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 118 |
+
"model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 119 |
+
"model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 120 |
+
"model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 121 |
+
"model.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 122 |
+
"model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 123 |
+
"model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 124 |
+
"model.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 125 |
+
"model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 126 |
+
"model.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 127 |
+
"model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 128 |
+
"model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 129 |
+
"model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 130 |
+
"model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 131 |
+
"model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 132 |
+
"model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 133 |
+
"model.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 134 |
+
"model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 135 |
+
"model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 136 |
+
"model.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 137 |
+
"model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 138 |
+
"model.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 139 |
+
"model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 140 |
+
"model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 141 |
+
"model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 142 |
+
"model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 143 |
+
"model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 144 |
+
"model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 145 |
+
"model.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 146 |
+
"model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 147 |
+
"model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 148 |
+
"model.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 149 |
+
"model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 150 |
+
"model.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 151 |
+
"model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 152 |
+
"model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 153 |
+
"model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 154 |
+
"model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 155 |
+
"model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 156 |
+
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 157 |
+
"model.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 158 |
+
"model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 159 |
+
"model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 160 |
+
"model.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 161 |
+
"model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 162 |
+
"model.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 163 |
+
"model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 164 |
+
"model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 165 |
+
"model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 166 |
+
"model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 167 |
+
"model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 168 |
+
"model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 169 |
+
"model.layers.20.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 170 |
+
"model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 171 |
+
"model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 172 |
+
"model.layers.20.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 173 |
+
"model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 174 |
+
"model.layers.20.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 175 |
+
"model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 176 |
+
"model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 177 |
+
"model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 178 |
+
"model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 179 |
+
"model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 180 |
+
"model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 181 |
+
"model.layers.21.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 182 |
+
"model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 183 |
+
"model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 184 |
+
"model.layers.21.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 185 |
+
"model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 186 |
+
"model.layers.21.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 187 |
+
"model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 188 |
+
"model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 189 |
+
"model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 190 |
+
"model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 191 |
+
"model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 192 |
+
"model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 193 |
+
"model.layers.22.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 194 |
+
"model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 195 |
+
"model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 196 |
+
"model.layers.22.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 197 |
+
"model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 198 |
+
"model.layers.22.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 199 |
+
"model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 200 |
+
"model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 201 |
+
"model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 202 |
+
"model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 203 |
+
"model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 204 |
+
"model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 205 |
+
"model.layers.23.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 206 |
+
"model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 207 |
+
"model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 208 |
+
"model.layers.23.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 209 |
+
"model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 210 |
+
"model.layers.23.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 211 |
+
"model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 212 |
+
"model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 213 |
+
"model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 214 |
+
"model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 215 |
+
"model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 216 |
+
"model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 217 |
+
"model.layers.24.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 218 |
+
"model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 219 |
+
"model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 220 |
+
"model.layers.24.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 221 |
+
"model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 222 |
+
"model.layers.24.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 223 |
+
"model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 224 |
+
"model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 225 |
+
"model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 226 |
+
"model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 227 |
+
"model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 228 |
+
"model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 229 |
+
"model.layers.25.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 230 |
+
"model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 231 |
+
"model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 232 |
+
"model.layers.25.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 233 |
+
"model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 234 |
+
"model.layers.25.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 235 |
+
"model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 236 |
+
"model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 237 |
+
"model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 238 |
+
"model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 239 |
+
"model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 240 |
+
"model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 241 |
+
"model.layers.26.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 242 |
+
"model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 243 |
+
"model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 244 |
+
"model.layers.26.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 245 |
+
"model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 246 |
+
"model.layers.26.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 247 |
+
"model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 248 |
+
"model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 249 |
+
"model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 250 |
+
"model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 251 |
+
"model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 252 |
+
"model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 253 |
+
"model.layers.27.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 254 |
+
"model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 255 |
+
"model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 256 |
+
"model.layers.27.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 257 |
+
"model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 258 |
+
"model.layers.27.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 259 |
+
"model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 260 |
+
"model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 261 |
+
"model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 262 |
+
"model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 263 |
+
"model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 264 |
+
"model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 265 |
+
"model.layers.28.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 266 |
+
"model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 267 |
+
"model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 268 |
+
"model.layers.28.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 269 |
+
"model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 270 |
+
"model.layers.28.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 271 |
+
"model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 272 |
+
"model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 273 |
+
"model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 274 |
+
"model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 275 |
+
"model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 276 |
+
"model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 277 |
+
"model.layers.29.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 278 |
+
"model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 279 |
+
"model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 280 |
+
"model.layers.29.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 281 |
+
"model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 282 |
+
"model.layers.29.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 283 |
+
"model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 284 |
+
"model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 285 |
+
"model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 286 |
+
"model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 287 |
+
"model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 288 |
+
"model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 289 |
+
"model.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 290 |
+
"model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 291 |
+
"model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 292 |
+
"model.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 293 |
+
"model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 294 |
+
"model.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 295 |
+
"model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 296 |
+
"model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 297 |
+
"model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 298 |
+
"model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 299 |
+
"model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 300 |
+
"model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 301 |
+
"model.layers.30.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 302 |
+
"model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 303 |
+
"model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 304 |
+
"model.layers.30.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 305 |
+
"model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 306 |
+
"model.layers.30.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 307 |
+
"model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 308 |
+
"model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 309 |
+
"model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 310 |
+
"model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 311 |
+
"model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 312 |
+
"model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 313 |
+
"model.layers.31.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 314 |
+
"model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 315 |
+
"model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 316 |
+
"model.layers.31.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 317 |
+
"model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 318 |
+
"model.layers.31.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 319 |
+
"model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 320 |
+
"model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 321 |
+
"model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 322 |
+
"model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 323 |
+
"model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 324 |
+
"model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 325 |
+
"model.layers.32.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 326 |
+
"model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 327 |
+
"model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 328 |
+
"model.layers.32.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 329 |
+
"model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 330 |
+
"model.layers.32.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 331 |
+
"model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 332 |
+
"model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 333 |
+
"model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 334 |
+
"model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 335 |
+
"model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 336 |
+
"model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 337 |
+
"model.layers.33.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 338 |
+
"model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 339 |
+
"model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 340 |
+
"model.layers.33.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 341 |
+
"model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 342 |
+
"model.layers.33.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 343 |
+
"model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 344 |
+
"model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 345 |
+
"model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 346 |
+
"model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 347 |
+
"model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 348 |
+
"model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 349 |
+
"model.layers.34.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 350 |
+
"model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 351 |
+
"model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 352 |
+
"model.layers.34.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 353 |
+
"model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 354 |
+
"model.layers.34.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 355 |
+
"model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 356 |
+
"model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 357 |
+
"model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
|
| 358 |
+
"model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
|
| 359 |
+
"model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
|
| 360 |
+
"model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
|
| 361 |
+
"model.layers.35.self_attn.k_proj.bias": "model-00002-of-00002.safetensors",
|
| 362 |
+
"model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
|
| 363 |
+
"model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
|
| 364 |
+
"model.layers.35.self_attn.q_proj.bias": "model-00002-of-00002.safetensors",
|
| 365 |
+
"model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
|
| 366 |
+
"model.layers.35.self_attn.v_proj.bias": "model-00002-of-00002.safetensors",
|
| 367 |
+
"model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
|
| 368 |
+
"model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 369 |
+
"model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 370 |
+
"model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 371 |
+
"model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 372 |
+
"model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 373 |
+
"model.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 374 |
+
"model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 375 |
+
"model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 376 |
+
"model.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 377 |
+
"model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 378 |
+
"model.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 379 |
+
"model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 380 |
+
"model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 381 |
+
"model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 382 |
+
"model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 383 |
+
"model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 384 |
+
"model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 385 |
+
"model.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 386 |
+
"model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 387 |
+
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 388 |
+
"model.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 389 |
+
"model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 390 |
+
"model.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 391 |
+
"model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 392 |
+
"model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 393 |
+
"model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 394 |
+
"model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 395 |
+
"model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 396 |
+
"model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 397 |
+
"model.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 398 |
+
"model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 399 |
+
"model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 400 |
+
"model.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 401 |
+
"model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 402 |
+
"model.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 403 |
+
"model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 404 |
+
"model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 405 |
+
"model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 406 |
+
"model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 407 |
+
"model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 408 |
+
"model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 409 |
+
"model.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 410 |
+
"model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 411 |
+
"model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 412 |
+
"model.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 413 |
+
"model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 414 |
+
"model.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 415 |
+
"model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 416 |
+
"model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 417 |
+
"model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 418 |
+
"model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 419 |
+
"model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 420 |
+
"model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 421 |
+
"model.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 422 |
+
"model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 423 |
+
"model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 424 |
+
"model.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 425 |
+
"model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 426 |
+
"model.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 427 |
+
"model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 428 |
+
"model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 429 |
+
"model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 430 |
+
"model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 431 |
+
"model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 432 |
+
"model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 433 |
+
"model.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors",
|
| 434 |
+
"model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 435 |
+
"model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 436 |
+
"model.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors",
|
| 437 |
+
"model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 438 |
+
"model.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors",
|
| 439 |
+
"model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 440 |
+
"model.norm.weight": "model-00002-of-00002.safetensors",
|
| 441 |
+
"visual.blocks.0.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 442 |
+
"visual.blocks.0.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 443 |
+
"visual.blocks.0.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 444 |
+
"visual.blocks.0.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 445 |
+
"visual.blocks.0.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
|
| 446 |
+
"visual.blocks.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 447 |
+
"visual.blocks.0.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
|
| 448 |
+
"visual.blocks.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 449 |
+
"visual.blocks.0.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
|
| 450 |
+
"visual.blocks.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 451 |
+
"visual.blocks.0.norm1.weight": "model-00001-of-00002.safetensors",
|
| 452 |
+
"visual.blocks.0.norm2.weight": "model-00001-of-00002.safetensors",
|
| 453 |
+
"visual.blocks.1.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 454 |
+
"visual.blocks.1.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 455 |
+
"visual.blocks.1.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 456 |
+
"visual.blocks.1.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 457 |
+
"visual.blocks.1.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
|
| 458 |
+
"visual.blocks.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 459 |
+
"visual.blocks.1.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
|
| 460 |
+
"visual.blocks.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 461 |
+
"visual.blocks.1.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
|
| 462 |
+
"visual.blocks.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 463 |
+
"visual.blocks.1.norm1.weight": "model-00001-of-00002.safetensors",
|
| 464 |
+
"visual.blocks.1.norm2.weight": "model-00001-of-00002.safetensors",
|
| 465 |
+
"visual.blocks.10.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 466 |
+
"visual.blocks.10.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 467 |
+
"visual.blocks.10.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 468 |
+
"visual.blocks.10.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 469 |
+
"visual.blocks.10.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
|
| 470 |
+
"visual.blocks.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 471 |
+
"visual.blocks.10.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
|
| 472 |
+
"visual.blocks.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 473 |
+
"visual.blocks.10.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
|
| 474 |
+
"visual.blocks.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 475 |
+
"visual.blocks.10.norm1.weight": "model-00001-of-00002.safetensors",
|
| 476 |
+
"visual.blocks.10.norm2.weight": "model-00001-of-00002.safetensors",
|
| 477 |
+
"visual.blocks.11.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 478 |
+
"visual.blocks.11.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 479 |
+
"visual.blocks.11.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 480 |
+
"visual.blocks.11.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 481 |
+
"visual.blocks.11.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
|
| 482 |
+
"visual.blocks.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 483 |
+
"visual.blocks.11.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
|
| 484 |
+
"visual.blocks.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 485 |
+
"visual.blocks.11.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
|
| 486 |
+
"visual.blocks.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 487 |
+
"visual.blocks.11.norm1.weight": "model-00001-of-00002.safetensors",
|
| 488 |
+
"visual.blocks.11.norm2.weight": "model-00001-of-00002.safetensors",
|
| 489 |
+
"visual.blocks.12.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 490 |
+
"visual.blocks.12.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 491 |
+
"visual.blocks.12.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 492 |
+
"visual.blocks.12.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 493 |
+
"visual.blocks.12.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
|
| 494 |
+
"visual.blocks.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 495 |
+
"visual.blocks.12.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
|
| 496 |
+
"visual.blocks.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 497 |
+
"visual.blocks.12.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
|
| 498 |
+
"visual.blocks.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 499 |
+
"visual.blocks.12.norm1.weight": "model-00001-of-00002.safetensors",
|
| 500 |
+
"visual.blocks.12.norm2.weight": "model-00001-of-00002.safetensors",
|
| 501 |
+
"visual.blocks.13.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 502 |
+
"visual.blocks.13.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 503 |
+
"visual.blocks.13.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 504 |
+
"visual.blocks.13.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 505 |
+
"visual.blocks.13.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
|
| 506 |
+
"visual.blocks.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 507 |
+
"visual.blocks.13.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
|
| 508 |
+
"visual.blocks.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 509 |
+
"visual.blocks.13.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
|
| 510 |
+
"visual.blocks.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 511 |
+
"visual.blocks.13.norm1.weight": "model-00001-of-00002.safetensors",
|
| 512 |
+
"visual.blocks.13.norm2.weight": "model-00001-of-00002.safetensors",
|
| 513 |
+
"visual.blocks.14.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 514 |
+
"visual.blocks.14.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 515 |
+
"visual.blocks.14.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 516 |
+
"visual.blocks.14.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 517 |
+
"visual.blocks.14.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
|
| 518 |
+
"visual.blocks.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 519 |
+
"visual.blocks.14.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
|
| 520 |
+
"visual.blocks.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 521 |
+
"visual.blocks.14.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
|
| 522 |
+
"visual.blocks.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 523 |
+
"visual.blocks.14.norm1.weight": "model-00001-of-00002.safetensors",
|
| 524 |
+
"visual.blocks.14.norm2.weight": "model-00001-of-00002.safetensors",
|
| 525 |
+
"visual.blocks.15.attn.proj.bias": "model-00001-of-00002.safetensors",
|
| 526 |
+
"visual.blocks.15.attn.proj.weight": "model-00001-of-00002.safetensors",
|
| 527 |
+
"visual.blocks.15.attn.qkv.bias": "model-00001-of-00002.safetensors",
|
| 528 |
+
"visual.blocks.15.attn.qkv.weight": "model-00001-of-00002.safetensors",
|
| 529 |
+
"visual.blocks.15.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
|
| 530 |
+
"visual.blocks.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 531 |
+
"visual.blocks.15.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
|
| 532 |
+
"visual.blocks.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 533 |
+
"visual.blocks.15.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
|
| 534 |
+
"visual.blocks.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 535 |
+
"visual.blocks.15.norm1.weight": "model-00001-of-00002.safetensors",
|
+    "visual.blocks.15.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.16.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.17.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.18.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.19.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.2.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.20.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.21.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.22.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.23.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.24.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.25.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.26.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.27.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.28.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.29.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.3.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.30.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.31.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.4.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.5.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.6.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.7.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.8.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.attn.proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.attn.proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.attn.qkv.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.attn.qkv.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.mlp.down_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.mlp.gate_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.mlp.up_proj.bias": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.norm1.weight": "model-00001-of-00002.safetensors",
+    "visual.blocks.9.norm2.weight": "model-00001-of-00002.safetensors",
+    "visual.merger.ln_q.weight": "model-00001-of-00002.safetensors",
+    "visual.merger.mlp.0.bias": "model-00001-of-00002.safetensors",
+    "visual.merger.mlp.0.weight": "model-00001-of-00002.safetensors",
+    "visual.merger.mlp.2.bias": "model-00001-of-00002.safetensors",
+    "visual.merger.mlp.2.weight": "model-00001-of-00002.safetensors",
+    "visual.patch_embed.proj.weight": "model-00001-of-00002.safetensors"
+  }
+}
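The weight_map entries above tell a loader which shard stores each tensor. A minimal sketch of resolving such an index by hand; the file paths, variable names, and the use of safetensors' load_file here are illustrative assumptions, not part of this upload:

# Sketch: resolve a sharded checkpoint from model.safetensors.index.json.
# Assumes the index file and both shards sit in the current directory.
import json
from collections import defaultdict
from safetensors.torch import load_file

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# Group parameter names by the shard that stores them, then load shard by shard.
shard_to_keys = defaultdict(list)
for name, shard in index["weight_map"].items():
    shard_to_keys[shard].append(name)

state_dict = {}
for shard, keys in shard_to_keys.items():
    tensors = load_file(shard)  # e.g. model-00001-of-00002.safetensors
    state_dict.update({k: tensors[k] for k in keys})

print(f"loaded {len(state_dict)} tensors")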
preprocessor_config.json
ADDED
@@ -0,0 +1,19 @@
+{
+  "min_pixels": 3136,
+  "max_pixels": 12845056,
+  "patch_size": 14,
+  "temporal_patch_size": 2,
+  "merge_size": 2,
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "image_processor_type": "Qwen2VLImageProcessor",
+  "processor_class": "Qwen2_5_VLProcessor"
+}
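These values drive the processor's dynamic image resizing: each image is rescaled so its pixel count stays between min_pixels (56 x 56) and max_pixels (3584 x 3584) before being cut into 14 x 14 patches and merged 2 x 2. A hedged sketch of how the saved processor is typically instantiated; the checkpoint path is a placeholder:

# Sketch: load the processor saved above; min_pixels/max_pixels can also be
# overridden at load time to trade visual detail against sequence length.
from transformers import AutoProcessor

processor = AutoProcessor.from_pretrained(
    "path/to/this/checkpoint",  # placeholder path
    min_pixels=3136,            # 56 * 56, as in preprocessor_config.json
    max_pixels=12845056,        # 3584 * 3584, as in preprocessor_config.json
)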
special_tokens_map.json
ADDED
@@ -0,0 +1,31 @@
+{
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "eos_token": {
+    "content": "<|im_end|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+size 11421896
tokenizer_config.json
ADDED
@@ -0,0 +1,209 @@
+{
+  "add_bos_token": false,
+  "add_prefix_space": false,
+  "added_tokens_decoder": {
+    "151643": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151644": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151645": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151646": {
+      "content": "<|object_ref_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151647": {
+      "content": "<|object_ref_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151648": {
+      "content": "<|box_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151649": {
+      "content": "<|box_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151650": {
+      "content": "<|quad_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151651": {
+      "content": "<|quad_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151652": {
+      "content": "<|vision_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151653": {
+      "content": "<|vision_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151654": {
+      "content": "<|vision_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151655": {
+      "content": "<|image_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151656": {
+      "content": "<|video_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "151657": {
+      "content": "<tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151658": {
+      "content": "</tool_call>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151659": {
+      "content": "<|fim_prefix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151660": {
+      "content": "<|fim_middle|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151661": {
+      "content": "<|fim_suffix|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151662": {
+      "content": "<|fim_pad|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151663": {
+      "content": "<|repo_name|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    },
+    "151664": {
+      "content": "<|file_sep|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<|im_start|>",
+    "<|im_end|>",
+    "<|object_ref_start|>",
+    "<|object_ref_end|>",
+    "<|box_start|>",
+    "<|box_end|>",
+    "<|quad_start|>",
+    "<|quad_end|>",
+    "<|vision_start|>",
+    "<|vision_end|>",
+    "<|vision_pad|>",
+    "<|image_pad|>",
+    "<|video_pad|>"
+  ],
+  "bos_token": null,
+  "chat_template": "{% set image_count = namespace(value=0) %}{% set video_count = namespace(value=0) %}{% for message in messages %}{% if loop.first and message['role'] != 'system' %}<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n{% endif %}<|im_start|>{{ message['role'] }}\n{% if message['content'] is string %}{{ message['content'] }}<|im_end|>\n{% else %}{% for content in message['content'] %}{% if content['type'] == 'image' or 'image' in content or 'image_url' in content %}{% set image_count.value = image_count.value + 1 %}{% if add_vision_id %}Picture {{ image_count.value }}: {% endif %}<|vision_start|><|image_pad|><|vision_end|>{% elif content['type'] == 'video' or 'video' in content %}{% set video_count.value = video_count.value + 1 %}{% if add_vision_id %}Video {{ video_count.value }}: {% endif %}<|vision_start|><|video_pad|><|vision_end|>{% elif 'text' in content %}{{ content['text'] }}{% endif %}{% endfor %}<|im_end|>\n{% endif %}{% endfor %}{% if add_generation_prompt %}<|im_start|>assistant\n{% endif %}",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "errors": "replace",
+  "extra_special_tokens": {},
+  "model_max_length": 131072,
+  "pad_token": "<|endoftext|>",
+  "processor_class": "Qwen2_5_VLProcessor",
+  "split_special_tokens": false,
+  "tokenizer_class": "Qwen2Tokenizer",
+  "unk_token": null
+}
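The chat_template above wraps each turn in <|im_start|>/<|im_end|> and expands image or video content into <|vision_start|><|image_pad|><|vision_end|> spans. A minimal sketch of how such a template is usually applied; the checkpoint path and message text are placeholders:

# Sketch: render a multimodal conversation with the saved chat template.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this/checkpoint")  # placeholder path

messages = [
    {
        "role": "user",
        "content": [
            {"type": "image"},  # becomes <|vision_start|><|image_pad|><|vision_end|>
            {"type": "text", "text": "Solve the problem shown in the figure."},
        ],
    }
]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)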
trainer_state.json
ADDED
@@ -0,0 +1,2342 @@
+{
+  "best_metric": 0.41329578,
+  "best_model_checkpoint": "/group/40174/Zywoou/mm_math_reasoning/oly_output/SFT_text_40k_3B/v2-20250623-201026/checkpoint-900",
+  "epoch": 3.426625145971195,
+  "eval_steps": 100,
+  "global_step": 1100,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.003114052160373686,
+      "grad_norm": 9.090961456298828,
+      "learning_rate": 2.469135802469136e-07,
+      "loss": 0.8441067934036255,
+      "memory(GiB)": 61.48,
+      "step": 1,
+      "token_acc": 0.7542614698998621,
+      "train_speed(iter/s)": 0.013065
+    },
+    {
+      "epoch": 0.015570260801868432,
+      "grad_norm": 8.591418266296387,
+      "learning_rate": 1.234567901234568e-06,
+      "loss": 0.8394168615341187,
+      "memory(GiB)": 61.48,
+      "step": 5,
+      "token_acc": 0.772884347485021,
+      "train_speed(iter/s)": 0.016776
+    },
+    {
+      "epoch": 0.031140521603736863,
+      "grad_norm": 1.963486909866333,
+      "learning_rate": 2.469135802469136e-06,
+      "loss": 0.8379721641540527,
+      "memory(GiB)": 61.48,
+      "step": 10,
+      "token_acc": 0.7595817060496037,
+      "train_speed(iter/s)": 0.016413
+    },
+    {
+      "epoch": 0.04671078240560529,
+      "grad_norm": 1.29275381565094,
+      "learning_rate": 3.7037037037037037e-06,
+      "loss": 0.7905796527862549,
+      "memory(GiB)": 61.48,
+      "step": 15,
+      "token_acc": 0.7768803131791101,
+      "train_speed(iter/s)": 0.016697
+    },
+    {
+      "epoch": 0.06228104320747373,
+      "grad_norm": 1.3676708936691284,
+      "learning_rate": 4.938271604938272e-06,
+      "loss": 0.7433982372283936,
+      "memory(GiB)": 61.48,
+      "step": 20,
+      "token_acc": 0.7680492430075545,
+      "train_speed(iter/s)": 0.016714
+    },
+    {
+      "epoch": 0.07785130400934216,
+      "grad_norm": 0.9433161020278931,
+      "learning_rate": 6.17283950617284e-06,
+      "loss": 0.7095602989196778,
+      "memory(GiB)": 61.48,
+      "step": 25,
+      "token_acc": 0.7874565609207318,
+      "train_speed(iter/s)": 0.016902
+    },
+    {
+      "epoch": 0.09342156481121058,
+      "grad_norm": 0.6722971200942993,
+      "learning_rate": 7.4074074074074075e-06,
+      "loss": 0.672957468032837,
+      "memory(GiB)": 61.48,
+      "step": 30,
+      "token_acc": 0.7957260974215338,
+      "train_speed(iter/s)": 0.017038
+    },
+    {
+      "epoch": 0.10899182561307902,
+      "grad_norm": 0.4969067871570587,
+      "learning_rate": 8.641975308641975e-06,
+      "loss": 0.6527645587921143,
+      "memory(GiB)": 61.48,
+      "step": 35,
+      "token_acc": 0.8015626837586735,
+      "train_speed(iter/s)": 0.017137
+    },
+    {
+      "epoch": 0.12456208641494745,
+      "grad_norm": 0.4756340980529785,
+      "learning_rate": 9.876543209876543e-06,
+      "loss": 0.6334109306335449,
+      "memory(GiB)": 61.48,
+      "step": 40,
+      "token_acc": 0.8020030272154918,
+      "train_speed(iter/s)": 0.017137
+    },
+    {
+      "epoch": 0.1401323472168159,
+      "grad_norm": 0.37226057052612305,
+      "learning_rate": 1.1111111111111113e-05,
+      "loss": 0.6206116676330566,
+      "memory(GiB)": 81.99,
+      "step": 45,
+      "token_acc": 0.7978761643835617,
+      "train_speed(iter/s)": 0.017002
+    },
+    {
+      "epoch": 0.15570260801868432,
+      "grad_norm": 0.33603885769844055,
+      "learning_rate": 1.234567901234568e-05,
+      "loss": 0.6084653854370117,
+      "memory(GiB)": 81.99,
+      "step": 50,
+      "token_acc": 0.8057346158430913,
+      "train_speed(iter/s)": 0.01702
+    },
+    {
+      "epoch": 0.17127286882055273,
+      "grad_norm": 0.3247829079627991,
+      "learning_rate": 1.3580246913580248e-05,
+      "loss": 0.5855489730834961,
+      "memory(GiB)": 81.99,
+      "step": 55,
+      "token_acc": 0.8133501940355266,
+      "train_speed(iter/s)": 0.017086
+    },
+    {
+      "epoch": 0.18684312962242117,
+      "grad_norm": 0.3287549316883087,
+      "learning_rate": 1.4814814814814815e-05,
+      "loss": 0.5795706748962403,
+      "memory(GiB)": 81.99,
+      "step": 60,
+      "token_acc": 0.8243709005928014,
+      "train_speed(iter/s)": 0.017103
+    },
+    {
+      "epoch": 0.2024133904242896,
+      "grad_norm": 0.3348773121833801,
+      "learning_rate": 1.6049382716049385e-05,
+      "loss": 0.5844010353088379,
+      "memory(GiB)": 81.99,
+      "step": 65,
+      "token_acc": 0.826077338385553,
+      "train_speed(iter/s)": 0.017084
+    },
+    {
+      "epoch": 0.21798365122615804,
+      "grad_norm": 0.3951764702796936,
+      "learning_rate": 1.728395061728395e-05,
+      "loss": 0.5668695449829102,
+      "memory(GiB)": 81.99,
+      "step": 70,
+      "token_acc": 0.8184763611920233,
+      "train_speed(iter/s)": 0.017115
+    },
+    {
+      "epoch": 0.23355391202802647,
+      "grad_norm": 0.3419385850429535,
+      "learning_rate": 1.851851851851852e-05,
+      "loss": 0.5640019416809082,
+      "memory(GiB)": 81.99,
+      "step": 75,
+      "token_acc": 0.8262526646713034,
+      "train_speed(iter/s)": 0.017089
+    },
+    {
+      "epoch": 0.2491241728298949,
+      "grad_norm": 0.4079224467277527,
+      "learning_rate": 1.9753086419753087e-05,
+      "loss": 0.5657567501068115,
+      "memory(GiB)": 81.99,
+      "step": 80,
+      "token_acc": 0.8255806686338495,
+      "train_speed(iter/s)": 0.017058
+    },
+    {
+      "epoch": 0.2646944336317633,
+      "grad_norm": 0.4042387902736664,
+      "learning_rate": 1.9999660048205748e-05,
+      "loss": 0.5499643325805664,
+      "memory(GiB)": 81.99,
+      "step": 85,
+      "token_acc": 0.8258961193684297,
+      "train_speed(iter/s)": 0.01709
+    },
+    {
+      "epoch": 0.2802646944336318,
+      "grad_norm": 0.3471659719944,
+      "learning_rate": 1.9998279033654883e-05,
+      "loss": 0.5546986579895019,
+      "memory(GiB)": 81.99,
+      "step": 90,
+      "token_acc": 0.8232631885048027,
+      "train_speed(iter/s)": 0.01708
+    },
+    {
+      "epoch": 0.2958349552355002,
+      "grad_norm": 0.37730872631073,
+      "learning_rate": 1.999583585595892e-05,
+      "loss": 0.5496613502502441,
+      "memory(GiB)": 81.99,
+      "step": 95,
+      "token_acc": 0.8184557633810109,
+      "train_speed(iter/s)": 0.017135
+    },
+    {
+      "epoch": 0.31140521603736865,
+      "grad_norm": 0.35348325967788696,
+      "learning_rate": 1.9992330774667867e-05,
+      "loss": 0.5377495765686036,
+      "memory(GiB)": 81.99,
+      "step": 100,
+      "token_acc": 0.8358910692831397,
+      "train_speed(iter/s)": 0.017151
+    },
+    {
+      "epoch": 0.31140521603736865,
+      "eval_loss": 0.5029594302177429,
+      "eval_runtime": 48.123,
+      "eval_samples_per_second": 8.624,
+      "eval_steps_per_second": 1.081,
+      "eval_token_acc": 0.8289853121092036,
+      "step": 100
+    },
+    {
+      "epoch": 0.32697547683923706,
+      "grad_norm": 0.43411314487457275,
+      "learning_rate": 1.9987764162142615e-05,
+      "loss": 0.5542641639709472,
+      "memory(GiB)": 83.67,
+      "step": 105,
+      "token_acc": 0.822730121577026,
+      "train_speed(iter/s)": 0.016973
+    },
+    {
+      "epoch": 0.34254573764110546,
+      "grad_norm": 0.3889370560646057,
+      "learning_rate": 1.998213650351541e-05,
+      "loss": 0.5661673545837402,
+      "memory(GiB)": 83.67,
+      "step": 110,
+      "token_acc": 0.8248022938189019,
+      "train_speed(iter/s)": 0.016968
+    },
+    {
+      "epoch": 0.3581159984429739,
+      "grad_norm": 0.4113948941230774,
+      "learning_rate": 1.99754483966383e-05,
+      "loss": 0.5373417377471924,
+      "memory(GiB)": 83.67,
+      "step": 115,
+      "token_acc": 0.8353561888566422,
+      "train_speed(iter/s)": 0.016986
+    },
+    {
+      "epoch": 0.37368625924484233,
+      "grad_norm": 0.3282926678657532,
+      "learning_rate": 1.996770055201962e-05,
+      "loss": 0.532097053527832,
+      "memory(GiB)": 83.67,
+      "step": 120,
+      "token_acc": 0.8311512985373245,
+      "train_speed(iter/s)": 0.017002
+    },
+    {
+      "epoch": 0.3892565200467108,
+      "grad_norm": 0.3860708475112915,
+      "learning_rate": 1.9958893792748527e-05,
+      "loss": 0.5377762794494629,
+      "memory(GiB)": 83.67,
+      "step": 125,
+      "token_acc": 0.8319092733783512,
+      "train_speed(iter/s)": 0.017043
+    },
+    {
+      "epoch": 0.4048267808485792,
+      "grad_norm": 0.3636181056499481,
+      "learning_rate": 1.994902905440754e-05,
+      "loss": 0.5360857009887695,
+      "memory(GiB)": 83.67,
+      "step": 130,
+      "token_acc": 0.8289638007457961,
+      "train_speed(iter/s)": 0.017049
+    },
+    {
+      "epoch": 0.42039704165044767,
+      "grad_norm": 0.44786185026168823,
+      "learning_rate": 1.9938107384973165e-05,
+      "loss": 0.5159939765930176,
+      "memory(GiB)": 83.67,
+      "step": 135,
+      "token_acc": 0.8303873578325347,
+      "train_speed(iter/s)": 0.017068
+    },
+    {
+      "epoch": 0.4359673024523161,
+      "grad_norm": 0.36897265911102295,
+      "learning_rate": 1.9926129944704552e-05,
+      "loss": 0.525636863708496,
+      "memory(GiB)": 83.67,
+      "step": 140,
+      "token_acc": 0.825755096315691,
+      "train_speed(iter/s)": 0.017067
+    },
+    {
+      "epoch": 0.4515375632541845,
+      "grad_norm": 0.3940599262714386,
+      "learning_rate": 1.9913098006020245e-05,
+      "loss": 0.5220311164855957,
+      "memory(GiB)": 83.67,
+      "step": 145,
+      "token_acc": 0.8314430013298949,
+      "train_speed(iter/s)": 0.017053
+    },
+    {
+      "epoch": 0.46710782405605294,
+      "grad_norm": 0.36770397424697876,
+      "learning_rate": 1.9899012953363002e-05,
+      "loss": 0.5308480262756348,
+      "memory(GiB)": 83.67,
+      "step": 150,
+      "token_acc": 0.8356369708426136,
+      "train_speed(iter/s)": 0.017061
+    },
+    {
+      "epoch": 0.48267808485792135,
+      "grad_norm": 0.3850296437740326,
+      "learning_rate": 1.988387628305271e-05,
+      "loss": 0.5177151679992675,
+      "memory(GiB)": 83.67,
+      "step": 155,
+      "token_acc": 0.8446110002134257,
+      "train_speed(iter/s)": 0.017066
+    },
+    {
+      "epoch": 0.4982483456597898,
+      "grad_norm": 0.41091373562812805,
+      "learning_rate": 1.9867689603127448e-05,
+      "loss": 0.5239609718322754,
+      "memory(GiB)": 83.67,
+      "step": 160,
+      "token_acc": 0.8418038278766896,
+      "train_speed(iter/s)": 0.017026
+    },
+    {
+      "epoch": 0.5138186064616582,
+      "grad_norm": 0.35084888339042664,
+      "learning_rate": 1.9850454633172632e-05,
+      "loss": 0.5135612487792969,
+      "memory(GiB)": 83.67,
+      "step": 165,
+      "token_acc": 0.8403358434638878,
+      "train_speed(iter/s)": 0.017025
+    },
+    {
+      "epoch": 0.5293888672635266,
+      "grad_norm": 0.3684956729412079,
+      "learning_rate": 1.9832173204138358e-05,
+      "loss": 0.5212111473083496,
+      "memory(GiB)": 83.67,
+      "step": 170,
+      "token_acc": 0.8203030950800491,
+      "train_speed(iter/s)": 0.01705
+    },
+    {
+      "epoch": 0.5449591280653951,
+      "grad_norm": 0.4466633200645447,
+      "learning_rate": 1.981284725814487e-05,
+      "loss": 0.5236361503601075,
+      "memory(GiB)": 83.67,
+      "step": 175,
+      "token_acc": 0.8292287351630786,
+      "train_speed(iter/s)": 0.017037
+    },
+    {
+      "epoch": 0.5605293888672636,
+      "grad_norm": 0.4126527011394501,
+      "learning_rate": 1.979247884827625e-05,
+      "loss": 0.5252516746520997,
+      "memory(GiB)": 83.67,
+      "step": 180,
+      "token_acc": 0.8313978101236051,
+      "train_speed(iter/s)": 0.017027
+    },
+    {
+      "epoch": 0.576099649669132,
+      "grad_norm": 0.3615601658821106,
+      "learning_rate": 1.9771070138362326e-05,
+      "loss": 0.5181349277496338,
+      "memory(GiB)": 83.67,
+      "step": 185,
+      "token_acc": 0.8271949270166622,
+      "train_speed(iter/s)": 0.017036
+    },
+    {
+      "epoch": 0.5916699104710004,
+      "grad_norm": 0.363862544298172,
+      "learning_rate": 1.974862340274876e-05,
+      "loss": 0.5171935081481933,
+      "memory(GiB)": 83.67,
+      "step": 190,
+      "token_acc": 0.8262061487073518,
+      "train_speed(iter/s)": 0.017046
+    },
| 410 |
+
{
|
| 411 |
+
"epoch": 0.6072401712728688,
|
| 412 |
+
"grad_norm": 0.35778218507766724,
|
| 413 |
+
"learning_rate": 1.9725141026055473e-05,
|
| 414 |
+
"loss": 0.5045164585113525,
|
| 415 |
+
"memory(GiB)": 83.67,
|
| 416 |
+
"step": 195,
|
| 417 |
+
"token_acc": 0.8280538716190542,
|
| 418 |
+
"train_speed(iter/s)": 0.017058
|
| 419 |
+
},
|
| 420 |
+
{
|
| 421 |
+
"epoch": 0.6228104320747373,
|
| 422 |
+
"grad_norm": 0.3499464690685272,
|
| 423 |
+
"learning_rate": 1.9700625502923286e-05,
|
| 424 |
+
"loss": 0.5087326049804688,
|
| 425 |
+
"memory(GiB)": 83.67,
|
| 426 |
+
"step": 200,
|
| 427 |
+
"token_acc": 0.8393105379001429,
|
| 428 |
+
"train_speed(iter/s)": 0.017074
|
| 429 |
+
},
|
| 430 |
+
{
|
| 431 |
+
"epoch": 0.6228104320747373,
|
| 432 |
+
"eval_loss": 0.47101157903671265,
|
| 433 |
+
"eval_runtime": 48.3616,
|
| 434 |
+
"eval_samples_per_second": 8.581,
|
| 435 |
+
"eval_steps_per_second": 1.075,
|
| 436 |
+
"eval_token_acc": 0.8365704554766953,
|
| 437 |
+
"step": 200
|
| 438 |
+
},
|
| 439 |
+
{
|
| 440 |
+
"epoch": 0.6383806928766057,
|
| 441 |
+
"grad_norm": 0.3926837146282196,
|
| 442 |
+
"learning_rate": 1.967507943774893e-05,
|
| 443 |
+
"loss": 0.5087917804718017,
|
| 444 |
+
"memory(GiB)": 83.78,
|
| 445 |
+
"step": 205,
|
| 446 |
+
"token_acc": 0.8356912249863274,
|
| 447 |
+
"train_speed(iter/s)": 0.016969
|
| 448 |
+
},
|
| 449 |
+
{
|
| 450 |
+
"epoch": 0.6539509536784741,
|
| 451 |
+
"grad_norm": 0.44116681814193726,
|
| 452 |
+
"learning_rate": 1.9648505544408343e-05,
|
| 453 |
+
"loss": 0.5104311943054199,
|
| 454 |
+
"memory(GiB)": 83.78,
|
| 455 |
+
"step": 210,
|
| 456 |
+
"token_acc": 0.8370874883557564,
|
| 457 |
+
"train_speed(iter/s)": 0.016962
|
| 458 |
+
},
|
| 459 |
+
{
|
| 460 |
+
"epoch": 0.6695212144803425,
|
| 461 |
+
"grad_norm": 0.3881992995738983,
|
| 462 |
+
"learning_rate": 1.962090664596838e-05,
|
| 463 |
+
"loss": 0.49617815017700195,
|
| 464 |
+
"memory(GiB)": 83.78,
|
| 465 |
+
"step": 215,
|
| 466 |
+
"token_acc": 0.8333252446460243,
|
| 467 |
+
"train_speed(iter/s)": 0.016969
|
| 468 |
+
},
|
| 469 |
+
{
|
| 470 |
+
"epoch": 0.6850914752822109,
|
| 471 |
+
"grad_norm": 0.3824191987514496,
|
| 472 |
+
"learning_rate": 1.9592285674386895e-05,
|
| 473 |
+
"loss": 0.5057227134704589,
|
| 474 |
+
"memory(GiB)": 83.78,
|
| 475 |
+
"step": 220,
|
| 476 |
+
"token_acc": 0.8328552368245496,
|
| 477 |
+
"train_speed(iter/s)": 0.016954
|
| 478 |
+
},
|
| 479 |
+
{
|
| 480 |
+
"epoch": 0.7006617360840794,
|
| 481 |
+
"grad_norm": 0.3746967017650604,
|
| 482 |
+
"learning_rate": 1.9562645670201278e-05,
|
| 483 |
+
"loss": 0.5225645542144776,
|
| 484 |
+
"memory(GiB)": 83.78,
|
| 485 |
+
"step": 225,
|
| 486 |
+
"token_acc": 0.8198400577125468,
|
| 487 |
+
"train_speed(iter/s)": 0.016942
|
| 488 |
+
},
|
| 489 |
+
{
|
| 490 |
+
"epoch": 0.7162319968859479,
|
| 491 |
+
"grad_norm": 0.38867348432540894,
|
| 492 |
+
"learning_rate": 1.9531989782205425e-05,
|
| 493 |
+
"loss": 0.5209392547607422,
|
| 494 |
+
"memory(GiB)": 83.78,
|
| 495 |
+
"step": 230,
|
| 496 |
+
"token_acc": 0.8268426966579727,
|
| 497 |
+
"train_speed(iter/s)": 0.016954
|
| 498 |
+
},
|
| 499 |
+
{
|
| 500 |
+
"epoch": 0.7318022576878163,
|
| 501 |
+
"grad_norm": 0.34132710099220276,
|
| 502 |
+
"learning_rate": 1.9500321267115253e-05,
|
| 503 |
+
"loss": 0.506260871887207,
|
| 504 |
+
"memory(GiB)": 83.78,
|
| 505 |
+
"step": 235,
|
| 506 |
+
"token_acc": 0.8383771649752925,
|
| 507 |
+
"train_speed(iter/s)": 0.016942
|
| 508 |
+
},
|
| 509 |
+
{
|
| 510 |
+
"epoch": 0.7473725184896847,
|
| 511 |
+
"grad_norm": 0.3127667009830475,
|
| 512 |
+
"learning_rate": 1.9467643489222704e-05,
|
| 513 |
+
"loss": 0.5079313278198242,
|
| 514 |
+
"memory(GiB)": 83.78,
|
| 515 |
+
"step": 240,
|
| 516 |
+
"token_acc": 0.8315731041446339,
|
| 517 |
+
"train_speed(iter/s)": 0.016933
|
| 518 |
+
},
|
| 519 |
+
{
|
| 520 |
+
"epoch": 0.7629427792915532,
|
| 521 |
+
"grad_norm": 0.36045560240745544,
|
| 522 |
+
"learning_rate": 1.9433959920038346e-05,
|
| 523 |
+
"loss": 0.5103404521942139,
|
| 524 |
+
"memory(GiB)": 83.78,
|
| 525 |
+
"step": 245,
|
| 526 |
+
"token_acc": 0.8284737724912421,
|
| 527 |
+
"train_speed(iter/s)": 0.016938
|
| 528 |
+
},
|
| 529 |
+
{
|
| 530 |
+
"epoch": 0.7785130400934216,
|
| 531 |
+
"grad_norm": 0.3331986963748932,
|
| 532 |
+
"learning_rate": 1.939927413792258e-05,
|
| 533 |
+
"loss": 0.5129657745361328,
|
| 534 |
+
"memory(GiB)": 83.78,
|
| 535 |
+
"step": 250,
|
| 536 |
+
"token_acc": 0.8337586241949231,
|
| 537 |
+
"train_speed(iter/s)": 0.016947
|
| 538 |
+
},
|
| 539 |
+
{
|
| 540 |
+
"epoch": 0.79408330089529,
|
| 541 |
+
"grad_norm": 0.3113352954387665,
|
| 542 |
+
"learning_rate": 1.9363589827705494e-05,
|
| 543 |
+
"loss": 0.5070863723754883,
|
| 544 |
+
"memory(GiB)": 83.78,
|
| 545 |
+
"step": 255,
|
| 546 |
+
"token_acc": 0.8234678436927421,
|
| 547 |
+
"train_speed(iter/s)": 0.016943
|
| 548 |
+
},
|
| 549 |
+
{
|
| 550 |
+
"epoch": 0.8096535616971584,
|
| 551 |
+
"grad_norm": 0.36101603507995605,
|
| 552 |
+
"learning_rate": 1.932691078029541e-05,
|
| 553 |
+
"loss": 0.5083826541900635,
|
| 554 |
+
"memory(GiB)": 83.78,
|
| 555 |
+
"step": 260,
|
| 556 |
+
"token_acc": 0.8253210564574751,
|
| 557 |
+
"train_speed(iter/s)": 0.016957
|
| 558 |
+
},
|
| 559 |
+
{
|
| 560 |
+
"epoch": 0.8252238224990268,
|
| 561 |
+
"grad_norm": 0.35200613737106323,
|
| 562 |
+
"learning_rate": 1.9289240892276156e-05,
|
| 563 |
+
"loss": 0.5094104290008545,
|
| 564 |
+
"memory(GiB)": 83.78,
|
| 565 |
+
"step": 265,
|
| 566 |
+
"token_acc": 0.8399629352836229,
|
| 567 |
+
"train_speed(iter/s)": 0.016958
|
| 568 |
+
},
|
| 569 |
+
{
|
| 570 |
+
"epoch": 0.8407940833008953,
|
| 571 |
+
"grad_norm": 0.3463002145290375,
|
| 572 |
+
"learning_rate": 1.9250584165493102e-05,
|
| 573 |
+
"loss": 0.503563404083252,
|
| 574 |
+
"memory(GiB)": 83.78,
|
| 575 |
+
"step": 270,
|
| 576 |
+
"token_acc": 0.8328735843753342,
|
| 577 |
+
"train_speed(iter/s)": 0.016964
|
| 578 |
+
},
|
| 579 |
+
{
|
| 580 |
+
"epoch": 0.8563643441027637,
|
| 581 |
+
"grad_norm": 0.35573363304138184,
|
| 582 |
+
"learning_rate": 1.9210944706628047e-05,
|
| 583 |
+
"loss": 0.5095272064208984,
|
| 584 |
+
"memory(GiB)": 83.78,
|
| 585 |
+
"step": 275,
|
| 586 |
+
"token_acc": 0.8259964544369949,
|
| 587 |
+
"train_speed(iter/s)": 0.016968
|
| 588 |
+
},
|
| 589 |
+
{
|
| 590 |
+
"epoch": 0.8719346049046321,
|
| 591 |
+
"grad_norm": 0.37910547852516174,
|
| 592 |
+
"learning_rate": 1.9170326726762935e-05,
|
| 593 |
+
"loss": 0.512710428237915,
|
| 594 |
+
"memory(GiB)": 83.78,
|
| 595 |
+
"step": 280,
|
| 596 |
+
"token_acc": 0.8316431444307558,
|
| 597 |
+
"train_speed(iter/s)": 0.01696
|
| 598 |
+
},
|
| 599 |
+
{
|
| 600 |
+
"epoch": 0.8875048657065006,
|
| 601 |
+
"grad_norm": 0.36142924427986145,
|
| 602 |
+
"learning_rate": 1.9128734540932494e-05,
|
| 603 |
+
"loss": 0.5098121643066407,
|
| 604 |
+
"memory(GiB)": 83.78,
|
| 605 |
+
"step": 285,
|
| 606 |
+
"token_acc": 0.8245892568215938,
|
| 607 |
+
"train_speed(iter/s)": 0.016968
|
| 608 |
+
},
|
| 609 |
+
{
|
| 610 |
+
"epoch": 0.903075126508369,
|
| 611 |
+
"grad_norm": 0.3772912621498108,
|
| 612 |
+
"learning_rate": 1.908617256766583e-05,
|
| 613 |
+
"loss": 0.5062539577484131,
|
| 614 |
+
"memory(GiB)": 83.78,
|
| 615 |
+
"step": 290,
|
| 616 |
+
"token_acc": 0.834810945744792,
|
| 617 |
+
"train_speed(iter/s)": 0.016961
|
| 618 |
+
},
|
| 619 |
+
{
|
| 620 |
+
"epoch": 0.9186453873102375,
|
| 621 |
+
"grad_norm": 0.3878962993621826,
|
| 622 |
+
"learning_rate": 1.904264532851702e-05,
|
| 623 |
+
"loss": 0.4917923927307129,
|
| 624 |
+
"memory(GiB)": 83.78,
|
| 625 |
+
"step": 295,
|
| 626 |
+
"token_acc": 0.8298348122666052,
|
| 627 |
+
"train_speed(iter/s)": 0.016966
|
| 628 |
+
},
|
| 629 |
+
{
|
| 630 |
+
"epoch": 0.9342156481121059,
|
| 631 |
+
"grad_norm": 0.31322357058525085,
|
| 632 |
+
"learning_rate": 1.899815744758478e-05,
|
| 633 |
+
"loss": 0.49855747222900393,
|
| 634 |
+
"memory(GiB)": 83.78,
|
| 635 |
+
"step": 300,
|
| 636 |
+
"token_acc": 0.8404212765465496,
|
| 637 |
+
"train_speed(iter/s)": 0.016995
|
| 638 |
+
},
|
| 639 |
+
{
|
| 640 |
+
"epoch": 0.9342156481121059,
|
| 641 |
+
"eval_loss": 0.4527965784072876,
|
| 642 |
+
"eval_runtime": 48.4158,
|
| 643 |
+
"eval_samples_per_second": 8.572,
|
| 644 |
+
"eval_steps_per_second": 1.074,
|
| 645 |
+
"eval_token_acc": 0.8412428185484144,
|
| 646 |
+
"step": 300
|
| 647 |
+
},
|
| 648 |
+
{
|
| 649 |
+
"epoch": 0.9497859089139743,
|
| 650 |
+
"grad_norm": 0.34280747175216675,
|
| 651 |
+
"learning_rate": 1.8952713651021227e-05,
|
| 652 |
+
"loss": 0.48580265045166016,
|
| 653 |
+
"memory(GiB)": 83.78,
|
| 654 |
+
"step": 305,
|
| 655 |
+
"token_acc": 0.838886737615503,
|
| 656 |
+
"train_speed(iter/s)": 0.016948
|
| 657 |
+
},
|
| 658 |
+
{
|
| 659 |
+
"epoch": 0.9653561697158427,
|
| 660 |
+
"grad_norm": 0.37516114115715027,
|
| 661 |
+
"learning_rate": 1.890631876652977e-05,
|
| 662 |
+
"loss": 0.49081811904907224,
|
| 663 |
+
"memory(GiB)": 83.78,
|
| 664 |
+
"step": 310,
|
| 665 |
+
"token_acc": 0.8279721583939669,
|
| 666 |
+
"train_speed(iter/s)": 0.016959
|
| 667 |
+
},
|
| 668 |
+
{
|
| 669 |
+
"epoch": 0.9809264305177112,
|
| 670 |
+
"grad_norm": 0.37829071283340454,
|
| 671 |
+
"learning_rate": 1.8858977722852273e-05,
|
| 672 |
+
"loss": 0.5034436225891114,
|
| 673 |
+
"memory(GiB)": 83.78,
|
| 674 |
+
"step": 315,
|
| 675 |
+
"token_acc": 0.8373031558965061,
|
| 676 |
+
"train_speed(iter/s)": 0.016951
|
| 677 |
+
},
|
| 678 |
+
{
|
| 679 |
+
"epoch": 0.9964966913195796,
|
| 680 |
+
"grad_norm": 0.3431848883628845,
|
| 681 |
+
"learning_rate": 1.881069554924545e-05,
|
| 682 |
+
"loss": 0.4938789367675781,
|
| 683 |
+
"memory(GiB)": 83.78,
|
| 684 |
+
"step": 320,
|
| 685 |
+
"token_acc": 0.8419751634548625,
|
| 686 |
+
"train_speed(iter/s)": 0.016944
|
| 687 |
+
},
|
| 688 |
+
{
|
| 689 |
+
"epoch": 1.0124562086414948,
|
| 690 |
+
"grad_norm": 0.3898485600948334,
|
| 691 |
+
"learning_rate": 1.8761477374946548e-05,
|
| 692 |
+
"loss": 0.549742317199707,
|
| 693 |
+
"memory(GiB)": 83.78,
|
| 694 |
+
"step": 325,
|
| 695 |
+
"token_acc": 0.8478408314628672,
|
| 696 |
+
"train_speed(iter/s)": 0.016925
|
| 697 |
+
},
|
| 698 |
+
{
|
| 699 |
+
"epoch": 1.0280264694433632,
|
| 700 |
+
"grad_norm": 0.3720360994338989,
|
| 701 |
+
"learning_rate": 1.8711328428628492e-05,
|
| 702 |
+
"loss": 0.4674954414367676,
|
| 703 |
+
"memory(GiB)": 83.78,
|
| 704 |
+
"step": 330,
|
| 705 |
+
"token_acc": 0.8488305868952306,
|
| 706 |
+
"train_speed(iter/s)": 0.016907
|
| 707 |
+
},
|
| 708 |
+
{
|
| 709 |
+
"epoch": 1.0435967302452316,
|
| 710 |
+
"grad_norm": 0.37718313932418823,
|
| 711 |
+
"learning_rate": 1.866025403784439e-05,
|
| 712 |
+
"loss": 0.4795982837677002,
|
| 713 |
+
"memory(GiB)": 83.78,
|
| 714 |
+
"step": 335,
|
| 715 |
+
"token_acc": 0.8451706485573955,
|
| 716 |
+
"train_speed(iter/s)": 0.016912
|
| 717 |
+
},
|
| 718 |
+
{
|
| 719 |
+
"epoch": 1.0591669910471,
|
| 720 |
+
"grad_norm": 0.35298970341682434,
|
| 721 |
+
"learning_rate": 1.8608259628461568e-05,
|
| 722 |
+
"loss": 0.46731414794921877,
|
| 723 |
+
"memory(GiB)": 83.78,
|
| 724 |
+
"step": 340,
|
| 725 |
+
"token_acc": 0.8569462760162835,
|
| 726 |
+
"train_speed(iter/s)": 0.016926
|
| 727 |
+
},
|
| 728 |
+
{
|
| 729 |
+
"epoch": 1.0747372518489684,
|
| 730 |
+
"grad_norm": 0.33034011721611023,
|
| 731 |
+
"learning_rate": 1.855535072408516e-05,
|
| 732 |
+
"loss": 0.4545105457305908,
|
| 733 |
+
"memory(GiB)": 83.78,
|
| 734 |
+
"step": 345,
|
| 735 |
+
"token_acc": 0.8460660005038206,
|
| 736 |
+
"train_speed(iter/s)": 0.016927
|
| 737 |
+
},
|
| 738 |
+
{
|
| 739 |
+
"epoch": 1.0903075126508368,
|
| 740 |
+
"grad_norm": 0.3065577745437622,
|
| 741 |
+
"learning_rate": 1.850153294547131e-05,
|
| 742 |
+
"loss": 0.45794997215270994,
|
| 743 |
+
"memory(GiB)": 83.78,
|
| 744 |
+
"step": 350,
|
| 745 |
+
"token_acc": 0.8486942707726407,
|
| 746 |
+
"train_speed(iter/s)": 0.016917
|
| 747 |
+
},
|
| 748 |
+
{
|
| 749 |
+
"epoch": 1.1058777734527054,
|
| 750 |
+
"grad_norm": 0.3462938368320465,
|
| 751 |
+
"learning_rate": 1.8446812009930046e-05,
|
| 752 |
+
"loss": 0.46443839073181153,
|
| 753 |
+
"memory(GiB)": 83.78,
|
| 754 |
+
"step": 355,
|
| 755 |
+
"token_acc": 0.8502326066894884,
|
| 756 |
+
"train_speed(iter/s)": 0.016914
|
| 757 |
+
},
|
| 758 |
+
{
|
| 759 |
+
"epoch": 1.1214480342545738,
|
| 760 |
+
"grad_norm": 0.32309216260910034,
|
| 761 |
+
"learning_rate": 1.839119373071791e-05,
|
| 762 |
+
"loss": 0.4771932601928711,
|
| 763 |
+
"memory(GiB)": 83.78,
|
| 764 |
+
"step": 360,
|
| 765 |
+
"token_acc": 0.8482756332906233,
|
| 766 |
+
"train_speed(iter/s)": 0.016898
|
| 767 |
+
},
|
| 768 |
+
{
|
| 769 |
+
"epoch": 1.1370182950564423,
|
| 770 |
+
"grad_norm": 0.33675771951675415,
|
| 771 |
+
"learning_rate": 1.8334684016420383e-05,
|
| 772 |
+
"loss": 0.4566344261169434,
|
| 773 |
+
"memory(GiB)": 83.78,
|
| 774 |
+
"step": 365,
|
| 775 |
+
"token_acc": 0.8473928793757336,
|
| 776 |
+
"train_speed(iter/s)": 0.016891
|
| 777 |
+
},
|
| 778 |
+
{
|
| 779 |
+
"epoch": 1.1525885558583107,
|
| 780 |
+
"grad_norm": 0.3071984648704529,
|
| 781 |
+
"learning_rate": 1.82772888703242e-05,
|
| 782 |
+
"loss": 0.4622032165527344,
|
| 783 |
+
"memory(GiB)": 83.78,
|
| 784 |
+
"step": 370,
|
| 785 |
+
"token_acc": 0.8537222303050629,
|
| 786 |
+
"train_speed(iter/s)": 0.016883
|
| 787 |
+
},
|
| 788 |
+
{
|
| 789 |
+
"epoch": 1.168158816660179,
|
| 790 |
+
"grad_norm": 0.339647114276886,
|
| 791 |
+
"learning_rate": 1.8219014389779586e-05,
|
| 792 |
+
"loss": 0.45253515243530273,
|
| 793 |
+
"memory(GiB)": 83.78,
|
| 794 |
+
"step": 375,
|
| 795 |
+
"token_acc": 0.8437412853929243,
|
| 796 |
+
"train_speed(iter/s)": 0.01689
|
| 797 |
+
},
|
| 798 |
+
{
|
| 799 |
+
"epoch": 1.1837290774620475,
|
| 800 |
+
"grad_norm": 0.3810037672519684,
|
| 801 |
+
"learning_rate": 1.81598667655525e-05,
|
| 802 |
+
"loss": 0.46290812492370603,
|
| 803 |
+
"memory(GiB)": 83.78,
|
| 804 |
+
"step": 380,
|
| 805 |
+
"token_acc": 0.8509664132197645,
|
| 806 |
+
"train_speed(iter/s)": 0.016878
|
| 807 |
+
},
|
| 808 |
+
{
|
| 809 |
+
"epoch": 1.1992993382639159,
|
| 810 |
+
"grad_norm": 0.3978622853755951,
|
| 811 |
+
"learning_rate": 1.8099852281166974e-05,
|
| 812 |
+
"loss": 0.4630721569061279,
|
| 813 |
+
"memory(GiB)": 83.78,
|
| 814 |
+
"step": 385,
|
| 815 |
+
"token_acc": 0.8428528719403278,
|
| 816 |
+
"train_speed(iter/s)": 0.016867
|
| 817 |
+
},
|
| 818 |
+
{
|
| 819 |
+
"epoch": 1.2148695990657843,
|
| 820 |
+
"grad_norm": 0.34080690145492554,
|
| 821 |
+
"learning_rate": 1.8038977312237583e-05,
|
| 822 |
+
"loss": 0.46429901123046874,
|
| 823 |
+
"memory(GiB)": 83.78,
|
| 824 |
+
"step": 390,
|
| 825 |
+
"token_acc": 0.853177745668261,
|
| 826 |
+
"train_speed(iter/s)": 0.016864
|
| 827 |
+
},
|
| 828 |
+
{
|
| 829 |
+
"epoch": 1.2304398598676527,
|
| 830 |
+
"grad_norm": 0.38950115442276,
|
| 831 |
+
"learning_rate": 1.7977248325792117e-05,
|
| 832 |
+
"loss": 0.4587130546569824,
|
| 833 |
+
"memory(GiB)": 83.78,
|
| 834 |
+
"step": 395,
|
| 835 |
+
"token_acc": 0.8436527334397446,
|
| 836 |
+
"train_speed(iter/s)": 0.016864
|
| 837 |
+
},
|
| 838 |
+
{
|
| 839 |
+
"epoch": 1.246010120669521,
|
| 840 |
+
"grad_norm": 0.33555251359939575,
|
| 841 |
+
"learning_rate": 1.791467187958459e-05,
|
| 842 |
+
"loss": 0.4662825584411621,
|
| 843 |
+
"memory(GiB)": 83.78,
|
| 844 |
+
"step": 400,
|
| 845 |
+
"token_acc": 0.8447516930022574,
|
| 846 |
+
"train_speed(iter/s)": 0.016854
|
| 847 |
+
},
|
| 848 |
+
{
|
| 849 |
+
"epoch": 1.246010120669521,
|
| 850 |
+
"eval_loss": 0.44290465116500854,
|
| 851 |
+
"eval_runtime": 48.4727,
|
| 852 |
+
"eval_samples_per_second": 8.562,
|
| 853 |
+
"eval_steps_per_second": 1.073,
|
| 854 |
+
"eval_token_acc": 0.8440737465212763,
|
| 855 |
+
"step": 400
|
| 856 |
+
},
|
| 857 |
+
{
|
| 858 |
+
"epoch": 1.2615803814713895,
|
| 859 |
+
"grad_norm": 0.3424926996231079,
|
| 860 |
+
"learning_rate": 1.785125462139855e-05,
|
| 861 |
+
"loss": 0.45247802734375,
|
| 862 |
+
"memory(GiB)": 85.46,
|
| 863 |
+
"step": 405,
|
| 864 |
+
"token_acc": 0.8476541229236075,
|
| 865 |
+
"train_speed(iter/s)": 0.016819
|
| 866 |
+
},
|
| 867 |
+
{
|
| 868 |
+
"epoch": 1.2771506422732581,
|
| 869 |
+
"grad_norm": 0.3556825518608093,
|
| 870 |
+
"learning_rate": 1.7787003288340873e-05,
|
| 871 |
+
"loss": 0.4520209312438965,
|
| 872 |
+
"memory(GiB)": 85.46,
|
| 873 |
+
"step": 410,
|
| 874 |
+
"token_acc": 0.85518420823792,
|
| 875 |
+
"train_speed(iter/s)": 0.016827
|
| 876 |
+
},
|
| 877 |
+
{
|
| 878 |
+
"epoch": 1.2927209030751265,
|
| 879 |
+
"grad_norm": 0.3012397587299347,
|
| 880 |
+
"learning_rate": 1.7721924706126045e-05,
|
| 881 |
+
"loss": 0.4547447204589844,
|
| 882 |
+
"memory(GiB)": 85.46,
|
| 883 |
+
"step": 415,
|
| 884 |
+
"token_acc": 0.8473332915910087,
|
| 885 |
+
"train_speed(iter/s)": 0.016838
|
| 886 |
+
},
|
| 887 |
+
{
|
| 888 |
+
"epoch": 1.308291163876995,
|
| 889 |
+
"grad_norm": 0.3305128514766693,
|
| 890 |
+
"learning_rate": 1.765602578835102e-05,
|
| 891 |
+
"loss": 0.44603533744812013,
|
| 892 |
+
"memory(GiB)": 85.46,
|
| 893 |
+
"step": 420,
|
| 894 |
+
"token_acc": 0.8563695561772267,
|
| 895 |
+
"train_speed(iter/s)": 0.016851
|
| 896 |
+
},
|
| 897 |
+
{
|
| 898 |
+
"epoch": 1.3238614246788634,
|
| 899 |
+
"grad_norm": 0.375415563583374,
|
| 900 |
+
"learning_rate": 1.7589313535760787e-05,
|
| 901 |
+
"loss": 0.4534785270690918,
|
| 902 |
+
"memory(GiB)": 85.46,
|
| 903 |
+
"step": 425,
|
| 904 |
+
"token_acc": 0.8450059826434574,
|
| 905 |
+
"train_speed(iter/s)": 0.016859
|
| 906 |
+
},
|
| 907 |
+
{
|
| 908 |
+
"epoch": 1.3394316854807318,
|
| 909 |
+
"grad_norm": 0.3619174659252167,
|
| 910 |
+
"learning_rate": 1.7521795035504618e-05,
|
| 911 |
+
"loss": 0.46638121604919436,
|
| 912 |
+
"memory(GiB)": 87.47,
|
| 913 |
+
"step": 430,
|
| 914 |
+
"token_acc": 0.8533132783257229,
|
| 915 |
+
"train_speed(iter/s)": 0.016865
|
| 916 |
+
},
|
| 917 |
+
{
|
| 918 |
+
"epoch": 1.3550019462826002,
|
| 919 |
+
"grad_norm": 0.34234941005706787,
|
| 920 |
+
"learning_rate": 1.745347746038319e-05,
|
| 921 |
+
"loss": 0.45301074981689454,
|
| 922 |
+
"memory(GiB)": 87.47,
|
| 923 |
+
"step": 435,
|
| 924 |
+
"token_acc": 0.8506241771102551,
|
| 925 |
+
"train_speed(iter/s)": 0.016853
|
| 926 |
+
},
|
| 927 |
+
{
|
| 928 |
+
"epoch": 1.3705722070844686,
|
| 929 |
+
"grad_norm": 0.35721340775489807,
|
| 930 |
+
"learning_rate": 1.738436806808657e-05,
|
| 931 |
+
"loss": 0.4574443817138672,
|
| 932 |
+
"memory(GiB)": 87.47,
|
| 933 |
+
"step": 440,
|
| 934 |
+
"token_acc": 0.8492071302651172,
|
| 935 |
+
"train_speed(iter/s)": 0.016851
|
| 936 |
+
},
|
| 937 |
+
{
|
| 938 |
+
"epoch": 1.3861424678863372,
|
| 939 |
+
"grad_norm": 0.3377233147621155,
|
| 940 |
+
"learning_rate": 1.731447420042321e-05,
|
| 941 |
+
"loss": 0.4555491924285889,
|
| 942 |
+
"memory(GiB)": 87.47,
|
| 943 |
+
"step": 445,
|
| 944 |
+
"token_acc": 0.8449323972958919,
|
| 945 |
+
"train_speed(iter/s)": 0.016853
|
| 946 |
+
},
|
| 947 |
+
{
|
| 948 |
+
"epoch": 1.4017127286882056,
|
| 949 |
+
"grad_norm": 0.3378112316131592,
|
| 950 |
+
"learning_rate": 1.724380328253998e-05,
|
| 951 |
+
"loss": 0.456014347076416,
|
| 952 |
+
"memory(GiB)": 87.47,
|
| 953 |
+
"step": 450,
|
| 954 |
+
"token_acc": 0.8491198343217546,
|
| 955 |
+
"train_speed(iter/s)": 0.016866
|
| 956 |
+
},
|
| 957 |
+
{
|
| 958 |
+
"epoch": 1.417282989490074,
|
| 959 |
+
"grad_norm": 0.32281750440597534,
|
| 960 |
+
"learning_rate": 1.7172362822133368e-05,
|
| 961 |
+
"loss": 0.444715690612793,
|
| 962 |
+
"memory(GiB)": 87.47,
|
| 963 |
+
"step": 455,
|
| 964 |
+
"token_acc": 0.8585264429436933,
|
| 965 |
+
"train_speed(iter/s)": 0.01687
|
| 966 |
+
},
|
| 967 |
+
{
|
| 968 |
+
"epoch": 1.4328532502919424,
|
| 969 |
+
"grad_norm": 0.3332570791244507,
|
| 970 |
+
"learning_rate": 1.7100160408651906e-05,
|
| 971 |
+
"loss": 0.46764631271362306,
|
| 972 |
+
"memory(GiB)": 87.47,
|
| 973 |
+
"step": 460,
|
| 974 |
+
"token_acc": 0.8437270092325083,
|
| 975 |
+
"train_speed(iter/s)": 0.016885
|
| 976 |
+
},
|
| 977 |
+
{
|
| 978 |
+
"epoch": 1.4484235110938108,
|
| 979 |
+
"grad_norm": 0.3404083251953125,
|
| 980 |
+
"learning_rate": 1.7027203712489902e-05,
|
| 981 |
+
"loss": 0.4540658950805664,
|
| 982 |
+
"memory(GiB)": 87.47,
|
| 983 |
+
"step": 465,
|
| 984 |
+
"token_acc": 0.8424792767766657,
|
| 985 |
+
"train_speed(iter/s)": 0.016891
|
| 986 |
+
},
|
| 987 |
+
{
|
| 988 |
+
"epoch": 1.4639937718956793,
|
| 989 |
+
"grad_norm": 0.28665056824684143,
|
| 990 |
+
"learning_rate": 1.6953500484172584e-05,
|
| 991 |
+
"loss": 0.4646796703338623,
|
| 992 |
+
"memory(GiB)": 87.47,
|
| 993 |
+
"step": 470,
|
| 994 |
+
"token_acc": 0.8517768926209016,
|
| 995 |
+
"train_speed(iter/s)": 0.016894
|
| 996 |
+
},
|
| 997 |
+
{
|
| 998 |
+
"epoch": 1.4795640326975477,
|
| 999 |
+
"grad_norm": 0.3514065742492676,
|
| 1000 |
+
"learning_rate": 1.6879058553532708e-05,
|
| 1001 |
+
"loss": 0.4555992603302002,
|
| 1002 |
+
"memory(GiB)": 87.47,
|
| 1003 |
+
"step": 475,
|
| 1004 |
+
"token_acc": 0.8537480228233454,
|
| 1005 |
+
"train_speed(iter/s)": 0.016905
|
| 1006 |
+
},
|
| 1007 |
+
{
|
| 1008 |
+
"epoch": 1.495134293499416,
|
| 1009 |
+
"grad_norm": 0.36578574776649475,
|
| 1010 |
+
"learning_rate": 1.6803885828878798e-05,
|
| 1011 |
+
"loss": 0.4544710636138916,
|
| 1012 |
+
"memory(GiB)": 87.47,
|
| 1013 |
+
"step": 480,
|
| 1014 |
+
"token_acc": 0.8525084812486251,
|
| 1015 |
+
"train_speed(iter/s)": 0.016912
|
| 1016 |
+
},
|
| 1017 |
+
{
|
| 1018 |
+
"epoch": 1.5107045543012845,
|
| 1019 |
+
"grad_norm": 0.32191744446754456,
|
| 1020 |
+
"learning_rate": 1.6727990296154962e-05,
|
| 1021 |
+
"loss": 0.4602982521057129,
|
| 1022 |
+
"memory(GiB)": 87.47,
|
| 1023 |
+
"step": 485,
|
| 1024 |
+
"token_acc": 0.8430140101913102,
|
| 1025 |
+
"train_speed(iter/s)": 0.016906
|
| 1026 |
+
},
|
| 1027 |
+
{
|
| 1028 |
+
"epoch": 1.5262748151031529,
|
| 1029 |
+
"grad_norm": 0.37726181745529175,
|
| 1030 |
+
"learning_rate": 1.665138001809255e-05,
|
| 1031 |
+
"loss": 0.45351152420043944,
|
| 1032 |
+
"memory(GiB)": 87.47,
|
| 1033 |
+
"step": 490,
|
| 1034 |
+
"token_acc": 0.8455952326181131,
|
| 1035 |
+
"train_speed(iter/s)": 0.016915
|
| 1036 |
+
},
|
| 1037 |
+
{
|
| 1038 |
+
"epoch": 1.5418450759050213,
|
| 1039 |
+
"grad_norm": 0.3360103666782379,
|
| 1040 |
+
"learning_rate": 1.657406313335358e-05,
|
| 1041 |
+
"loss": 0.4640647411346436,
|
| 1042 |
+
"memory(GiB)": 87.47,
|
| 1043 |
+
"step": 495,
|
| 1044 |
+
"token_acc": 0.8416911128839417,
|
| 1045 |
+
"train_speed(iter/s)": 0.016901
|
| 1046 |
+
},
|
| 1047 |
+
{
|
| 1048 |
+
"epoch": 1.5574153367068897,
|
| 1049 |
+
"grad_norm": 0.3354435861110687,
|
| 1050 |
+
"learning_rate": 1.6496047855666166e-05,
|
| 1051 |
+
"loss": 0.45473790168762207,
|
| 1052 |
+
"memory(GiB)": 87.47,
|
| 1053 |
+
"step": 500,
|
| 1054 |
+
"token_acc": 0.8470284591147179,
|
| 1055 |
+
"train_speed(iter/s)": 0.016903
|
| 1056 |
+
},
|
| 1057 |
+
{
|
| 1058 |
+
"epoch": 1.5574153367068897,
|
| 1059 |
+
"eval_loss": 0.4339936375617981,
|
| 1060 |
+
"eval_runtime": 49.215,
|
| 1061 |
+
"eval_samples_per_second": 8.432,
|
| 1062 |
+
"eval_steps_per_second": 1.057,
|
| 1063 |
+
"eval_token_acc": 0.8464905695468675,
|
| 1064 |
+
"step": 500
|
| 1065 |
+
},
|
| 1066 |
+
{
|
| 1067 |
+
"epoch": 1.5729855975087583,
|
| 1068 |
+
"grad_norm": 0.29624515771865845,
|
| 1069 |
+
"learning_rate": 1.641734247295189e-05,
|
| 1070 |
+
"loss": 0.44854736328125,
|
| 1071 |
+
"memory(GiB)": 87.47,
|
| 1072 |
+
"step": 505,
|
| 1073 |
+
"token_acc": 0.8512255304674686,
|
| 1074 |
+
"train_speed(iter/s)": 0.016867
|
| 1075 |
+
},
|
| 1076 |
+
{
|
| 1077 |
+
"epoch": 1.5885558583106267,
|
| 1078 |
+
"grad_norm": 0.2959994375705719,
|
| 1079 |
+
"learning_rate": 1.633795534644538e-05,
|
| 1080 |
+
"loss": 0.44970054626464845,
|
| 1081 |
+
"memory(GiB)": 87.47,
|
| 1082 |
+
"step": 510,
|
| 1083 |
+
"token_acc": 0.8495887288243693,
|
| 1084 |
+
"train_speed(iter/s)": 0.016868
|
| 1085 |
+
},
|
| 1086 |
+
{
|
| 1087 |
+
"epoch": 1.6041261191124951,
|
| 1088 |
+
"grad_norm": 0.28208568692207336,
|
| 1089 |
+
"learning_rate": 1.625789490980604e-05,
|
| 1090 |
+
"loss": 0.45240216255187987,
|
| 1091 |
+
"memory(GiB)": 87.47,
|
| 1092 |
+
"step": 515,
|
| 1093 |
+
"token_acc": 0.8508599646850458,
|
| 1094 |
+
"train_speed(iter/s)": 0.016866
|
| 1095 |
+
},
|
| 1096 |
+
{
|
| 1097 |
+
"epoch": 1.6196963799143635,
|
| 1098 |
+
"grad_norm": 0.31556007266044617,
|
| 1099 |
+
"learning_rate": 1.61771696682221e-05,
|
| 1100 |
+
"loss": 0.46805973052978517,
|
| 1101 |
+
"memory(GiB)": 87.47,
|
| 1102 |
+
"step": 520,
|
| 1103 |
+
"token_acc": 0.8385705498249266,
|
| 1104 |
+
"train_speed(iter/s)": 0.016855
|
| 1105 |
+
},
|
| 1106 |
+
{
|
| 1107 |
+
"epoch": 1.635266640716232,
|
| 1108 |
+
"grad_norm": 0.33998918533325195,
|
| 1109 |
+
"learning_rate": 1.609578819750708e-05,
|
| 1110 |
+
"loss": 0.4480471611022949,
|
| 1111 |
+
"memory(GiB)": 87.47,
|
| 1112 |
+
"step": 525,
|
| 1113 |
+
"token_acc": 0.8535245057224707,
|
| 1114 |
+
"train_speed(iter/s)": 0.016854
|
| 1115 |
+
},
|
| 1116 |
+
{
|
| 1117 |
+
"epoch": 1.6508369015181006,
|
| 1118 |
+
"grad_norm": 0.32541388273239136,
|
| 1119 |
+
"learning_rate": 1.601375914318873e-05,
|
| 1120 |
+
"loss": 0.44594502449035645,
|
| 1121 |
+
"memory(GiB)": 87.47,
|
| 1122 |
+
"step": 530,
|
| 1123 |
+
"token_acc": 0.8513384035634705,
|
| 1124 |
+
"train_speed(iter/s)": 0.01686
|
| 1125 |
+
},
|
| 1126 |
+
{
|
| 1127 |
+
"epoch": 1.666407162319969,
|
| 1128 |
+
"grad_norm": 0.3565449118614197,
|
| 1129 |
+
"learning_rate": 1.5931091219590594e-05,
|
| 1130 |
+
"loss": 0.44635515213012694,
|
| 1131 |
+
"memory(GiB)": 87.47,
|
| 1132 |
+
"step": 535,
|
| 1133 |
+
"token_acc": 0.8587149590440981,
|
| 1134 |
+
"train_speed(iter/s)": 0.016864
|
| 1135 |
+
},
|
| 1136 |
+
{
|
| 1137 |
+
"epoch": 1.6819774231218374,
|
| 1138 |
+
"grad_norm": 0.30892956256866455,
|
| 1139 |
+
"learning_rate": 1.5847793208906228e-05,
|
| 1140 |
+
"loss": 0.4479209899902344,
|
| 1141 |
+
"memory(GiB)": 87.47,
|
| 1142 |
+
"step": 540,
|
| 1143 |
+
"token_acc": 0.8477150375810529,
|
| 1144 |
+
"train_speed(iter/s)": 0.016866
|
| 1145 |
+
},
|
| 1146 |
+
{
|
| 1147 |
+
"epoch": 1.6975476839237058,
|
| 1148 |
+
"grad_norm": 0.32783636450767517,
|
| 1149 |
+
"learning_rate": 1.5763873960266236e-05,
|
| 1150 |
+
"loss": 0.4361083984375,
|
| 1151 |
+
"memory(GiB)": 87.47,
|
| 1152 |
+
"step": 545,
|
| 1153 |
+
"token_acc": 0.8581115692629165,
|
| 1154 |
+
"train_speed(iter/s)": 0.01688
|
| 1155 |
+
},
|
| 1156 |
+
{
|
| 1157 |
+
"epoch": 1.7131179447255742,
|
| 1158 |
+
"grad_norm": 0.31219062209129333,
|
| 1159 |
+
"learning_rate": 1.567934238879819e-05,
|
| 1160 |
+
"loss": 0.44908871650695803,
|
| 1161 |
+
"memory(GiB)": 87.47,
|
| 1162 |
+
"step": 550,
|
| 1163 |
+
"token_acc": 0.8354629470446383,
|
| 1164 |
+
"train_speed(iter/s)": 0.01688
|
| 1165 |
+
},
|
| 1166 |
+
{
|
| 1167 |
+
"epoch": 1.7286882055274426,
|
| 1168 |
+
"grad_norm": 0.3076675534248352,
|
| 1169 |
+
"learning_rate": 1.5594207474679533e-05,
|
| 1170 |
+
"loss": 0.44863643646240237,
|
| 1171 |
+
"memory(GiB)": 87.47,
|
| 1172 |
+
"step": 555,
|
| 1173 |
+
"token_acc": 0.8530024926954223,
|
| 1174 |
+
"train_speed(iter/s)": 0.01688
|
| 1175 |
+
},
|
| 1176 |
+
{
|
| 1177 |
+
"epoch": 1.744258466329311,
|
| 1178 |
+
"grad_norm": 0.35262957215309143,
|
| 1179 |
+
"learning_rate": 1.5508478262183564e-05,
|
| 1180 |
+
"loss": 0.44416370391845705,
|
| 1181 |
+
"memory(GiB)": 87.47,
|
| 1182 |
+
"step": 560,
|
| 1183 |
+
"token_acc": 0.8449086194172916,
|
| 1184 |
+
"train_speed(iter/s)": 0.016884
|
| 1185 |
+
},
|
| 1186 |
+
{
|
| 1187 |
+
"epoch": 1.7598287271311794,
|
| 1188 |
+
"grad_norm": 0.31981098651885986,
|
| 1189 |
+
"learning_rate": 1.5422163858718632e-05,
|
| 1190 |
+
"loss": 0.4421844482421875,
|
| 1191 |
+
"memory(GiB)": 87.47,
|
| 1192 |
+
"step": 565,
|
| 1193 |
+
"token_acc": 0.8547054363189448,
|
| 1194 |
+
"train_speed(iter/s)": 0.016885
|
| 1195 |
+
},
|
| 1196 |
+
{
|
| 1197 |
+
"epoch": 1.7753989879330478,
|
| 1198 |
+
"grad_norm": 0.3417418301105499,
|
| 1199 |
+
"learning_rate": 1.533527343386062e-05,
|
| 1200 |
+
"loss": 0.45076637268066405,
|
| 1201 |
+
"memory(GiB)": 87.47,
|
| 1202 |
+
"step": 570,
|
| 1203 |
+
"token_acc": 0.8389380153741954,
|
| 1204 |
+
"train_speed(iter/s)": 0.016882
|
| 1205 |
+
},
|
| 1206 |
+
{
|
| 1207 |
+
"epoch": 1.7909692487349163,
|
| 1208 |
+
"grad_norm": 0.31734615564346313,
|
| 1209 |
+
"learning_rate": 1.5247816218378808e-05,
|
| 1210 |
+
"loss": 0.43622050285339353,
|
| 1211 |
+
"memory(GiB)": 87.47,
|
| 1212 |
+
"step": 575,
|
| 1213 |
+
"token_acc": 0.8538195434018485,
|
| 1214 |
+
"train_speed(iter/s)": 0.0169
|
| 1215 |
+
},
|
| 1216 |
+
{
|
| 1217 |
+
"epoch": 1.8065395095367847,
|
| 1218 |
+
"grad_norm": 0.34167781472206116,
|
| 1219 |
+
"learning_rate": 1.5159801503255245e-05,
|
| 1220 |
+
"loss": 0.4417697906494141,
|
| 1221 |
+
"memory(GiB)": 87.47,
|
| 1222 |
+
"step": 580,
|
| 1223 |
+
"token_acc": 0.85343586541145,
|
| 1224 |
+
"train_speed(iter/s)": 0.016913
|
| 1225 |
+
},
|
| 1226 |
+
{
|
| 1227 |
+
"epoch": 1.822109770338653,
|
| 1228 |
+
"grad_norm": 0.3014916777610779,
|
| 1229 |
+
"learning_rate": 1.5071238638697731e-05,
|
| 1230 |
+
"loss": 0.464891242980957,
|
| 1231 |
+
"memory(GiB)": 87.47,
|
| 1232 |
+
"step": 585,
|
| 1233 |
+
"token_acc": 0.8448814913208093,
|
| 1234 |
+
"train_speed(iter/s)": 0.016921
|
| 1235 |
+
},
|
| 1236 |
+
{
|
| 1237 |
+
"epoch": 1.8376800311405215,
|
| 1238 |
+
"grad_norm": 0.28431716561317444,
|
| 1239 |
+
"learning_rate": 1.4982137033146508e-05,
|
| 1240 |
+
"loss": 0.43960394859313967,
|
| 1241 |
+
"memory(GiB)": 87.47,
|
| 1242 |
+
"step": 590,
|
| 1243 |
+
"token_acc": 0.8527028466591703,
|
| 1244 |
+
"train_speed(iter/s)": 0.016926
|
| 1245 |
+
},
|
| 1246 |
+
{
|
| 1247 |
+
"epoch": 1.8532502919423899,
|
| 1248 |
+
"grad_norm": 0.2890400290489197,
|
| 1249 |
+
"learning_rate": 1.4892506152274743e-05,
|
| 1250 |
+
"loss": 0.43685274124145507,
|
| 1251 |
+
"memory(GiB)": 87.47,
|
| 1252 |
+
"step": 595,
|
| 1253 |
+
"token_acc": 0.8587665608002407,
|
| 1254 |
+
"train_speed(iter/s)": 0.016925
|
| 1255 |
+
},
|
| 1256 |
+
{
|
| 1257 |
+
"epoch": 1.8688205527442585,
|
| 1258 |
+
"grad_norm": 0.3061586916446686,
|
| 1259 |
+
"learning_rate": 1.4802355517982956e-05,
|
| 1260 |
+
"loss": 0.45107498168945315,
|
| 1261 |
+
"memory(GiB)": 87.47,
|
| 1262 |
+
"step": 600,
|
| 1263 |
+
"token_acc": 0.8508957683688835,
|
| 1264 |
+
"train_speed(iter/s)": 0.01692
|
| 1265 |
+
},
|
| 1266 |
+
{
|
| 1267 |
+
"epoch": 1.8688205527442585,
|
| 1268 |
+
"eval_loss": 0.4256907105445862,
|
| 1269 |
+
"eval_runtime": 48.605,
|
| 1270 |
+
"eval_samples_per_second": 8.538,
|
| 1271 |
+
"eval_steps_per_second": 1.07,
|
| 1272 |
+
"eval_token_acc": 0.8487490155926254,
|
| 1273 |
+
"step": 600
|
| 1274 |
+
},
|
| 1275 |
+
{
|
| 1276 |
+
"epoch": 1.884390813546127,
|
| 1277 |
+
"grad_norm": 0.3080097436904907,
|
| 1278 |
+
"learning_rate": 1.4711694707387459e-05,
|
| 1279 |
+
"loss": 0.4596552848815918,
|
| 1280 |
+
"memory(GiB)": 87.47,
|
| 1281 |
+
"step": 605,
|
| 1282 |
+
"token_acc": 0.8494166730021033,
|
| 1283 |
+
"train_speed(iter/s)": 0.016891
|
| 1284 |
+
},
|
| 1285 |
+
{
|
| 1286 |
+
"epoch": 1.8999610743479953,
|
| 1287 |
+
"grad_norm": 0.3166070282459259,
|
| 1288 |
+
"learning_rate": 1.462053335180294e-05,
|
| 1289 |
+
"loss": 0.44292964935302737,
|
| 1290 |
+
"memory(GiB)": 87.47,
|
| 1291 |
+
"step": 610,
|
| 1292 |
+
"token_acc": 0.8526988947012526,
|
| 1293 |
+
"train_speed(iter/s)": 0.016896
|
| 1294 |
+
},
|
| 1295 |
+
{
|
| 1296 |
+
"epoch": 1.9155313351498637,
|
| 1297 |
+
"grad_norm": 0.29401150345802307,
|
| 1298 |
+
"learning_rate": 1.452888113571929e-05,
|
| 1299 |
+
"loss": 0.4381908893585205,
|
| 1300 |
+
"memory(GiB)": 87.47,
|
| 1301 |
+
"step": 615,
|
| 1302 |
+
"token_acc": 0.8555842110978391,
|
| 1303 |
+
"train_speed(iter/s)": 0.016908
|
| 1304 |
+
},
|
| 1305 |
+
{
|
| 1306 |
+
"epoch": 1.9311015959517321,
|
| 1307 |
+
"grad_norm": 0.3151325285434723,
|
| 1308 |
+
"learning_rate": 1.4436747795772752e-05,
|
| 1309 |
+
"loss": 0.4259210109710693,
|
| 1310 |
+
"memory(GiB)": 87.47,
|
| 1311 |
+
"step": 620,
|
| 1312 |
+
"token_acc": 0.8545294649153147,
|
| 1313 |
+
"train_speed(iter/s)": 0.016922
|
| 1314 |
+
},
|
| 1315 |
+
{
|
| 1316 |
+
"epoch": 1.9466718567536008,
|
| 1317 |
+
"grad_norm": 0.29976552724838257,
|
| 1318 |
+
"learning_rate": 1.4344143119711585e-05,
|
| 1319 |
+
"loss": 0.44890317916870115,
|
| 1320 |
+
"memory(GiB)": 87.47,
|
| 1321 |
+
"step": 625,
|
| 1322 |
+
"token_acc": 0.8479431788087383,
|
| 1323 |
+
"train_speed(iter/s)": 0.016918
|
| 1324 |
+
},
|
| 1325 |
+
{
|
| 1326 |
+
"epoch": 1.9622421175554692,
|
| 1327 |
+
"grad_norm": 0.3326264023780823,
|
| 1328 |
+
"learning_rate": 1.4251076945356233e-05,
|
| 1329 |
+
"loss": 0.4403618335723877,
|
| 1330 |
+
"memory(GiB)": 87.47,
|
| 1331 |
+
"step": 630,
|
| 1332 |
+
"token_acc": 0.8553420249762108,
|
| 1333 |
+
"train_speed(iter/s)": 0.016916
|
| 1334 |
+
},
|
| 1335 |
+
{
|
| 1336 |
+
"epoch": 1.9778123783573376,
|
| 1337 |
+
"grad_norm": 0.3118704557418823,
|
| 1338 |
+
"learning_rate": 1.4157559159554244e-05,
|
| 1339 |
+
"loss": 0.4499207496643066,
|
| 1340 |
+
"memory(GiB)": 87.47,
|
| 1341 |
+
"step": 635,
|
| 1342 |
+
"token_acc": 0.8585684300402007,
|
| 1343 |
+
"train_speed(iter/s)": 0.01692
|
| 1344 |
+
},
|
| 1345 |
+
{
|
| 1346 |
+
"epoch": 1.993382639159206,
|
| 1347 |
+
"grad_norm": 0.31018197536468506,
|
| 1348 |
+
"learning_rate": 1.4063599697129912e-05,
|
| 1349 |
+
"loss": 0.43601245880126954,
|
| 1350 |
+
"memory(GiB)": 87.47,
|
| 1351 |
+
"step": 640,
|
| 1352 |
+
"token_acc": 0.8492618761832873,
|
| 1353 |
+
"train_speed(iter/s)": 0.016918
|
| 1354 |
+
},
|
| 1355 |
+
{
|
| 1356 |
+
"epoch": 2.009342156481121,
|
| 1357 |
+
"grad_norm": 0.4045466482639313,
|
| 1358 |
+
"learning_rate": 1.3969208539828873e-05,
|
| 1359 |
+
"loss": 0.5253468990325928,
|
| 1360 |
+
"memory(GiB)": 87.47,
|
| 1361 |
+
"step": 645,
|
| 1362 |
+
"token_acc": 0.8468511299166429,
|
| 1363 |
+
"train_speed(iter/s)": 0.016918
|
| 1364 |
+
},
|
| 1365 |
+
{
|
| 1366 |
+
"epoch": 2.0249124172829895,
|
| 1367 |
+
"grad_norm": 0.35480746626853943,
|
| 1368 |
+
"learning_rate": 1.3874395715257697e-05,
|
| 1369 |
+
"loss": 0.4091975212097168,
|
| 1370 |
+
"memory(GiB)": 87.47,
|
| 1371 |
+
"step": 650,
|
| 1372 |
+
"token_acc": 0.8652064686351988,
|
| 1373 |
+
"train_speed(iter/s)": 0.016917
|
| 1374 |
+
},
|
| 1375 |
+
{
|
| 1376 |
+
"epoch": 2.040482678084858,
|
| 1377 |
+
"grad_norm": 0.304674357175827,
|
| 1378 |
+
"learning_rate": 1.3779171295818606e-05,
|
| 1379 |
+
"loss": 0.4048311233520508,
|
| 1380 |
+
"memory(GiB)": 87.47,
|
| 1381 |
+
"step": 655,
|
| 1382 |
+
"token_acc": 0.860902665654438,
|
| 1383 |
+
"train_speed(iter/s)": 0.016915
|
| 1384 |
+
},
|
| 1385 |
+
{
|
| 1386 |
+
"epoch": 2.0560529388867264,
|
| 1387 |
+
"grad_norm": 0.34621867537498474,
|
| 1388 |
+
"learning_rate": 1.3683545397639433e-05,
|
| 1389 |
+
"loss": 0.4079150199890137,
|
| 1390 |
+
"memory(GiB)": 87.47,
|
| 1391 |
+
"step": 660,
|
| 1392 |
+
"token_acc": 0.8592790169293577,
|
| 1393 |
+
"train_speed(iter/s)": 0.01693
|
| 1394 |
+
},
|
| 1395 |
+
{
|
| 1396 |
+
"epoch": 2.0716231996885948,
|
| 1397 |
+
"grad_norm": 0.31017231941223145,
|
| 1398 |
+
"learning_rate": 1.3587528179498946e-05,
|
| 1399 |
+
"loss": 0.4034367561340332,
|
| 1400 |
+
"memory(GiB)": 87.47,
|
| 1401 |
+
"step": 665,
|
| 1402 |
+
"token_acc": 0.8592915642451773,
|
| 1403 |
+
"train_speed(iter/s)": 0.016927
|
| 1404 |
+
},
|
| 1405 |
+
{
|
| 1406 |
+
"epoch": 2.087193460490463,
|
| 1407 |
+
"grad_norm": 0.34436559677124023,
|
| 1408 |
+
"learning_rate": 1.3491129841747632e-05,
|
| 1409 |
+
"loss": 0.40624065399169923,
|
| 1410 |
+
"memory(GiB)": 87.47,
|
| 1411 |
+
"step": 670,
|
| 1412 |
+
"token_acc": 0.86241849685157,
|
| 1413 |
+
"train_speed(iter/s)": 0.016928
|
| 1414 |
+
},
|
| 1415 |
+
{
|
| 1416 |
+
"epoch": 2.1027637212923316,
|
| 1417 |
+
"grad_norm": 0.32138824462890625,
|
| 1418 |
+
"learning_rate": 1.3394360625224067e-05,
|
| 1419 |
+
"loss": 0.4064358711242676,
|
| 1420 |
+
"memory(GiB)": 87.47,
|
| 1421 |
+
"step": 675,
|
| 1422 |
+
"token_acc": 0.864174034962998,
|
| 1423 |
+
"train_speed(iter/s)": 0.016932
|
| 1424 |
+
},
|
| 1425 |
+
{
|
| 1426 |
+
"epoch": 2.1183339820942,
|
| 1427 |
+
"grad_norm": 0.32160255312919617,
|
| 1428 |
+
"learning_rate": 1.3297230810166979e-05,
|
| 1429 |
+
"loss": 0.4131148338317871,
|
| 1430 |
+
"memory(GiB)": 87.47,
|
| 1431 |
+
"step": 680,
|
| 1432 |
+
"token_acc": 0.8563570810274059,
|
| 1433 |
+
"train_speed(iter/s)": 0.016933
|
| 1434 |
+
},
|
| 1435 |
+
{
|
| 1436 |
+
"epoch": 2.1339042428960684,
|
| 1437 |
+
"grad_norm": 0.29857733845710754,
|
| 1438 |
+
"learning_rate": 1.3199750715123144e-05,
|
| 1439 |
+
"loss": 0.40442190170288084,
|
| 1440 |
+
"memory(GiB)": 87.47,
|
| 1441 |
+
"step": 685,
|
| 1442 |
+
"token_acc": 0.8606646118780595,
|
| 1443 |
+
"train_speed(iter/s)": 0.016936
|
| 1444 |
+
},
|
| 1445 |
+
{
|
| 1446 |
+
"epoch": 2.149474503697937,
|
| 1447 |
+
"grad_norm": 0.3053974211215973,
|
| 1448 |
+
"learning_rate": 1.3101930695851186e-05,
|
| 1449 |
+
"loss": 0.4091023921966553,
|
| 1450 |
+
"memory(GiB)": 87.47,
|
| 1451 |
+
"step": 690,
|
| 1452 |
+
"token_acc": 0.8537588049550644,
|
| 1453 |
+
"train_speed(iter/s)": 0.016931
|
| 1454 |
+
},
|
| 1455 |
+
{
|
| 1456 |
+
"epoch": 2.165044764499805,
|
| 1457 |
+
"grad_norm": 0.32609260082244873,
|
| 1458 |
+
"learning_rate": 1.300378114422144e-05,
|
| 1459 |
+
"loss": 0.4144451141357422,
|
| 1460 |
+
"memory(GiB)": 88.68,
|
| 1461 |
+
"step": 695,
|
| 1462 |
+
"token_acc": 0.8581630992954251,
|
| 1463 |
+
"train_speed(iter/s)": 0.016933
|
| 1464 |
+
},
|
| 1465 |
+
{
|
| 1466 |
+
"epoch": 2.1806150253016736,
|
| 1467 |
+
"grad_norm": 0.2846038043498993,
|
| 1468 |
+
"learning_rate": 1.2905312487111981e-05,
|
| 1469 |
+
"loss": 0.4058229923248291,
|
| 1470 |
+
"memory(GiB)": 88.68,
|
| 1471 |
+
"step": 700,
|
| 1472 |
+
"token_acc": 0.8490695870940025,
|
| 1473 |
+
"train_speed(iter/s)": 0.016924
|
| 1474 |
+
},
|
| 1475 |
+
{
|
| 1476 |
+
"epoch": 2.1806150253016736,
|
| 1477 |
+
"eval_loss": 0.4248170256614685,
|
| 1478 |
+
"eval_runtime": 48.4312,
|
| 1479 |
+
"eval_samples_per_second": 8.569,
|
| 1480 |
+
"eval_steps_per_second": 1.074,
|
| 1481 |
+
"eval_token_acc": 0.8499472561067161,
|
| 1482 |
+
"step": 700
|
| 1483 |
+
},
|
| 1484 |
+
{
|
| 1485 |
+
"epoch": 2.1961852861035425,
|
| 1486 |
+
"grad_norm": 0.3057588040828705,
|
| 1487 |
+
"learning_rate": 1.2806535185300931e-05,
|
| 1488 |
+
"loss": 0.39852018356323243,
|
| 1489 |
+
"memory(GiB)": 88.68,
|
| 1490 |
+
"step": 705,
|
| 1491 |
+
"token_acc": 0.859068653718373,
|
| 1492 |
+
"train_speed(iter/s)": 0.016907
|
| 1493 |
+
},
|
| 1494 |
+
{
|
| 1495 |
+
"epoch": 2.211755546905411,
|
| 1496 |
+
"grad_norm": 0.3422738015651703,
|
| 1497 |
+
"learning_rate": 1.2707459732355152e-05,
|
| 1498 |
+
"loss": 0.40930471420288084,
|
| 1499 |
+
"memory(GiB)": 88.68,
|
| 1500 |
+
"step": 710,
|
| 1501 |
+
"token_acc": 0.8593000519385292,
|
| 1502 |
+
"train_speed(iter/s)": 0.016916
|
| 1503 |
+
},
|
| 1504 |
+
{
|
| 1505 |
+
"epoch": 2.2273258077072793,
|
| 1506 |
+
"grad_norm": 0.32292571663856506,
|
| 1507 |
+
"learning_rate": 1.260809665351547e-05,
|
| 1508 |
+
"loss": 0.40809078216552735,
|
| 1509 |
+
"memory(GiB)": 88.68,
|
| 1510 |
+
"step": 715,
|
| 1511 |
+
"token_acc": 0.8623604255075267,
|
| 1512 |
+
"train_speed(iter/s)": 0.016914
|
| 1513 |
+
},
|
| 1514 |
+
{
|
| 1515 |
+
"epoch": 2.2428960685091477,
|
| 1516 |
+
"grad_norm": 0.30992391705513,
|
| 1517 |
+
"learning_rate": 1.2508456504578538e-05,
|
| 1518 |
+
"loss": 0.40337481498718264,
|
| 1519 |
+
"memory(GiB)": 88.68,
|
| 1520 |
+
"step": 720,
|
| 1521 |
+
"token_acc": 0.8515773998256702,
|
| 1522 |
+
"train_speed(iter/s)": 0.016923
|
| 1523 |
+
},
|
| 1524 |
+
{
|
| 1525 |
+
"epoch": 2.258466329311016,
|
| 1526 |
+
"grad_norm": 0.3296166956424713,
|
| 1527 |
+
"learning_rate": 1.2408549870775432e-05,
|
| 1528 |
+
"loss": 0.4040327548980713,
|
| 1529 |
+
"memory(GiB)": 88.68,
|
| 1530 |
+
"step": 725,
|
| 1531 |
+
"token_acc": 0.870754853952457,
|
| 1532 |
+
"train_speed(iter/s)": 0.016915
|
| 1533 |
+
},
|
| 1534 |
+
{
|
| 1535 |
+
"epoch": 2.2740365901128845,
|
| 1536 |
+
"grad_norm": 0.3059770166873932,
|
| 1537 |
+
"learning_rate": 1.230838736564715e-05,
|
| 1538 |
+
"loss": 0.388106107711792,
|
| 1539 |
+
"memory(GiB)": 88.68,
|
| 1540 |
+
"step": 730,
|
| 1541 |
+
"token_acc": 0.8662276135612913,
|
| 1542 |
+
"train_speed(iter/s)": 0.016923
|
| 1543 |
+
},
|
| 1544 |
+
{
|
| 1545 |
+
"epoch": 2.289606850914753,
|
| 1546 |
+
"grad_norm": 0.29819902777671814,
|
| 1547 |
+
"learning_rate": 1.2207979629917061e-05,
|
| 1548 |
+
"loss": 0.415024995803833,
|
| 1549 |
+
"memory(GiB)": 88.68,
|
| 1550 |
+
"step": 735,
|
| 1551 |
+
"token_acc": 0.8562834300703839,
|
| 1552 |
+
"train_speed(iter/s)": 0.016919
|
| 1553 |
+
},
|
| 1554 |
+
{
|
| 1555 |
+
"epoch": 2.3051771117166213,
|
| 1556 |
+
"grad_norm": 0.31571272015571594,
|
| 1557 |
+
"learning_rate": 1.2107337330360533e-05,
|
| 1558 |
+
"loss": 0.4108760833740234,
|
| 1559 |
+
"memory(GiB)": 88.68,
|
| 1560 |
+
"step": 740,
|
| 1561 |
+
"token_acc": 0.8624641478349758,
|
| 1562 |
+
"train_speed(iter/s)": 0.016924
|
| 1563 |
+
},
|
| 1564 |
+
{
|
| 1565 |
+
"epoch": 2.3207473725184897,
|
| 1566 |
+
"grad_norm": 0.30818915367126465,
|
| 1567 |
+
"learning_rate": 1.2006471158671702e-05,
|
| 1568 |
+
"loss": 0.41235151290893557,
|
| 1569 |
+
"memory(GiB)": 88.68,
|
| 1570 |
+
"step": 745,
|
| 1571 |
+
"token_acc": 0.8561366178899871,
|
| 1572 |
+
"train_speed(iter/s)": 0.01693
|
| 1573 |
+
},
|
| 1574 |
+
{
|
| 1575 |
+
"epoch": 2.336317633320358,
|
| 1576 |
+
"grad_norm": 0.293542742729187,
|
| 1577 |
+
"learning_rate": 1.1905391830327685e-05,
|
| 1578 |
+
"loss": 0.418719482421875,
|
| 1579 |
+
"memory(GiB)": 88.68,
|
| 1580 |
+
"step": 750,
|
| 1581 |
+
"token_acc": 0.8582308714036777,
|
| 1582 |
+
"train_speed(iter/s)": 0.016924
|
| 1583 |
+
},
|
| 1584 |
+
{
|
| 1585 |
+
"epoch": 2.3518878941222265,
|
| 1586 |
+
"grad_norm": 0.2972683310508728,
|
| 1587 |
+
"learning_rate": 1.180411008345021e-05,
|
| 1588 |
+
"loss": 0.40260977745056153,
|
| 1589 |
+
"memory(GiB)": 88.68,
|
| 1590 |
+
"step": 755,
|
| 1591 |
+
"token_acc": 0.8619026578825308,
|
| 1592 |
+
"train_speed(iter/s)": 0.016922
|
| 1593 |
+
},
|
| 1594 |
+
{
|
| 1595 |
+
"epoch": 2.367458154924095,
|
| 1596 |
+
"grad_norm": 0.2991423010826111,
|
| 1597 |
+
"learning_rate": 1.1702636677664844e-05,
|
| 1598 |
+
"loss": 0.399456262588501,
|
| 1599 |
+
"memory(GiB)": 88.68,
|
| 1600 |
+
"step": 760,
|
| 1601 |
+
"token_acc": 0.8674852160245288,
|
| 1602 |
+
"train_speed(iter/s)": 0.016926
|
| 1603 |
+
},
|
| 1604 |
+
{
|
| 1605 |
+
"epoch": 2.3830284157259634,
|
| 1606 |
+
"grad_norm": 0.28033456206321716,
|
| 1607 |
+
"learning_rate": 1.1600982392957978e-05,
|
| 1608 |
+
"loss": 0.40012359619140625,
|
| 1609 |
+
"memory(GiB)": 88.68,
|
| 1610 |
+
"step": 765,
|
| 1611 |
+
"token_acc": 0.8569271295496779,
|
| 1612 |
+
"train_speed(iter/s)": 0.016929
|
| 1613 |
+
},
|
| 1614 |
+
{
|
| 1615 |
+
"epoch": 2.3985986765278318,
|
| 1616 |
+
"grad_norm": 0.30392777919769287,
|
| 1617 |
+
"learning_rate": 1.1499158028531585e-05,
|
| 1618 |
+
"loss": 0.4144479274749756,
|
| 1619 |
+
"memory(GiB)": 88.68,
|
| 1620 |
+
"step": 770,
|
| 1621 |
+
"token_acc": 0.8572535511903578,
|
| 1622 |
+
"train_speed(iter/s)": 0.016923
|
| 1623 |
+
},
|
| 1624 |
+
{
|
| 1625 |
+
"epoch": 2.4141689373297,
|
| 1626 |
+
"grad_norm": 0.27619481086730957,
|
| 1627 |
+
"learning_rate": 1.1397174401656009e-05,
|
| 1628 |
+
"loss": 0.4138012886047363,
|
| 1629 |
+
"memory(GiB)": 88.68,
|
| 1630 |
+
"step": 775,
|
| 1631 |
+
"token_acc": 0.8541658201074043,
|
| 1632 |
+
"train_speed(iter/s)": 0.016917
|
| 1633 |
+
},
|
| 1634 |
+
{
|
| 1635 |
+
"epoch": 2.4297391981315686,
|
| 1636 |
+
"grad_norm": 0.27977001667022705,
|
| 1637 |
+
"learning_rate": 1.1295042346520755e-05,
|
| 1638 |
+
"loss": 0.4025775909423828,
|
| 1639 |
+
"memory(GiB)": 88.68,
|
| 1640 |
+
"step": 780,
|
| 1641 |
+
"token_acc": 0.869224874229117,
|
| 1642 |
+
"train_speed(iter/s)": 0.016916
|
| 1643 |
+
},
|
| 1644 |
+
{
|
| 1645 |
+
"epoch": 2.445309458933437,
|
| 1646 |
+
"grad_norm": 0.35665157437324524,
|
| 1647 |
+
"learning_rate": 1.1192772713083557e-05,
|
| 1648 |
+
"loss": 0.4065700054168701,
|
| 1649 |
+
"memory(GiB)": 88.68,
|
| 1650 |
+
"step": 785,
|
| 1651 |
+
"token_acc": 0.8511265419646967,
|
| 1652 |
+
"train_speed(iter/s)": 0.016918
|
| 1653 |
+
},
|
| 1654 |
+
{
|
| 1655 |
+
"epoch": 2.4608797197353054,
|
| 1656 |
+
"grad_norm": 0.3625037670135498,
|
| 1657 |
+
"learning_rate": 1.1090376365917724e-05,
|
| 1658 |
+
"loss": 0.40373077392578127,
|
| 1659 |
+
"memory(GiB)": 88.68,
|
| 1660 |
+
"step": 790,
|
| 1661 |
+
"token_acc": 0.8655929839902706,
|
| 1662 |
+
"train_speed(iter/s)": 0.016916
|
| 1663 |
+
},
|
| 1664 |
+
{
|
| 1665 |
+
"epoch": 2.476449980537174,
|
| 1666 |
+
"grad_norm": 0.30906039476394653,
|
| 1667 |
+
"learning_rate": 1.0987864183057943e-05,
|
| 1668 |
+
"loss": 0.4046307563781738,
|
| 1669 |
+
"memory(GiB)": 88.68,
|
| 1670 |
+
"step": 795,
|
| 1671 |
+
"token_acc": 0.8631747227753758,
|
| 1672 |
+
"train_speed(iter/s)": 0.016918
|
| 1673 |
+
},
|
| 1674 |
+
{
|
| 1675 |
+
"epoch": 2.492020241339042,
|
| 1676 |
+
"grad_norm": 0.2804671823978424,
|
| 1677 |
+
"learning_rate": 1.088524705484466e-05,
|
| 1678 |
+
"loss": 0.39722390174865724,
|
| 1679 |
+
"memory(GiB)": 88.68,
|
| 1680 |
+
"step": 800,
|
| 1681 |
+
"token_acc": 0.8720521927504471,
|
| 1682 |
+
"train_speed(iter/s)": 0.016919
|
| 1683 |
+
},
|
| 1684 |
+
{
|
| 1685 |
+
"epoch": 2.492020241339042,
|
| 1686 |
+
"eval_loss": 0.4193665385246277,
|
| 1687 |
+
"eval_runtime": 49.4368,
|
| 1688 |
+
"eval_samples_per_second": 8.395,
|
| 1689 |
+
"eval_steps_per_second": 1.052,
|
| 1690 |
+
"eval_token_acc": 0.851378460924476,
|
| 1691 |
+
"step": 800
|
| 1692 |
+
},
|
| 1693 |
+
{
|
| 1694 |
+
"epoch": 2.5075905021409106,
|
| 1695 |
+
"grad_norm": 0.276696115732193,
|
| 1696 |
+
"learning_rate": 1.0782535882767144e-05,
|
| 1697 |
+
"loss": 0.40638461112976076,
|
| 1698 |
+
"memory(GiB)": 88.68,
|
| 1699 |
+
"step": 805,
|
| 1700 |
+
"token_acc": 0.8657145358437709,
|
| 1701 |
+
"train_speed(iter/s)": 0.016894
|
| 1702 |
+
},
|
| 1703 |
+
{
|
| 1704 |
+
"epoch": 2.523160762942779,
|
| 1705 |
+
"grad_norm": 0.29846805334091187,
|
| 1706 |
+
"learning_rate": 1.067974157830539e-05,
|
| 1707 |
+
"loss": 0.40010814666748046,
|
| 1708 |
+
"memory(GiB)": 88.68,
|
| 1709 |
+
"step": 810,
|
| 1710 |
+
"token_acc": 0.8588209819736914,
|
| 1711 |
+
"train_speed(iter/s)": 0.016899
|
| 1712 |
+
},
|
| 1713 |
+
{
|
| 1714 |
+
"epoch": 2.538731023744648,
|
| 1715 |
+
"grad_norm": 0.27926602959632874,
|
| 1716 |
+
"learning_rate": 1.0576875061770913e-05,
|
| 1717 |
+
"loss": 0.4041747570037842,
|
| 1718 |
+
"memory(GiB)": 88.68,
|
| 1719 |
+
"step": 815,
|
| 1720 |
+
"token_acc": 0.8594588904095168,
|
| 1721 |
+
"train_speed(iter/s)": 0.016903
|
| 1722 |
+
},
|
| 1723 |
+
{
|
| 1724 |
+
"epoch": 2.5543012845465163,
|
| 1725 |
+
"grad_norm": 0.31468990445137024,
|
| 1726 |
+
"learning_rate": 1.0473947261146654e-05,
|
| 1727 |
+
"loss": 0.3997108697891235,
|
| 1728 |
+
"memory(GiB)": 88.68,
|
| 1729 |
+
"step": 820,
|
| 1730 |
+
"token_acc": 0.864746443340764,
|
| 1731 |
+
"train_speed(iter/s)": 0.016909
|
| 1732 |
+
},
|
| 1733 |
+
{
|
| 1734 |
+
"epoch": 2.5698715453483847,
|
| 1735 |
+
"grad_norm": 0.317765474319458,
|
| 1736 |
+
"learning_rate": 1.0370969110926052e-05,
|
| 1737 |
+
"loss": 0.40914144515991213,
|
| 1738 |
+
"memory(GiB)": 88.68,
|
| 1739 |
+
"step": 825,
|
| 1740 |
+
"token_acc": 0.8667216600585897,
|
| 1741 |
+
"train_speed(iter/s)": 0.016911
|
| 1742 |
+
},
|
| 1743 |
+
{
|
| 1744 |
+
"epoch": 2.585441806150253,
|
| 1745 |
+
"grad_norm": 0.2770572900772095,
|
| 1746 |
+
"learning_rate": 1.0267951550951406e-05,
|
| 1747 |
+
"loss": 0.4096653461456299,
|
| 1748 |
+
"memory(GiB)": 88.68,
|
| 1749 |
+
"step": 830,
|
| 1750 |
+
"token_acc": 0.8620899938038338,
|
| 1751 |
+
"train_speed(iter/s)": 0.016914
|
| 1752 |
+
},
|
| 1753 |
+
{
|
| 1754 |
+
"epoch": 2.6010120669521215,
|
| 1755 |
+
"grad_norm": 0.2760813534259796,
|
| 1756 |
+
"learning_rate": 1.0164905525251695e-05,
|
| 1757 |
+
"loss": 0.3938852310180664,
|
| 1758 |
+
"memory(GiB)": 88.68,
|
| 1759 |
+
"step": 835,
|
| 1760 |
+
"token_acc": 0.8624401122397716,
|
| 1761 |
+
"train_speed(iter/s)": 0.016914
|
| 1762 |
+
},
|
| 1763 |
+
{
|
| 1764 |
+
"epoch": 2.61658232775399,
|
| 1765 |
+
"grad_norm": 0.2749018967151642,
|
| 1766 |
+
"learning_rate": 1.0061841980879941e-05,
|
| 1767 |
+
"loss": 0.4151924133300781,
|
| 1768 |
+
"memory(GiB)": 88.68,
|
| 1769 |
+
"step": 840,
|
| 1770 |
+
"token_acc": 0.8581742617267449,
|
| 1771 |
+
"train_speed(iter/s)": 0.016909
|
| 1772 |
+
},
|
| 1773 |
+
{
|
| 1774 |
+
"epoch": 2.6321525885558583,
|
| 1775 |
+
"grad_norm": 0.30041322112083435,
|
| 1776 |
+
"learning_rate": 9.958771866750266e-06,
|
| 1777 |
+
"loss": 0.4036086082458496,
|
| 1778 |
+
"memory(GiB)": 88.68,
|
| 1779 |
+
"step": 845,
|
| 1780 |
+
"token_acc": 0.8535083801509132,
|
| 1781 |
+
"train_speed(iter/s)": 0.016911
|
| 1782 |
+
},
|
| 1783 |
+
{
|
| 1784 |
+
"epoch": 2.6477228493577267,
|
| 1785 |
+
"grad_norm": 0.3127138018608093,
|
| 1786 |
+
"learning_rate": 9.855706132474719e-06,
|
| 1787 |
+
"loss": 0.39623475074768066,
"memory(GiB)": 88.68, "step": 850, "token_acc": 0.8613897832181449, "train_speed(iter/s)": 0.016914},
{"epoch": 2.663293110159595, "grad_norm": 0.3154863119125366, "learning_rate": 9.752655727200051e-06, "loss": 0.40503616333007814, "memory(GiB)": 88.68, "step": 855, "token_acc": 0.8705114688096711, "train_speed(iter/s)": 0.016916},
{"epoch": 2.6788633709614635, "grad_norm": 0.30002886056900024, "learning_rate": 9.649631598444557e-06, "loss": 0.39531519412994387, "memory(GiB)": 88.68, "step": 860, "token_acc": 0.8739137447179123, "train_speed(iter/s)": 0.01692},
{"epoch": 2.694433631763332, "grad_norm": 0.2766549289226532, "learning_rate": 9.54664469093505e-06, "loss": 0.4008350372314453, "memory(GiB)": 88.68, "step": 865, "token_acc": 0.8610019064176141, "train_speed(iter/s)": 0.016918},
{"epoch": 2.7100038925652004, "grad_norm": 0.28013867139816284, "learning_rate": 9.443705945444158e-06, "loss": 0.40520267486572265, "memory(GiB)": 88.68, "step": 870, "token_acc": 0.872128417616696, "train_speed(iter/s)": 0.016923},
{"epoch": 2.7255741533670688, "grad_norm": 0.2968541085720062, "learning_rate": 9.34082629762803e-06, "loss": 0.40741329193115233, "memory(GiB)": 88.68, "step": 875, "token_acc": 0.8571625546526471, "train_speed(iter/s)": 0.016921},
{"epoch": 2.741144414168937, "grad_norm": 0.2884249687194824, "learning_rate": 9.23801667686461e-06, "loss": 0.40064706802368166, "memory(GiB)": 88.68, "step": 880, "token_acc": 0.8599723495981721, "train_speed(iter/s)": 0.016922},
{"epoch": 2.756714674970806, "grad_norm": 0.27596229314804077, "learning_rate": 9.135288005092546e-06, "loss": 0.39715871810913084, "memory(GiB)": 88.68, "step": 885, "token_acc": 0.8647707635744576, "train_speed(iter/s)": 0.016927},
{"epoch": 2.7722849357726744, "grad_norm": 0.30293765664100647, "learning_rate": 9.032651195650884e-06, "loss": 0.3991700649261475, "memory(GiB)": 88.68, "step": 890, "token_acc": 0.8664495037451203, "train_speed(iter/s)": 0.01693},
{"epoch": 2.787855196574543, "grad_norm": 0.2744984030723572, "learning_rate": 8.930117152119736e-06, "loss": 0.3990873575210571, "memory(GiB)": 88.68, "step": 895, "token_acc": 0.8639404074186683, "train_speed(iter/s)": 0.016932},
{"epoch": 2.8034254573764112, "grad_norm": 0.29595863819122314, "learning_rate": 8.827696767161902e-06, "loss": 0.4118965148925781, "memory(GiB)": 88.68, "step": 900, "token_acc": 0.8585057335917827, "train_speed(iter/s)": 0.016929},
{"epoch": 2.8034254573764112, "eval_loss": 0.41329577565193176, "eval_runtime": 48.7426, "eval_samples_per_second": 8.514, "eval_steps_per_second": 1.067, "eval_token_acc": 0.8528091814089642, "step": 900},
{"epoch": 2.8189957181782797, "grad_norm": 0.26091545820236206, "learning_rate": 8.725400921365722e-06, "loss": 0.39678106307983396, "memory(GiB)": 88.68, "step": 905, "token_acc": 0.8594101414128966, "train_speed(iter/s)": 0.016913},
{"epoch": 2.834565978980148, "grad_norm": 0.2783149778842926, "learning_rate": 8.623240482089153e-06, "loss": 0.3983915328979492, "memory(GiB)": 88.68, "step": 910, "token_acc": 0.8653014428764835, "train_speed(iter/s)": 0.01691},
{"epoch": 2.8501362397820165, "grad_norm": 0.29711443185806274, "learning_rate": 8.52122630230531e-06, "loss": 0.3964498996734619, "memory(GiB)": 88.68, "step": 915, "token_acc": 0.8654651025002963, "train_speed(iter/s)": 0.016909},
{"epoch": 2.865706500583885, "grad_norm": 0.3235342502593994, "learning_rate": 8.419369219449487e-06, "loss": 0.4111301422119141, "memory(GiB)": 88.68, "step": 920, "token_acc": 0.854048169486135, "train_speed(iter/s)": 0.016905},
{"epoch": 2.8812767613857533, "grad_norm": 0.2915455400943756, "learning_rate": 8.317680054267834e-06, "loss": 0.3976348161697388, "memory(GiB)": 88.68, "step": 925, "token_acc": 0.8646497658549133, "train_speed(iter/s)": 0.016911},
{"epoch": 2.8968470221876217, "grad_norm": 0.28624093532562256, "learning_rate": 8.216169609667854e-06, "loss": 0.3987285137176514, "memory(GiB)": 88.68, "step": 930, "token_acc": 0.8666723028265342, "train_speed(iter/s)": 0.016905},
{"epoch": 2.91241728298949, "grad_norm": 0.2571397125720978, "learning_rate": 8.114848669570733e-06, "loss": 0.4107855796813965, "memory(GiB)": 88.68, "step": 935, "token_acc": 0.8626929739970473, "train_speed(iter/s)": 0.016902},
{"epoch": 2.9279875437913585, "grad_norm": 0.266347736120224, "learning_rate": 8.013727997765724e-06, "loss": 0.39544177055358887, "memory(GiB)": 88.68, "step": 940, "token_acc": 0.8621273056885385, "train_speed(iter/s)": 0.016899},
{"epoch": 2.943557804593227, "grad_norm": 0.26831647753715515, "learning_rate": 7.91281833676665e-06, "loss": 0.3936420202255249, "memory(GiB)": 88.68, "step": 945, "token_acc": 0.856834701996057, "train_speed(iter/s)": 0.016903},
{"epoch": 2.9591280653950953, "grad_norm": 0.2694167494773865, "learning_rate": 7.812130406670699e-06, "loss": 0.4112107276916504, "memory(GiB)": 88.68, "step": 950, "token_acc": 0.8646875190971869, "train_speed(iter/s)": 0.016901},
{"epoch": 2.9746983261969637, "grad_norm": 0.29274359345436096, "learning_rate": 7.71167490401956e-06, "loss": 0.39890074729919434, "memory(GiB)": 88.68, "step": 955, "token_acc": 0.8708542010096479, "train_speed(iter/s)": 0.016909},
{"epoch": 2.990268586998832, "grad_norm": 0.2841947674751282, "learning_rate": 7.6114625006630885e-06, "loss": 0.3915250301361084, "memory(GiB)": 88.68, "step": 960, "token_acc": 0.8685056019696435, "train_speed(iter/s)": 0.016913},
{"epoch": 3.0062281043207473, "grad_norm": 0.36610281467437744, "learning_rate": 7.511503842625576e-06, "loss": 0.46400060653686526, "memory(GiB)": 88.68, "step": 965, "token_acc": 0.8688302643312309, "train_speed(iter/s)": 0.016913},
{"epoch": 3.0217983651226157, "grad_norm": 0.3299410939216614, "learning_rate": 7.411809548974792e-06, "loss": 0.37694129943847654, "memory(GiB)": 88.68, "step": 970, "token_acc": 0.8694388579532905, "train_speed(iter/s)": 0.016915},
{"epoch": 3.037368625924484, "grad_norm": 0.3347257375717163, "learning_rate": 7.312390210693863e-06, "loss": 0.36944580078125, "memory(GiB)": 88.68, "step": 975, "token_acc": 0.878339156936005, "train_speed(iter/s)": 0.016917},
{"epoch": 3.0529388867263525, "grad_norm": 0.2957051992416382, "learning_rate": 7.213256389556125e-06, "loss": 0.36371331214904784, "memory(GiB)": 88.68, "step": 980, "token_acc": 0.8717935493188025, "train_speed(iter/s)": 0.016917},
{"epoch": 3.068509147528221, "grad_norm": 0.29925552010536194, "learning_rate": 7.114418617003137e-06, "loss": 0.37583396434783933, "memory(GiB)": 88.68, "step": 985, "token_acc": 0.8665699665764031, "train_speed(iter/s)": 0.016916},
{"epoch": 3.0840794083300898, "grad_norm": 0.2758331298828125, "learning_rate": 7.015887393025847e-06, "loss": 0.3523877620697021, "memory(GiB)": 88.68, "step": 990, "token_acc": 0.8778131746205382, "train_speed(iter/s)": 0.016922},
{"epoch": 3.099649669131958, "grad_norm": 0.2721407413482666, "learning_rate": 6.917673185049138e-06, "loss": 0.36904470920562743, "memory(GiB)": 88.68, "step": 995, "token_acc": 0.8740084948468113, "train_speed(iter/s)": 0.016923},
{"epoch": 3.1152199299338266, "grad_norm": 0.27260729670524597, "learning_rate": 6.819786426819825e-06, "loss": 0.37018847465515137, "memory(GiB)": 88.68, "step": 1000, "token_acc": 0.8693169060405699, "train_speed(iter/s)": 0.016924},
{"epoch": 3.1152199299338266, "eval_loss": 0.4171549081802368, "eval_runtime": 48.9505, "eval_samples_per_second": 8.478, "eval_steps_per_second": 1.062, "eval_token_acc": 0.8533666490046466, "step": 1000},
{"epoch": 3.130790190735695, "grad_norm": 0.3021749258041382, "learning_rate": 6.722237517298232e-06, "loss": 0.3654526948928833, "memory(GiB)": 88.68, "step": 1005, "token_acc": 0.8657461259026207, "train_speed(iter/s)": 0.016908},
{"epoch": 3.1463604515375634, "grad_norm": 0.2921292781829834, "learning_rate": 6.625036819553467e-06, "loss": 0.36435742378234864, "memory(GiB)": 88.68, "step": 1010, "token_acc": 0.8771943849326895, "train_speed(iter/s)": 0.016915},
{"epoch": 3.161930712339432, "grad_norm": 0.2832075357437134, "learning_rate": 6.528194659662488e-06, "loss": 0.3685340881347656, "memory(GiB)": 88.68, "step": 1015, "token_acc": 0.8670015373143141, "train_speed(iter/s)": 0.016916},
{"epoch": 3.1775009731413, "grad_norm": 0.2529529631137848, "learning_rate": 6.431721325613138e-06, "loss": 0.3727813720703125, "memory(GiB)": 88.68, "step": 1020, "token_acc": 0.8713205243098983, "train_speed(iter/s)": 0.016914},
{"epoch": 3.1930712339431686, "grad_norm": 0.2848242521286011, "learning_rate": 6.335627066211196e-06, "loss": 0.3792572021484375, "memory(GiB)": 88.68, "step": 1025, "token_acc": 0.86777010721209, "train_speed(iter/s)": 0.01691},
{"epoch": 3.208641494745037, "grad_norm": 0.2933395802974701, "learning_rate": 6.239922089991597e-06, "loss": 0.36856865882873535, "memory(GiB)": 88.68, "step": 1030, "token_acc": 0.8724858299595142, "train_speed(iter/s)": 0.01691},
{"epoch": 3.2242117555469054, "grad_norm": 0.2859324514865875, "learning_rate": 6.144616564133927e-06, "loss": 0.36645007133483887, "memory(GiB)": 88.68, "step": 1035, "token_acc": 0.8778270121878196, "train_speed(iter/s)": 0.016913},
{"epoch": 3.239782016348774, "grad_norm": 0.244709774851799, "learning_rate": 6.049720613382332e-06, "loss": 0.3758384704589844, "memory(GiB)": 88.68, "step": 1040, "token_acc": 0.8745641464981795, "train_speed(iter/s)": 0.016907},
{"epoch": 3.2553522771506422, "grad_norm": 0.28737974166870117, "learning_rate": 5.955244318969913e-06, "loss": 0.37161884307861326, "memory(GiB)": 88.68, "step": 1045, "token_acc": 0.8789312516614523, "train_speed(iter/s)": 0.016907},
{"epoch": 3.2709225379525106, "grad_norm": 0.30621790885925293, "learning_rate": 5.8611977175477355e-06, "loss": 0.36142144203186033, "memory(GiB)": 88.68, "step": 1050, "token_acc": 0.8809154383242824, "train_speed(iter/s)": 0.016914},
{"epoch": 3.286492798754379, "grad_norm": 0.2538982033729553, "learning_rate": 5.767590800118621e-06, "loss": 0.3751323699951172, "memory(GiB)": 88.68, "step": 1055, "token_acc": 0.8687682142777692, "train_speed(iter/s)": 0.01691},
{"epoch": 3.3020630595562475, "grad_norm": 0.2651020586490631, "learning_rate": 5.674433510975725e-06, "loss": 0.3630067825317383, "memory(GiB)": 88.68, "step": 1060, "token_acc": 0.8768329269920676, "train_speed(iter/s)": 0.016915},
{"epoch": 3.317633320358116, "grad_norm": 0.28797048330307007, "learning_rate": 5.581735746646134e-06, "loss": 0.38075408935546873, "memory(GiB)": 88.68, "step": 1065, "token_acc": 0.8739769760426389, "train_speed(iter/s)": 0.016913},
{"epoch": 3.3332035811599843, "grad_norm": 0.2768980860710144, "learning_rate": 5.4895073548394926e-06, "loss": 0.37256827354431155, "memory(GiB)": 88.68, "step": 1070, "token_acc": 0.8735899236301825, "train_speed(iter/s)": 0.016917},
{"epoch": 3.3487738419618527, "grad_norm": 0.29908499121665955, "learning_rate": 5.397758133401849e-06, "loss": 0.37295982837677, "memory(GiB)": 88.68, "step": 1075, "token_acc": 0.8778628774722752, "train_speed(iter/s)": 0.016923},
{"epoch": 3.364344102763721, "grad_norm": 0.31482502818107605, "learning_rate": 5.306497829274785e-06, "loss": 0.37373597621917726, "memory(GiB)": 88.68, "step": 1080, "token_acc": 0.8745586160071005, "train_speed(iter/s)": 0.016928},
{"epoch": 3.3799143635655895, "grad_norm": 0.2600855827331543, "learning_rate": 5.215736137459932e-06, "loss": 0.36784698963165285, "memory(GiB)": 88.68, "step": 1085, "token_acc": 0.8707799198767635, "train_speed(iter/s)": 0.016929},
{"epoch": 3.3954846243674583, "grad_norm": 0.29292717576026917, "learning_rate": 5.12548269998906e-06, "loss": 0.36927309036254885, "memory(GiB)": 88.68, "step": 1090, "token_acc": 0.88031051846326, "train_speed(iter/s)": 0.016927},
{"epoch": 3.4110548851693268, "grad_norm": 0.2715342342853546, "learning_rate": 5.035747104899738e-06, "loss": 0.37144927978515624, "memory(GiB)": 88.68, "step": 1095, "token_acc": 0.8731032759416966, "train_speed(iter/s)": 0.01692},
{"epoch": 3.426625145971195, "grad_norm": 0.2806420922279358, "learning_rate": 4.946538885216759e-06, "loss": 0.3772748470306396, "memory(GiB)": 88.68, "step": 1100, "token_acc": 0.8754309330659928, "train_speed(iter/s)": 0.016918},
{"epoch": 3.426625145971195, "eval_loss": 0.41369661688804626, "eval_runtime": 48.7784, "eval_samples_per_second": 8.508, "eval_steps_per_second": 1.066, "eval_token_acc": 0.8540369662526263, "step": 1100}
],
"logging_steps": 5,
"max_steps": 1605,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 100,
"stateful_callbacks": {
  "TrainerControl": {
    "args": {
      "should_epoch_stop": false,
      "should_evaluate": false,
      "should_log": false,
      "should_save": true,
      "should_training_stop": false
    },
    "attributes": {}
  }
},
"total_flos": 1.5522242675121062e+19,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2e7b836890bbb378984c66fc65ae2d8b7fde660489770526e4c234d4c3860239
size 7864
vocab.json
ADDED
The diff for this file is too large to render.
See raw diff