jackysnake committed
Commit ddb8ccc · verified · 1 Parent(s): 118b340

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: Qwen/Qwen3-8B
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: qwen_debug_unlearn
+ results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # qwen_debug_unlearn
+
+ This model is a fine-tuned version of [Qwen/Qwen3-8B](https://huggingface.co/Qwen/Qwen3-8B) on the tofu_train, the tofu_train_lineage, the chatdoctor_train, the chatdoctor_train_lineage, the bever_train, the bever_train_lineage, the tqa_train, the tqa_train_lineage, the wmdp_train, the wmdp_train_lineage, the tofu_train_lineage_unlearn, the chatdoctor_train_lineage_unlearn, the bever_train_lineage_unlearn, the wmdp_train_lineage_unlearn, the truthfulqa_train_lineage_unlearn, the tofu_train_lineage_unlearn_other_tag, the chatdoctor_train_lineage_unlearn_other_tag, the bever_train_lineage_unlearn_other_tag, the wmdp_train_lineage_unlearn_other_tag and the truthfulqa_train_lineage_unlearn_other_tag datasets.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-05
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 4
+ - gradient_accumulation_steps: 2
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 32
+ - optimizer: adamw_torch with betas=(0.9, 0.999), epsilon=1e-08, and no additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 3.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.52.4
+ - Pytorch 2.8.0+cu128
+ - Datasets 3.6.0
+ - Tokenizers 0.21.1
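
Since the card above is otherwise a stub, a minimal usage sketch may help readers of this commit. The repository id below is an assumption inferred from the uploader and model name (the card does not state it), and the sampling values mirror the `generation_config.json` added later in this commit.

```python
# Minimal sketch (not from the model card): load the fine-tuned checkpoint and
# generate with the chat template shipped in this commit.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "jackysnake/qwen_debug_unlearn"  # assumed repo id, not confirmed by the card

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype="bfloat16", device_map="auto")

messages = [{"role": "user", "content": "Give me a short introduction to large language models."}]
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    tokenize=True,
    return_tensors="pt",
).to(model.device)

# Sampling values follow generation_config.json in this commit.
outputs = model.generate(inputs, max_new_tokens=512, do_sample=True, temperature=0.6, top_p=0.95, top_k=20)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```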
added_tokens.json ADDED
@@ -0,0 +1,35 @@
+ {
+ "</think>": 151668,
+ "</tool_call>": 151658,
+ "</tool_response>": 151666,
+ "<BEVER>": 151674,
+ "<CHATDOCTOR>": 151672,
+ "<DEBUG>": 151669,
+ "<TAG>": 151670,
+ "<TOFU>": 151671,
+ "<TQA>": 151673,
+ "<WMDP>": 151675,
+ "<think>": 151667,
+ "<tool_call>": 151657,
+ "<tool_response>": 151665,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
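
Besides the stock Qwen3 control tokens, `added_tokens.json` introduces seven dataset-tag tokens (`<DEBUG>`, `<TAG>`, `<TOFU>`, `<CHATDOCTOR>`, `<TQA>`, `<BEVER>`, `<WMDP>`) at ids 151669–151675. A small, hypothetical sanity check that the uploaded tokenizer resolves the new tags to the listed ids (repo id assumed as above):

```python
# Hypothetical sanity check: the tag tokens introduced in added_tokens.json
# should round-trip to the ids listed there once the tokenizer is loaded.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("jackysnake/qwen_debug_unlearn")  # assumed repo id

expected = {
    "<DEBUG>": 151669, "<TAG>": 151670, "<TOFU>": 151671, "<CHATDOCTOR>": 151672,
    "<TQA>": 151673, "<BEVER>": 151674, "<WMDP>": 151675,
}
for token, token_id in expected.items():
    assert tokenizer.convert_tokens_to_ids(token) == token_id, token
    assert tokenizer.decode([token_id]) == token, token
```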
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 3.0,
+ "total_flos": 247294279680000.0,
+ "train_loss": 0.26427117863254007,
+ "train_runtime": 27747.0354,
+ "train_samples_per_second": 9.727,
+ "train_steps_per_second": 0.152
+ }
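
As a quick, informal consistency check, the run statistics above line up with the 4218 optimizer steps reported in `trainer_log.jsonl` below and the effective batch size of 64 from the README hyperparameters:

```python
# Rough cross-check of the reported run statistics (illustrative only).
train_runtime = 27747.0354        # seconds, from all_results.json
steps_per_second = 0.152
samples_per_second = 9.727
total_train_batch_size = 64       # from the README hyperparameters

print(train_runtime * steps_per_second)    # ~4218 steps, matching trainer_log.jsonl
print(train_runtime * samples_per_second)  # ~270k samples, roughly 4218 steps * 64 per step
```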
chat_template.jinja ADDED
@@ -0,0 +1,89 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0].role == 'system' %}
+ {{- messages[0].content + '\n\n' }}
+ {%- endif %}
+ {{- "# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0].role == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0].content + '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- set ns = namespace(multi_step_tool=true, last_query_index=messages|length - 1) %}
+ {%- for message in messages[::-1] %}
+ {%- set index = (messages|length - 1) - loop.index0 %}
+ {%- if ns.multi_step_tool and message.role == "user" and message.content is string and not(message.content.startswith('<tool_response>') and message.content.endswith('</tool_response>')) %}
+ {%- set ns.multi_step_tool = false %}
+ {%- set ns.last_query_index = index %}
+ {%- endif %}
+ {%- endfor %}
+ {%- for message in messages %}
+ {%- if message.content is string %}
+ {%- set content = message.content %}
+ {%- else %}
+ {%- set content = '' %}
+ {%- endif %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) %}
+ {{- '<|im_start|>' + message.role + '\n' + content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {%- set reasoning_content = '' %}
+ {%- if message.reasoning_content is string %}
+ {%- set reasoning_content = message.reasoning_content %}
+ {%- else %}
+ {%- if '</think>' in content %}
+ {%- set reasoning_content = content.split('</think>')[0].rstrip('\n').split('<think>')[-1].lstrip('\n') %}
+ {%- set content = content.split('</think>')[-1].lstrip('\n') %}
+ {%- endif %}
+ {%- endif %}
+ {%- if loop.index0 > ns.last_query_index %}
+ {%- if loop.last or (not loop.last and reasoning_content) %}
+ {{- '<|im_start|>' + message.role + '\n<think>\n' + reasoning_content.strip('\n') + '\n</think>\n\n' + content.lstrip('\n') }}
+ {%- else %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- endif %}
+ {%- else %}
+ {{- '<|im_start|>' + message.role + '\n' + content }}
+ {%- endif %}
+ {%- if message.tool_calls %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if (loop.first and content) or (not loop.first) %}
+ {{- '\n' }}
+ {%- endif %}
+ {%- if tool_call.function %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {%- if tool_call.arguments is string %}
+ {{- tool_call.arguments }}
+ {%- else %}
+ {{- tool_call.arguments | tojson }}
+ {%- endif %}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {%- endif %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if loop.first or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- if enable_thinking is defined and enable_thinking is false %}
+ {{- '<think>\n\n</think>\n\n' }}
+ {%- endif %}
+ {%- endif %}
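
The template above is the standard Qwen3 chat template: it wraps each turn in `<|im_start|>`/`<|im_end|>`, renders tool schemas and `<tool_call>` blocks, keeps `<think>` reasoning only for turns after the last user query, and lets an `enable_thinking` flag force an empty think block. A hedged sketch of how it is consumed through `apply_chat_template` (repo id assumed as before):

```python
# Sketch: render the Jinja template above via the tokenizer, with and without
# the "thinking" prefix that the final branch of the template controls.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("jackysnake/qwen_debug_unlearn")  # assumed repo id
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]

with_thinking = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True, enable_thinking=True
)
without_thinking = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True, enable_thinking=False
)

# enable_thinking=False makes the template emit an empty <think>...</think> block
# so the model skips its reasoning segment.
print(with_thinking.endswith("<|im_start|>assistant\n"))      # True
print(without_thinking.endswith("<think>\n\n</think>\n\n"))   # True
```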
config.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "architectures": [
+ "Qwen3ForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "initializer_range": 0.02,
+ "intermediate_size": 12288,
+ "max_position_embeddings": 40960,
+ "max_window_layers": 36,
+ "model_type": "qwen3",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 36,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.52.4",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 151936
+ }
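
`config.json` keeps the stock Qwen3-8B geometry: 36 layers, hidden size 4096, and grouped-query attention with 32 query heads over 8 key/value heads at `head_dim` 128. The short sketch below is illustrative arithmetic on those fields, showing the attention projection shapes they imply; it is not an official parameter count.

```python
# Illustrative arithmetic from the config values above: with grouped-query
# attention, q_proj keeps num_attention_heads * head_dim output features while
# k_proj/v_proj only keep num_key_value_heads * head_dim.
cfg = {
    "hidden_size": 4096,
    "num_attention_heads": 32,
    "num_key_value_heads": 8,
    "head_dim": 128,
    "num_hidden_layers": 36,
}

q_out = cfg["num_attention_heads"] * cfg["head_dim"]     # 32 * 128 = 4096
kv_out = cfg["num_key_value_heads"] * cfg["head_dim"]    # 8 * 128 = 1024

print("q_proj weight shape:", (q_out, cfg["hidden_size"]))   # (4096, 4096)
print("k_proj weight shape:", (kv_out, cfg["hidden_size"]))  # (1024, 4096)
print("v_proj weight shape:", (kv_out, cfg["hidden_size"]))  # (1024, 4096)
print("o_proj weight shape:", (cfg["hidden_size"], q_out))   # (4096, 4096)
```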
generation_config.json ADDED
@@ -0,0 +1,13 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "temperature": 0.6,
+ "top_k": 20,
+ "top_p": 0.95,
+ "transformers_version": "4.52.4"
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4b92ae0df91db60a017602b35de4938b77058a1ea1fa1052a586a2a3d94010cb
+ size 4902257696
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a6ffbc6c2402c07ed0f60b562a20b1e1dcff1fa060661a8ce617c32dc10ac84
+ size 4915960368
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ebaebb11050eff667b6e0291839860676796d35954230fd37d1b17dd71eb9cac
+ size 4983068496
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f4f3ebc7a69602fcf45e97a83bde0bfc870ca55a027b9e3ccc7b5e1e1da3d03
+ size 1580230264
model.safetensors.index.json ADDED
@@ -0,0 +1,406 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 16381470720
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00004-of-00004.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
13
+ "model.layers.0.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
14
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
15
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
16
+ "model.layers.0.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
17
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
18
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
19
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
20
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
21
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
22
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
23
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
24
+ "model.layers.1.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
25
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
26
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
27
+ "model.layers.1.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
28
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
29
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
30
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
31
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
32
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
33
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
34
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
35
+ "model.layers.10.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
36
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
37
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
38
+ "model.layers.10.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
39
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
40
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
41
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
42
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
43
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
44
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
45
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
46
+ "model.layers.11.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
47
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
48
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
49
+ "model.layers.11.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
50
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
51
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
52
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
53
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
54
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
55
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
56
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
57
+ "model.layers.12.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
58
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
59
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
60
+ "model.layers.12.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
61
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
62
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
63
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
64
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
65
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
66
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
67
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
68
+ "model.layers.13.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
69
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
70
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
71
+ "model.layers.13.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
72
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
73
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
74
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
75
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
76
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
77
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
78
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
79
+ "model.layers.14.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
80
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
81
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
82
+ "model.layers.14.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
83
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
84
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
85
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
86
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
87
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
88
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
89
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
90
+ "model.layers.15.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
91
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
92
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
93
+ "model.layers.15.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
94
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
95
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
96
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
97
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
98
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
99
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
100
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
101
+ "model.layers.16.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
102
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
103
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
104
+ "model.layers.16.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
105
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
106
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
107
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
108
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
109
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
110
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
111
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
112
+ "model.layers.17.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
113
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
114
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
115
+ "model.layers.17.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
116
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
117
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
118
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors",
119
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
120
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
121
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
122
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
123
+ "model.layers.18.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
124
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
125
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
126
+ "model.layers.18.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
127
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
128
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
129
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors",
130
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
131
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
132
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
133
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
134
+ "model.layers.19.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
135
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
136
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
137
+ "model.layers.19.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
138
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
139
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
140
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
141
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
142
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
143
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
144
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
145
+ "model.layers.2.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
146
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
147
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
148
+ "model.layers.2.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
149
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
150
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
151
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00004.safetensors",
152
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
153
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
154
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
155
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
156
+ "model.layers.20.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
157
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
158
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
159
+ "model.layers.20.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
160
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
161
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
162
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00004.safetensors",
163
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
164
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
165
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
166
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
167
+ "model.layers.21.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
168
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
169
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
170
+ "model.layers.21.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
171
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
172
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
173
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
174
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
175
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
176
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
177
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
178
+ "model.layers.22.self_attn.k_norm.weight": "model-00002-of-00004.safetensors",
179
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
180
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
181
+ "model.layers.22.self_attn.q_norm.weight": "model-00002-of-00004.safetensors",
182
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
183
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
184
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
185
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
186
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
187
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
188
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
189
+ "model.layers.23.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
190
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
191
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
192
+ "model.layers.23.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
193
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
194
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
195
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
196
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
197
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
198
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
199
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
200
+ "model.layers.24.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
201
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
202
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
203
+ "model.layers.24.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
204
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
205
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
206
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
207
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
208
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
209
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
210
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
211
+ "model.layers.25.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
212
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
213
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
214
+ "model.layers.25.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
215
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
216
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
217
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
218
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
219
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
220
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
221
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
222
+ "model.layers.26.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
223
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
224
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
225
+ "model.layers.26.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
226
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
227
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
228
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
229
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
230
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
231
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
232
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
233
+ "model.layers.27.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
234
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
235
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
236
+ "model.layers.27.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
237
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
238
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
239
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors",
240
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
241
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
242
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
243
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
244
+ "model.layers.28.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
245
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
246
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
247
+ "model.layers.28.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
248
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
249
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
250
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors",
251
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
252
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
253
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
254
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
255
+ "model.layers.29.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
256
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
257
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
258
+ "model.layers.29.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
259
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
260
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
261
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
262
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
263
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
264
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
265
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
266
+ "model.layers.3.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
267
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
268
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
269
+ "model.layers.3.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
270
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
271
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
272
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors",
273
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
274
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
275
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
276
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
277
+ "model.layers.30.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
278
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
279
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
280
+ "model.layers.30.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
281
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
282
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
283
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors",
284
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
285
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
286
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
287
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
288
+ "model.layers.31.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
289
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
290
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
291
+ "model.layers.31.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
292
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
293
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
294
+ "model.layers.32.input_layernorm.weight": "model-00003-of-00004.safetensors",
295
+ "model.layers.32.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
296
+ "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
297
+ "model.layers.32.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
298
+ "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
299
+ "model.layers.32.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
300
+ "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
301
+ "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
302
+ "model.layers.32.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
303
+ "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
304
+ "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
305
+ "model.layers.33.input_layernorm.weight": "model-00003-of-00004.safetensors",
306
+ "model.layers.33.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
307
+ "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
308
+ "model.layers.33.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
309
+ "model.layers.33.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
310
+ "model.layers.33.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
311
+ "model.layers.33.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
312
+ "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
313
+ "model.layers.33.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
314
+ "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
315
+ "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
316
+ "model.layers.34.input_layernorm.weight": "model-00003-of-00004.safetensors",
317
+ "model.layers.34.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
318
+ "model.layers.34.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
319
+ "model.layers.34.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
320
+ "model.layers.34.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
321
+ "model.layers.34.self_attn.k_norm.weight": "model-00003-of-00004.safetensors",
322
+ "model.layers.34.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
323
+ "model.layers.34.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
324
+ "model.layers.34.self_attn.q_norm.weight": "model-00003-of-00004.safetensors",
325
+ "model.layers.34.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
326
+ "model.layers.34.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
327
+ "model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors",
328
+ "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors",
329
+ "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors",
330
+ "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors",
331
+ "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors",
332
+ "model.layers.35.self_attn.k_norm.weight": "model-00004-of-00004.safetensors",
333
+ "model.layers.35.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
334
+ "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors",
335
+ "model.layers.35.self_attn.q_norm.weight": "model-00004-of-00004.safetensors",
336
+ "model.layers.35.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
337
+ "model.layers.35.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
338
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
339
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
340
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
341
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
342
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
343
+ "model.layers.4.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
344
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
345
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
346
+ "model.layers.4.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
347
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
348
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
349
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
350
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
351
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
352
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
353
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
354
+ "model.layers.5.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
355
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
356
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
357
+ "model.layers.5.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
358
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
359
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
360
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
361
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
362
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
363
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
364
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
365
+ "model.layers.6.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
366
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
367
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
368
+ "model.layers.6.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
369
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
370
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
371
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
372
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
373
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
374
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
375
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
376
+ "model.layers.7.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
377
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
378
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
379
+ "model.layers.7.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
380
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
381
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
382
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00004.safetensors",
383
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
384
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
385
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
386
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
387
+ "model.layers.8.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
388
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
389
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
390
+ "model.layers.8.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
391
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
392
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
393
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
394
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
395
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
396
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
397
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
398
+ "model.layers.9.self_attn.k_norm.weight": "model-00001-of-00004.safetensors",
399
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
400
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
401
+ "model.layers.9.self_attn.q_norm.weight": "model-00001-of-00004.safetensors",
402
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
403
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
404
+ "model.norm.weight": "model-00004-of-00004.safetensors"
405
+ }
406
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b73ba43486eec9295930098b162825599b68ccfb3a3f259b51a968943aa88f05
+ size 11423941
tokenizer_config.json ADDED
@@ -0,0 +1,296 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ },
181
+ "151665": {
182
+ "content": "<tool_response>",
183
+ "lstrip": false,
184
+ "normalized": false,
185
+ "rstrip": false,
186
+ "single_word": false,
187
+ "special": false
188
+ },
189
+ "151666": {
190
+ "content": "</tool_response>",
191
+ "lstrip": false,
192
+ "normalized": false,
193
+ "rstrip": false,
194
+ "single_word": false,
195
+ "special": false
196
+ },
197
+ "151667": {
198
+ "content": "<think>",
199
+ "lstrip": false,
200
+ "normalized": false,
201
+ "rstrip": false,
202
+ "single_word": false,
203
+ "special": false
204
+ },
205
+ "151668": {
206
+ "content": "</think>",
207
+ "lstrip": false,
208
+ "normalized": false,
209
+ "rstrip": false,
210
+ "single_word": false,
211
+ "special": false
212
+ },
213
+ "151669": {
214
+ "content": "<DEBUG>",
215
+ "lstrip": false,
216
+ "normalized": false,
217
+ "rstrip": false,
218
+ "single_word": false,
219
+ "special": true
220
+ },
221
+ "151670": {
222
+ "content": "<TAG>",
223
+ "lstrip": false,
224
+ "normalized": false,
225
+ "rstrip": false,
226
+ "single_word": false,
227
+ "special": true
228
+ },
229
+ "151671": {
230
+ "content": "<TOFU>",
231
+ "lstrip": false,
232
+ "normalized": false,
233
+ "rstrip": false,
234
+ "single_word": false,
235
+ "special": true
236
+ },
237
+ "151672": {
238
+ "content": "<CHATDOCTOR>",
239
+ "lstrip": false,
240
+ "normalized": false,
241
+ "rstrip": false,
242
+ "single_word": false,
243
+ "special": true
244
+ },
245
+ "151673": {
246
+ "content": "<TQA>",
247
+ "lstrip": false,
248
+ "normalized": false,
249
+ "rstrip": false,
250
+ "single_word": false,
251
+ "special": true
252
+ },
253
+ "151674": {
254
+ "content": "<BEVER>",
255
+ "lstrip": false,
256
+ "normalized": false,
257
+ "rstrip": false,
258
+ "single_word": false,
259
+ "special": true
260
+ },
261
+ "151675": {
262
+ "content": "<WMDP>",
263
+ "lstrip": false,
264
+ "normalized": false,
265
+ "rstrip": false,
266
+ "single_word": false,
267
+ "special": true
268
+ }
269
+ },
270
+ "additional_special_tokens": [
271
+ "<|im_start|>",
272
+ "<|im_end|>",
273
+ "<|object_ref_start|>",
274
+ "<|object_ref_end|>",
275
+ "<|box_start|>",
276
+ "<|box_end|>",
277
+ "<|quad_start|>",
278
+ "<|quad_end|>",
279
+ "<|vision_start|>",
280
+ "<|vision_end|>",
281
+ "<|vision_pad|>",
282
+ "<|image_pad|>",
283
+ "<|video_pad|>"
284
+ ],
285
+ "bos_token": null,
286
+ "clean_up_tokenization_spaces": false,
287
+ "eos_token": "<|im_end|>",
288
+ "errors": "replace",
289
+ "extra_special_tokens": {},
290
+ "model_max_length": 131072,
291
+ "pad_token": "<|endoftext|>",
292
+ "padding_side": "right",
293
+ "split_special_tokens": false,
294
+ "tokenizer_class": "Qwen2Tokenizer",
295
+ "unk_token": null
296
+ }
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 3.0,
+ "total_flos": 247294279680000.0,
+ "train_loss": 0.26427117863254007,
+ "train_runtime": 27747.0354,
+ "train_samples_per_second": 9.727,
+ "train_steps_per_second": 0.152
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,422 @@
1
+ {"current_steps": 10, "total_steps": 4218, "loss": 4.9235, "lr": 2.132701421800948e-07, "epoch": 0.007112375533428165, "percentage": 0.24, "elapsed_time": "0:01:07", "remaining_time": "7:53:03"}
2
+ {"current_steps": 20, "total_steps": 4218, "loss": 4.7616, "lr": 4.502369668246446e-07, "epoch": 0.01422475106685633, "percentage": 0.47, "elapsed_time": "0:02:16", "remaining_time": "7:58:47"}
3
+ {"current_steps": 30, "total_steps": 4218, "loss": 4.0518, "lr": 6.872037914691944e-07, "epoch": 0.021337126600284494, "percentage": 0.71, "elapsed_time": "0:03:21", "remaining_time": "7:48:32"}
4
+ {"current_steps": 40, "total_steps": 4218, "loss": 3.1168, "lr": 9.241706161137441e-07, "epoch": 0.02844950213371266, "percentage": 0.95, "elapsed_time": "0:04:18", "remaining_time": "7:29:12"}
5
+ {"current_steps": 50, "total_steps": 4218, "loss": 2.408, "lr": 1.161137440758294e-06, "epoch": 0.03556187766714083, "percentage": 1.19, "elapsed_time": "0:05:22", "remaining_time": "7:27:26"}
6
+ {"current_steps": 60, "total_steps": 4218, "loss": 2.0996, "lr": 1.3981042654028437e-06, "epoch": 0.04267425320056899, "percentage": 1.42, "elapsed_time": "0:06:27", "remaining_time": "7:27:40"}
7
+ {"current_steps": 70, "total_steps": 4218, "loss": 1.961, "lr": 1.6350710900473934e-06, "epoch": 0.049786628733997154, "percentage": 1.66, "elapsed_time": "0:07:36", "remaining_time": "7:30:24"}
8
+ {"current_steps": 80, "total_steps": 4218, "loss": 1.8454, "lr": 1.8720379146919433e-06, "epoch": 0.05689900426742532, "percentage": 1.9, "elapsed_time": "0:08:38", "remaining_time": "7:26:34"}
9
+ {"current_steps": 90, "total_steps": 4218, "loss": 1.8, "lr": 2.109004739336493e-06, "epoch": 0.06401137980085349, "percentage": 2.13, "elapsed_time": "0:09:39", "remaining_time": "7:22:41"}
10
+ {"current_steps": 100, "total_steps": 4218, "loss": 1.7189, "lr": 2.345971563981043e-06, "epoch": 0.07112375533428165, "percentage": 2.37, "elapsed_time": "0:10:49", "remaining_time": "7:25:28"}
11
+ {"current_steps": 110, "total_steps": 4218, "loss": 1.6861, "lr": 2.5829383886255925e-06, "epoch": 0.07823613086770982, "percentage": 2.61, "elapsed_time": "0:12:01", "remaining_time": "7:29:08"}
12
+ {"current_steps": 120, "total_steps": 4218, "loss": 1.6933, "lr": 2.8199052132701426e-06, "epoch": 0.08534850640113797, "percentage": 2.84, "elapsed_time": "0:13:13", "remaining_time": "7:31:48"}
13
+ {"current_steps": 130, "total_steps": 4218, "loss": 1.6477, "lr": 3.0568720379146923e-06, "epoch": 0.09246088193456614, "percentage": 3.08, "elapsed_time": "0:14:18", "remaining_time": "7:30:09"}
+ {"current_steps": 140, "total_steps": 4218, "loss": 1.5837, "lr": 3.293838862559242e-06, "epoch": 0.09957325746799431, "percentage": 3.32, "elapsed_time": "0:15:21", "remaining_time": "7:27:35"}
+ {"current_steps": 150, "total_steps": 4218, "loss": 1.553, "lr": 3.5308056872037916e-06, "epoch": 0.10668563300142248, "percentage": 3.56, "elapsed_time": "0:16:13", "remaining_time": "7:20:00"}
+ {"current_steps": 160, "total_steps": 4218, "loss": 1.601, "lr": 3.7677725118483417e-06, "epoch": 0.11379800853485064, "percentage": 3.79, "elapsed_time": "0:17:22", "remaining_time": "7:20:32"}
+ {"current_steps": 170, "total_steps": 4218, "loss": 1.4953, "lr": 4.004739336492891e-06, "epoch": 0.12091038406827881, "percentage": 4.03, "elapsed_time": "0:18:20", "remaining_time": "7:16:41"}
+ {"current_steps": 180, "total_steps": 4218, "loss": 1.4986, "lr": 4.2417061611374415e-06, "epoch": 0.12802275960170698, "percentage": 4.27, "elapsed_time": "0:19:33", "remaining_time": "7:18:37"}
+ {"current_steps": 190, "total_steps": 4218, "loss": 1.4177, "lr": 4.478672985781991e-06, "epoch": 0.13513513513513514, "percentage": 4.5, "elapsed_time": "0:20:39", "remaining_time": "7:17:55"}
+ {"current_steps": 200, "total_steps": 4218, "loss": 1.4234, "lr": 4.715639810426541e-06, "epoch": 0.1422475106685633, "percentage": 4.74, "elapsed_time": "0:21:45", "remaining_time": "7:17:11"}
+ {"current_steps": 210, "total_steps": 4218, "loss": 1.4034, "lr": 4.952606635071091e-06, "epoch": 0.14935988620199148, "percentage": 4.98, "elapsed_time": "0:22:55", "remaining_time": "7:17:30"}
+ {"current_steps": 220, "total_steps": 4218, "loss": 1.4172, "lr": 5.18957345971564e-06, "epoch": 0.15647226173541964, "percentage": 5.22, "elapsed_time": "0:24:02", "remaining_time": "7:17:01"}
+ {"current_steps": 230, "total_steps": 4218, "loss": 1.3695, "lr": 5.42654028436019e-06, "epoch": 0.16358463726884778, "percentage": 5.45, "elapsed_time": "0:25:09", "remaining_time": "7:16:21"}
+ {"current_steps": 240, "total_steps": 4218, "loss": 1.3815, "lr": 5.66350710900474e-06, "epoch": 0.17069701280227595, "percentage": 5.69, "elapsed_time": "0:26:25", "remaining_time": "7:18:03"}
+ {"current_steps": 250, "total_steps": 4218, "loss": 1.3494, "lr": 5.90047393364929e-06, "epoch": 0.17780938833570412, "percentage": 5.93, "elapsed_time": "0:27:42", "remaining_time": "7:19:47"}
+ {"current_steps": 260, "total_steps": 4218, "loss": 1.351, "lr": 6.137440758293839e-06, "epoch": 0.18492176386913228, "percentage": 6.16, "elapsed_time": "0:28:55", "remaining_time": "7:20:26"}
+ {"current_steps": 270, "total_steps": 4218, "loss": 1.3007, "lr": 6.374407582938389e-06, "epoch": 0.19203413940256045, "percentage": 6.4, "elapsed_time": "0:30:06", "remaining_time": "7:20:10"}
+ {"current_steps": 280, "total_steps": 4218, "loss": 1.2318, "lr": 6.611374407582939e-06, "epoch": 0.19914651493598862, "percentage": 6.64, "elapsed_time": "0:31:17", "remaining_time": "7:20:04"}
+ {"current_steps": 290, "total_steps": 4218, "loss": 1.2452, "lr": 6.848341232227489e-06, "epoch": 0.20625889046941678, "percentage": 6.88, "elapsed_time": "0:32:17", "remaining_time": "7:17:19"}
+ {"current_steps": 300, "total_steps": 4218, "loss": 1.2299, "lr": 7.085308056872039e-06, "epoch": 0.21337126600284495, "percentage": 7.11, "elapsed_time": "0:33:17", "remaining_time": "7:14:45"}
+ {"current_steps": 310, "total_steps": 4218, "loss": 1.2481, "lr": 7.322274881516588e-06, "epoch": 0.22048364153627312, "percentage": 7.35, "elapsed_time": "0:34:24", "remaining_time": "7:13:42"}
+ {"current_steps": 320, "total_steps": 4218, "loss": 1.1874, "lr": 7.559241706161138e-06, "epoch": 0.22759601706970128, "percentage": 7.59, "elapsed_time": "0:35:27", "remaining_time": "7:12:01"}
+ {"current_steps": 330, "total_steps": 4218, "loss": 1.242, "lr": 7.796208530805689e-06, "epoch": 0.23470839260312945, "percentage": 7.82, "elapsed_time": "0:36:31", "remaining_time": "7:10:25"}
+ {"current_steps": 340, "total_steps": 4218, "loss": 1.1656, "lr": 8.033175355450237e-06, "epoch": 0.24182076813655762, "percentage": 8.06, "elapsed_time": "0:37:22", "remaining_time": "7:06:13"}
+ {"current_steps": 350, "total_steps": 4218, "loss": 1.1626, "lr": 8.270142180094787e-06, "epoch": 0.24893314366998578, "percentage": 8.3, "elapsed_time": "0:38:29", "remaining_time": "7:05:23"}
+ {"current_steps": 360, "total_steps": 4218, "loss": 1.1136, "lr": 8.507109004739337e-06, "epoch": 0.25604551920341395, "percentage": 8.53, "elapsed_time": "0:39:37", "remaining_time": "7:04:41"}
+ {"current_steps": 370, "total_steps": 4218, "loss": 1.109, "lr": 8.744075829383887e-06, "epoch": 0.2631578947368421, "percentage": 8.77, "elapsed_time": "0:40:49", "remaining_time": "7:04:35"}
+ {"current_steps": 380, "total_steps": 4218, "loss": 1.0926, "lr": 8.981042654028437e-06, "epoch": 0.2702702702702703, "percentage": 9.01, "elapsed_time": "0:41:59", "remaining_time": "7:04:09"}
+ {"current_steps": 390, "total_steps": 4218, "loss": 0.9856, "lr": 9.218009478672988e-06, "epoch": 0.2773826458036984, "percentage": 9.25, "elapsed_time": "0:43:03", "remaining_time": "7:02:41"}
+ {"current_steps": 400, "total_steps": 4218, "loss": 1.0438, "lr": 9.454976303317538e-06, "epoch": 0.2844950213371266, "percentage": 9.48, "elapsed_time": "0:44:12", "remaining_time": "7:01:55"}
+ {"current_steps": 410, "total_steps": 4218, "loss": 0.9834, "lr": 9.691943127962086e-06, "epoch": 0.29160739687055476, "percentage": 9.72, "elapsed_time": "0:45:19", "remaining_time": "7:00:56"}
+ {"current_steps": 420, "total_steps": 4218, "loss": 1.0355, "lr": 9.928909952606636e-06, "epoch": 0.29871977240398295, "percentage": 9.96, "elapsed_time": "0:46:23", "remaining_time": "6:59:33"}
+ {"current_steps": 430, "total_steps": 4218, "loss": 0.9973, "lr": 9.99991609608766e-06, "epoch": 0.3058321479374111, "percentage": 10.19, "elapsed_time": "0:47:30", "remaining_time": "6:58:30"}
+ {"current_steps": 440, "total_steps": 4218, "loss": 1.0118, "lr": 9.999505144928566e-06, "epoch": 0.3129445234708393, "percentage": 10.43, "elapsed_time": "0:48:40", "remaining_time": "6:57:55"}
+ {"current_steps": 450, "total_steps": 4218, "loss": 0.915, "lr": 9.998751763712045e-06, "epoch": 0.3200568990042674, "percentage": 10.67, "elapsed_time": "0:49:51", "remaining_time": "6:57:32"}
+ {"current_steps": 460, "total_steps": 4218, "loss": 0.8872, "lr": 9.997656004039284e-06, "epoch": 0.32716927453769556, "percentage": 10.91, "elapsed_time": "0:51:01", "remaining_time": "6:56:53"}
+ {"current_steps": 470, "total_steps": 4218, "loss": 0.9233, "lr": 9.99621794096192e-06, "epoch": 0.33428165007112376, "percentage": 11.14, "elapsed_time": "0:52:02", "remaining_time": "6:55:02"}
+ {"current_steps": 480, "total_steps": 4218, "loss": 0.8156, "lr": 9.994437672976904e-06, "epoch": 0.3413940256045519, "percentage": 11.38, "elapsed_time": "0:53:05", "remaining_time": "6:53:27"}
+ {"current_steps": 490, "total_steps": 4218, "loss": 0.8749, "lr": 9.99231532201976e-06, "epoch": 0.3485064011379801, "percentage": 11.62, "elapsed_time": "0:54:16", "remaining_time": "6:52:58"}
+ {"current_steps": 500, "total_steps": 4218, "loss": 0.8598, "lr": 9.989851033456224e-06, "epoch": 0.35561877667140823, "percentage": 11.85, "elapsed_time": "0:55:18", "remaining_time": "6:51:19"}
+ {"current_steps": 510, "total_steps": 4218, "loss": 0.8118, "lr": 9.987044976072298e-06, "epoch": 0.3627311522048364, "percentage": 12.09, "elapsed_time": "0:56:27", "remaining_time": "6:50:29"}
+ {"current_steps": 520, "total_steps": 4218, "loss": 0.8227, "lr": 9.983897342062681e-06, "epoch": 0.36984352773826457, "percentage": 12.33, "elapsed_time": "0:57:18", "remaining_time": "6:47:35"}
+ {"current_steps": 530, "total_steps": 4218, "loss": 0.8132, "lr": 9.98040834701761e-06, "epoch": 0.37695590327169276, "percentage": 12.57, "elapsed_time": "0:58:24", "remaining_time": "6:46:28"}
+ {"current_steps": 540, "total_steps": 4218, "loss": 0.7806, "lr": 9.97657822990809e-06, "epoch": 0.3840682788051209, "percentage": 12.8, "elapsed_time": "0:59:40", "remaining_time": "6:46:29"}
+ {"current_steps": 550, "total_steps": 4218, "loss": 0.8095, "lr": 9.972407253069527e-06, "epoch": 0.3911806543385491, "percentage": 13.04, "elapsed_time": "1:00:47", "remaining_time": "6:45:27"}
+ {"current_steps": 560, "total_steps": 4218, "loss": 0.7911, "lr": 9.967895702183767e-06, "epoch": 0.39829302987197723, "percentage": 13.28, "elapsed_time": "1:01:47", "remaining_time": "6:43:39"}
+ {"current_steps": 570, "total_steps": 4218, "loss": 0.7712, "lr": 9.963043886259518e-06, "epoch": 0.40540540540540543, "percentage": 13.51, "elapsed_time": "1:02:57", "remaining_time": "6:42:56"}
+ {"current_steps": 580, "total_steps": 4218, "loss": 0.7634, "lr": 9.957852137611187e-06, "epoch": 0.41251778093883357, "percentage": 13.75, "elapsed_time": "1:04:15", "remaining_time": "6:43:04"}
+ {"current_steps": 590, "total_steps": 4218, "loss": 0.6903, "lr": 9.952320811836129e-06, "epoch": 0.41963015647226176, "percentage": 13.99, "elapsed_time": "1:05:28", "remaining_time": "6:42:38"}
+ {"current_steps": 600, "total_steps": 4218, "loss": 0.7238, "lr": 9.94645028779028e-06, "epoch": 0.4267425320056899, "percentage": 14.22, "elapsed_time": "1:06:30", "remaining_time": "6:41:02"}
+ {"current_steps": 610, "total_steps": 4218, "loss": 0.694, "lr": 9.94024096756221e-06, "epoch": 0.43385490753911804, "percentage": 14.46, "elapsed_time": "1:07:37", "remaining_time": "6:40:01"}
+ {"current_steps": 620, "total_steps": 4218, "loss": 0.7057, "lr": 9.933693276445588e-06, "epoch": 0.44096728307254623, "percentage": 14.7, "elapsed_time": "1:08:33", "remaining_time": "6:37:51"}
+ {"current_steps": 630, "total_steps": 4218, "loss": 0.7001, "lr": 9.92680766291005e-06, "epoch": 0.4480796586059744, "percentage": 14.94, "elapsed_time": "1:09:40", "remaining_time": "6:36:50"}
+ {"current_steps": 640, "total_steps": 4218, "loss": 0.6451, "lr": 9.91958459857048e-06, "epoch": 0.45519203413940257, "percentage": 15.17, "elapsed_time": "1:10:56", "remaining_time": "6:36:38"}
+ {"current_steps": 650, "total_steps": 4218, "loss": 0.6539, "lr": 9.912024578154706e-06, "epoch": 0.4623044096728307, "percentage": 15.41, "elapsed_time": "1:11:59", "remaining_time": "6:35:08"}
+ {"current_steps": 660, "total_steps": 4218, "loss": 0.6383, "lr": 9.904128119469625e-06, "epoch": 0.4694167852062589, "percentage": 15.65, "elapsed_time": "1:12:57", "remaining_time": "6:33:17"}
+ {"current_steps": 670, "total_steps": 4218, "loss": 0.6319, "lr": 9.895895763365722e-06, "epoch": 0.47652916073968704, "percentage": 15.88, "elapsed_time": "1:13:56", "remaining_time": "6:31:31"}
+ {"current_steps": 680, "total_steps": 4218, "loss": 0.589, "lr": 9.88732807370004e-06, "epoch": 0.48364153627311524, "percentage": 16.12, "elapsed_time": "1:15:07", "remaining_time": "6:30:53"}
+ {"current_steps": 690, "total_steps": 4218, "loss": 0.5236, "lr": 9.878425637297549e-06, "epoch": 0.4907539118065434, "percentage": 16.36, "elapsed_time": "1:16:21", "remaining_time": "6:30:23"}
+ {"current_steps": 700, "total_steps": 4218, "loss": 0.524, "lr": 9.869189063910959e-06, "epoch": 0.49786628733997157, "percentage": 16.6, "elapsed_time": "1:17:43", "remaining_time": "6:30:36"}
+ {"current_steps": 710, "total_steps": 4218, "loss": 0.5336, "lr": 9.859618986178953e-06, "epoch": 0.5049786628733998, "percentage": 16.83, "elapsed_time": "1:18:29", "remaining_time": "6:27:48"}
+ {"current_steps": 720, "total_steps": 4218, "loss": 0.5202, "lr": 9.84971605958286e-06, "epoch": 0.5120910384068279, "percentage": 17.07, "elapsed_time": "1:19:22", "remaining_time": "6:25:36"}
+ {"current_steps": 730, "total_steps": 4218, "loss": 0.4938, "lr": 9.839480962401753e-06, "epoch": 0.519203413940256, "percentage": 17.31, "elapsed_time": "1:20:16", "remaining_time": "6:23:34"}
+ {"current_steps": 740, "total_steps": 4218, "loss": 0.4503, "lr": 9.828914395665996e-06, "epoch": 0.5263157894736842, "percentage": 17.54, "elapsed_time": "1:21:24", "remaining_time": "6:22:39"}
+ {"current_steps": 750, "total_steps": 4218, "loss": 0.5067, "lr": 9.818017083109233e-06, "epoch": 0.5334281650071123, "percentage": 17.78, "elapsed_time": "1:22:28", "remaining_time": "6:21:20"}
+ {"current_steps": 760, "total_steps": 4218, "loss": 0.4296, "lr": 9.8067897711188e-06, "epoch": 0.5405405405405406, "percentage": 18.02, "elapsed_time": "1:23:36", "remaining_time": "6:20:23"}
+ {"current_steps": 770, "total_steps": 4218, "loss": 0.422, "lr": 9.795233228684631e-06, "epoch": 0.5476529160739687, "percentage": 18.26, "elapsed_time": "1:24:39", "remaining_time": "6:19:05"}
+ {"current_steps": 780, "total_steps": 4218, "loss": 0.4352, "lr": 9.783348247346558e-06, "epoch": 0.5547652916073968, "percentage": 18.49, "elapsed_time": "1:25:48", "remaining_time": "6:18:12"}
+ {"current_steps": 790, "total_steps": 4218, "loss": 0.3788, "lr": 9.771135641140117e-06, "epoch": 0.561877667140825, "percentage": 18.73, "elapsed_time": "1:26:59", "remaining_time": "6:17:29"}
+ {"current_steps": 800, "total_steps": 4218, "loss": 0.4512, "lr": 9.758596246540782e-06, "epoch": 0.5689900426742532, "percentage": 18.97, "elapsed_time": "1:27:58", "remaining_time": "6:15:54"}
+ {"current_steps": 810, "total_steps": 4218, "loss": 0.4286, "lr": 9.74573092240668e-06, "epoch": 0.5761024182076814, "percentage": 19.2, "elapsed_time": "1:29:14", "remaining_time": "6:15:28"}
+ {"current_steps": 820, "total_steps": 4218, "loss": 0.3976, "lr": 9.732540549919758e-06, "epoch": 0.5832147937411095, "percentage": 19.44, "elapsed_time": "1:30:13", "remaining_time": "6:13:53"}
+ {"current_steps": 830, "total_steps": 4218, "loss": 0.3845, "lr": 9.719026032525432e-06, "epoch": 0.5903271692745377, "percentage": 19.68, "elapsed_time": "1:31:16", "remaining_time": "6:12:34"}
+ {"current_steps": 840, "total_steps": 4218, "loss": 0.3761, "lr": 9.70518829587071e-06, "epoch": 0.5974395448079659, "percentage": 19.91, "elapsed_time": "1:32:30", "remaining_time": "6:11:59"}
+ {"current_steps": 850, "total_steps": 4218, "loss": 0.3663, "lr": 9.691028287740783e-06, "epoch": 0.604551920341394, "percentage": 20.15, "elapsed_time": "1:33:40", "remaining_time": "6:11:08"}
+ {"current_steps": 860, "total_steps": 4218, "loss": 0.3683, "lr": 9.67654697799412e-06, "epoch": 0.6116642958748222, "percentage": 20.39, "elapsed_time": "1:34:40", "remaining_time": "6:09:39"}
+ {"current_steps": 870, "total_steps": 4218, "loss": 0.3302, "lr": 9.661745358496033e-06, "epoch": 0.6187766714082503, "percentage": 20.63, "elapsed_time": "1:35:50", "remaining_time": "6:08:50"}
+ {"current_steps": 880, "total_steps": 4218, "loss": 0.3714, "lr": 9.64662444305074e-06, "epoch": 0.6258890469416786, "percentage": 20.86, "elapsed_time": "1:36:55", "remaining_time": "6:07:39"}
+ {"current_steps": 890, "total_steps": 4218, "loss": 0.3214, "lr": 9.631185267331937e-06, "epoch": 0.6330014224751067, "percentage": 21.1, "elapsed_time": "1:38:06", "remaining_time": "6:06:49"}
+ {"current_steps": 900, "total_steps": 4218, "loss": 0.3151, "lr": 9.615428888811842e-06, "epoch": 0.6401137980085349, "percentage": 21.34, "elapsed_time": "1:39:16", "remaining_time": "6:05:57"}
+ {"current_steps": 910, "total_steps": 4218, "loss": 0.3134, "lr": 9.59935638668879e-06, "epoch": 0.647226173541963, "percentage": 21.57, "elapsed_time": "1:40:22", "remaining_time": "6:04:51"}
+ {"current_steps": 920, "total_steps": 4218, "loss": 0.2826, "lr": 9.582968861813295e-06, "epoch": 0.6543385490753911, "percentage": 21.81, "elapsed_time": "1:41:26", "remaining_time": "6:03:37"}
+ {"current_steps": 930, "total_steps": 4218, "loss": 0.3272, "lr": 9.566267436612662e-06, "epoch": 0.6614509246088194, "percentage": 22.05, "elapsed_time": "1:42:29", "remaining_time": "6:02:22"}
+ {"current_steps": 940, "total_steps": 4218, "loss": 0.2838, "lr": 9.549253255014105e-06, "epoch": 0.6685633001422475, "percentage": 22.29, "elapsed_time": "1:44:02", "remaining_time": "6:02:49"}
+ {"current_steps": 950, "total_steps": 4218, "loss": 0.2676, "lr": 9.531927482366398e-06, "epoch": 0.6756756756756757, "percentage": 22.52, "elapsed_time": "1:45:01", "remaining_time": "6:01:18"}
+ {"current_steps": 960, "total_steps": 4218, "loss": 0.2615, "lr": 9.514291305360053e-06, "epoch": 0.6827880512091038, "percentage": 22.76, "elapsed_time": "1:45:57", "remaining_time": "5:59:34"}
+ {"current_steps": 970, "total_steps": 4218, "loss": 0.2232, "lr": 9.496345931946039e-06, "epoch": 0.689900426742532, "percentage": 23.0, "elapsed_time": "1:47:02", "remaining_time": "5:58:26"}
+ {"current_steps": 980, "total_steps": 4218, "loss": 0.2628, "lr": 9.47809259125306e-06, "epoch": 0.6970128022759602, "percentage": 23.23, "elapsed_time": "1:47:57", "remaining_time": "5:56:43"}
+ {"current_steps": 990, "total_steps": 4218, "loss": 0.2404, "lr": 9.459532533503347e-06, "epoch": 0.7041251778093883, "percentage": 23.47, "elapsed_time": "1:49:17", "remaining_time": "5:56:21"}
+ {"current_steps": 1000, "total_steps": 4218, "loss": 0.2259, "lr": 9.440667029927043e-06, "epoch": 0.7112375533428165, "percentage": 23.71, "elapsed_time": "1:50:23", "remaining_time": "5:55:12"}
+ {"current_steps": 1010, "total_steps": 4218, "loss": 0.208, "lr": 9.421497372675133e-06, "epoch": 0.7183499288762447, "percentage": 23.94, "elapsed_time": "1:51:23", "remaining_time": "5:53:48"}
+ {"current_steps": 1020, "total_steps": 4218, "loss": 0.2277, "lr": 9.402024874730928e-06, "epoch": 0.7254623044096729, "percentage": 24.18, "elapsed_time": "1:52:21", "remaining_time": "5:52:17"}
+ {"current_steps": 1030, "total_steps": 4218, "loss": 0.1926, "lr": 9.382250869820146e-06, "epoch": 0.732574679943101, "percentage": 24.42, "elapsed_time": "1:53:26", "remaining_time": "5:51:06"}
+ {"current_steps": 1040, "total_steps": 4218, "loss": 0.2299, "lr": 9.36217671231956e-06, "epoch": 0.7396870554765291, "percentage": 24.66, "elapsed_time": "1:54:42", "remaining_time": "5:50:30"}
+ {"current_steps": 1050, "total_steps": 4218, "loss": 0.1708, "lr": 9.341803777164228e-06, "epoch": 0.7467994310099573, "percentage": 24.89, "elapsed_time": "1:55:40", "remaining_time": "5:48:59"}
+ {"current_steps": 1060, "total_steps": 4218, "loss": 0.2072, "lr": 9.321133459753322e-06, "epoch": 0.7539118065433855, "percentage": 25.13, "elapsed_time": "1:56:50", "remaining_time": "5:48:07"}
+ {"current_steps": 1070, "total_steps": 4218, "loss": 0.1875, "lr": 9.300167175854564e-06, "epoch": 0.7610241820768137, "percentage": 25.37, "elapsed_time": "1:57:52", "remaining_time": "5:46:46"}
+ {"current_steps": 1080, "total_steps": 4218, "loss": 0.173, "lr": 9.278906361507238e-06, "epoch": 0.7681365576102418, "percentage": 25.6, "elapsed_time": "1:58:48", "remaining_time": "5:45:12"}
+ {"current_steps": 1090, "total_steps": 4218, "loss": 0.1489, "lr": 9.257352472923842e-06, "epoch": 0.7752489331436699, "percentage": 25.84, "elapsed_time": "1:59:51", "remaining_time": "5:43:56"}
+ {"current_steps": 1100, "total_steps": 4218, "loss": 0.1423, "lr": 9.235506986390346e-06, "epoch": 0.7823613086770982, "percentage": 26.08, "elapsed_time": "2:00:57", "remaining_time": "5:42:53"}
+ {"current_steps": 1110, "total_steps": 4218, "loss": 0.1564, "lr": 9.213371398165077e-06, "epoch": 0.7894736842105263, "percentage": 26.32, "elapsed_time": "2:02:05", "remaining_time": "5:41:51"}
+ {"current_steps": 1120, "total_steps": 4218, "loss": 0.1872, "lr": 9.190947224376238e-06, "epoch": 0.7965860597439545, "percentage": 26.55, "elapsed_time": "2:03:03", "remaining_time": "5:40:24"}
+ {"current_steps": 1130, "total_steps": 4218, "loss": 0.1483, "lr": 9.168236000918063e-06, "epoch": 0.8036984352773826, "percentage": 26.79, "elapsed_time": "2:04:15", "remaining_time": "5:39:33"}
+ {"current_steps": 1140, "total_steps": 4218, "loss": 0.1272, "lr": 9.145239283345618e-06, "epoch": 0.8108108108108109, "percentage": 27.03, "elapsed_time": "2:05:24", "remaining_time": "5:38:36"}
+ {"current_steps": 1150, "total_steps": 4218, "loss": 0.1361, "lr": 9.121958646768251e-06, "epoch": 0.817923186344239, "percentage": 27.26, "elapsed_time": "2:06:36", "remaining_time": "5:37:45"}
+ {"current_steps": 1160, "total_steps": 4218, "loss": 0.1001, "lr": 9.09839568574173e-06, "epoch": 0.8250355618776671, "percentage": 27.5, "elapsed_time": "2:07:40", "remaining_time": "5:36:35"}
+ {"current_steps": 1170, "total_steps": 4218, "loss": 0.1193, "lr": 9.074552014158994e-06, "epoch": 0.8321479374110953, "percentage": 27.74, "elapsed_time": "2:08:53", "remaining_time": "5:35:46"}
+ {"current_steps": 1180, "total_steps": 4218, "loss": 0.1122, "lr": 9.050429265139647e-06, "epoch": 0.8392603129445235, "percentage": 27.98, "elapsed_time": "2:09:51", "remaining_time": "5:34:18"}
+ {"current_steps": 1190, "total_steps": 4218, "loss": 0.1345, "lr": 9.026029090918076e-06, "epoch": 0.8463726884779517, "percentage": 28.21, "elapsed_time": "2:11:14", "remaining_time": "5:33:57"}
+ {"current_steps": 1200, "total_steps": 4218, "loss": 0.1134, "lr": 9.001353162730297e-06, "epoch": 0.8534850640113798, "percentage": 28.45, "elapsed_time": "2:12:19", "remaining_time": "5:32:48"}
+ {"current_steps": 1210, "total_steps": 4218, "loss": 0.1026, "lr": 8.976403170699486e-06, "epoch": 0.8605974395448079, "percentage": 28.69, "elapsed_time": "2:13:14", "remaining_time": "5:31:14"}
+ {"current_steps": 1220, "total_steps": 4218, "loss": 0.0967, "lr": 8.951180823720212e-06, "epoch": 0.8677098150782361, "percentage": 28.92, "elapsed_time": "2:14:32", "remaining_time": "5:30:37"}
+ {"current_steps": 1230, "total_steps": 4218, "loss": 0.0819, "lr": 8.925687849341398e-06, "epoch": 0.8748221906116643, "percentage": 29.16, "elapsed_time": "2:15:44", "remaining_time": "5:29:46"}
+ {"current_steps": 1240, "total_steps": 4218, "loss": 0.0931, "lr": 8.899925993647994e-06, "epoch": 0.8819345661450925, "percentage": 29.4, "elapsed_time": "2:16:40", "remaining_time": "5:28:14"}
+ {"current_steps": 1250, "total_steps": 4218, "loss": 0.0888, "lr": 8.873897021141378e-06, "epoch": 0.8890469416785206, "percentage": 29.63, "elapsed_time": "2:17:43", "remaining_time": "5:27:01"}
+ {"current_steps": 1260, "total_steps": 4218, "loss": 0.0839, "lr": 8.847602714618504e-06, "epoch": 0.8961593172119487, "percentage": 29.87, "elapsed_time": "2:18:51", "remaining_time": "5:25:58"}
+ {"current_steps": 1270, "total_steps": 4218, "loss": 0.0878, "lr": 8.821044875049796e-06, "epoch": 0.903271692745377, "percentage": 30.11, "elapsed_time": "2:19:59", "remaining_time": "5:24:57"}
+ {"current_steps": 1280, "total_steps": 4218, "loss": 0.0866, "lr": 8.794225321455788e-06, "epoch": 0.9103840682788051, "percentage": 30.35, "elapsed_time": "2:20:52", "remaining_time": "5:23:20"}
+ {"current_steps": 1290, "total_steps": 4218, "loss": 0.0849, "lr": 8.767145890782542e-06, "epoch": 0.9174964438122333, "percentage": 30.58, "elapsed_time": "2:22:00", "remaining_time": "5:22:18"}
+ {"current_steps": 1300, "total_steps": 4218, "loss": 0.0773, "lr": 8.739808437775825e-06, "epoch": 0.9246088193456614, "percentage": 30.82, "elapsed_time": "2:23:16", "remaining_time": "5:21:35"}
+ {"current_steps": 1310, "total_steps": 4218, "loss": 0.0887, "lr": 8.71221483485407e-06, "epoch": 0.9317211948790897, "percentage": 31.06, "elapsed_time": "2:24:21", "remaining_time": "5:20:26"}
+ {"current_steps": 1320, "total_steps": 4218, "loss": 0.0739, "lr": 8.684366971980139e-06, "epoch": 0.9388335704125178, "percentage": 31.29, "elapsed_time": "2:25:25", "remaining_time": "5:19:17"}
+ {"current_steps": 1330, "total_steps": 4218, "loss": 0.0757, "lr": 8.656266756531857e-06, "epoch": 0.9459459459459459, "percentage": 31.53, "elapsed_time": "2:26:28", "remaining_time": "5:18:04"}
+ {"current_steps": 1340, "total_steps": 4218, "loss": 0.0695, "lr": 8.627916113171396e-06, "epoch": 0.9530583214793741, "percentage": 31.77, "elapsed_time": "2:27:29", "remaining_time": "5:16:46"}
+ {"current_steps": 1350, "total_steps": 4218, "loss": 0.0703, "lr": 8.599316983713419e-06, "epoch": 0.9601706970128022, "percentage": 32.01, "elapsed_time": "2:28:41", "remaining_time": "5:15:53"}
+ {"current_steps": 1360, "total_steps": 4218, "loss": 0.062, "lr": 8.570471326992105e-06, "epoch": 0.9672830725462305, "percentage": 32.24, "elapsed_time": "2:29:39", "remaining_time": "5:14:29"}
+ {"current_steps": 1370, "total_steps": 4218, "loss": 0.0755, "lr": 8.54138111872697e-06, "epoch": 0.9743954480796586, "percentage": 32.48, "elapsed_time": "2:30:40", "remaining_time": "5:13:14"}
+ {"current_steps": 1380, "total_steps": 4218, "loss": 0.0656, "lr": 8.512048351387551e-06, "epoch": 0.9815078236130867, "percentage": 32.72, "elapsed_time": "2:31:45", "remaining_time": "5:12:05"}
+ {"current_steps": 1390, "total_steps": 4218, "loss": 0.0659, "lr": 8.482475034056927e-06, "epoch": 0.9886201991465149, "percentage": 32.95, "elapsed_time": "2:32:33", "remaining_time": "5:10:23"}
+ {"current_steps": 1400, "total_steps": 4218, "loss": 0.0576, "lr": 8.452663192294121e-06, "epoch": 0.9957325746799431, "percentage": 33.19, "elapsed_time": "2:33:40", "remaining_time": "5:09:20"}
+ {"current_steps": 1410, "total_steps": 4218, "loss": 0.0518, "lr": 8.42261486799536e-06, "epoch": 1.0028449502133712, "percentage": 33.43, "elapsed_time": "2:34:48", "remaining_time": "5:08:18"}
+ {"current_steps": 1420, "total_steps": 4218, "loss": 0.0363, "lr": 8.392332119254214e-06, "epoch": 1.0099573257467995, "percentage": 33.67, "elapsed_time": "2:35:55", "remaining_time": "5:07:14"}
+ {"current_steps": 1430, "total_steps": 4218, "loss": 0.0345, "lr": 8.361817020220647e-06, "epoch": 1.0170697012802277, "percentage": 33.9, "elapsed_time": "2:37:00", "remaining_time": "5:06:07"}
+ {"current_steps": 1440, "total_steps": 4218, "loss": 0.039, "lr": 8.331071660958936e-06, "epoch": 1.0241820768136558, "percentage": 34.14, "elapsed_time": "2:37:58", "remaining_time": "5:04:45"}
+ {"current_steps": 1450, "total_steps": 4218, "loss": 0.0365, "lr": 8.300098147304523e-06, "epoch": 1.031294452347084, "percentage": 34.38, "elapsed_time": "2:39:05", "remaining_time": "5:03:42"}
+ {"current_steps": 1460, "total_steps": 4218, "loss": 0.0431, "lr": 8.268898600719785e-06, "epoch": 1.038406827880512, "percentage": 34.61, "elapsed_time": "2:40:14", "remaining_time": "5:02:42"}
+ {"current_steps": 1470, "total_steps": 4218, "loss": 0.0429, "lr": 8.237475158148724e-06, "epoch": 1.0455192034139402, "percentage": 34.85, "elapsed_time": "2:41:22", "remaining_time": "5:01:40"}
+ {"current_steps": 1480, "total_steps": 4218, "loss": 0.0397, "lr": 8.205829971870602e-06, "epoch": 1.0526315789473684, "percentage": 35.09, "elapsed_time": "2:42:28", "remaining_time": "5:00:34"}
+ {"current_steps": 1490, "total_steps": 4218, "loss": 0.0344, "lr": 8.173965209352524e-06, "epoch": 1.0597439544807965, "percentage": 35.32, "elapsed_time": "2:43:25", "remaining_time": "4:59:11"}
+ {"current_steps": 1500, "total_steps": 4218, "loss": 0.0464, "lr": 8.14188305310099e-06, "epoch": 1.0668563300142249, "percentage": 35.56, "elapsed_time": "2:44:40", "remaining_time": "4:58:23"}
+ {"current_steps": 1510, "total_steps": 4218, "loss": 0.0375, "lr": 8.109585700512395e-06, "epoch": 1.073968705547653, "percentage": 35.8, "elapsed_time": "2:45:52", "remaining_time": "4:57:29"}
+ {"current_steps": 1520, "total_steps": 4218, "loss": 0.0389, "lr": 8.077075363722542e-06, "epoch": 1.0810810810810811, "percentage": 36.04, "elapsed_time": "2:46:55", "remaining_time": "4:56:17"}
+ {"current_steps": 1530, "total_steps": 4218, "loss": 0.0436, "lr": 8.044354269455109e-06, "epoch": 1.0881934566145093, "percentage": 36.27, "elapsed_time": "2:48:02", "remaining_time": "4:55:12"}
+ {"current_steps": 1540, "total_steps": 4218, "loss": 0.0357, "lr": 8.011424658869142e-06, "epoch": 1.0953058321479374, "percentage": 36.51, "elapsed_time": "2:49:01", "remaining_time": "4:53:55"}
+ {"current_steps": 1550, "total_steps": 4218, "loss": 0.0362, "lr": 7.978288787405556e-06, "epoch": 1.1024182076813656, "percentage": 36.75, "elapsed_time": "2:50:05", "remaining_time": "4:52:46"}
+ {"current_steps": 1560, "total_steps": 4218, "loss": 0.0345, "lr": 7.944948924632643e-06, "epoch": 1.1095305832147937, "percentage": 36.98, "elapsed_time": "2:51:20", "remaining_time": "4:51:56"}
+ {"current_steps": 1570, "total_steps": 4218, "loss": 0.0354, "lr": 7.911407354090634e-06, "epoch": 1.1166429587482218, "percentage": 37.22, "elapsed_time": "2:52:26", "remaining_time": "4:50:50"}
+ {"current_steps": 1580, "total_steps": 4218, "loss": 0.0346, "lr": 7.877666373135287e-06, "epoch": 1.12375533428165, "percentage": 37.46, "elapsed_time": "2:53:26", "remaining_time": "4:49:34"}
+ {"current_steps": 1590, "total_steps": 4218, "loss": 0.038, "lr": 7.84372829278053e-06, "epoch": 1.1308677098150781, "percentage": 37.7, "elapsed_time": "2:54:19", "remaining_time": "4:48:07"}
+ {"current_steps": 1600, "total_steps": 4218, "loss": 0.0327, "lr": 7.809595437540189e-06, "epoch": 1.1379800853485065, "percentage": 37.93, "elapsed_time": "2:55:22", "remaining_time": "4:46:56"}
+ {"current_steps": 1610, "total_steps": 4218, "loss": 0.0256, "lr": 7.775270145268755e-06, "epoch": 1.1450924608819346, "percentage": 38.17, "elapsed_time": "2:56:22", "remaining_time": "4:45:41"}
+ {"current_steps": 1620, "total_steps": 4218, "loss": 0.039, "lr": 7.740754767001278e-06, "epoch": 1.1522048364153628, "percentage": 38.41, "elapsed_time": "2:57:40", "remaining_time": "4:44:56"}
+ {"current_steps": 1630, "total_steps": 4218, "loss": 0.0353, "lr": 7.706051666792318e-06, "epoch": 1.159317211948791, "percentage": 38.64, "elapsed_time": "2:58:40", "remaining_time": "4:43:41"}
+ {"current_steps": 1640, "total_steps": 4218, "loss": 0.0353, "lr": 7.671163221554043e-06, "epoch": 1.166429587482219, "percentage": 38.88, "elapsed_time": "2:59:36", "remaining_time": "4:42:20"}
+ {"current_steps": 1650, "total_steps": 4218, "loss": 0.0374, "lr": 7.636091820893417e-06, "epoch": 1.1735419630156472, "percentage": 39.12, "elapsed_time": "3:00:42", "remaining_time": "4:41:15"}
+ {"current_steps": 1660, "total_steps": 4218, "loss": 0.0363, "lr": 7.600839866948528e-06, "epoch": 1.1806543385490753, "percentage": 39.36, "elapsed_time": "3:01:40", "remaining_time": "4:39:57"}
+ {"current_steps": 1670, "total_steps": 4218, "loss": 0.0349, "lr": 7.565409774224066e-06, "epoch": 1.1877667140825037, "percentage": 39.59, "elapsed_time": "3:02:58", "remaining_time": "4:39:09"}
+ {"current_steps": 1680, "total_steps": 4218, "loss": 0.0307, "lr": 7.529803969425941e-06, "epoch": 1.1948790896159318, "percentage": 39.83, "elapsed_time": "3:04:03", "remaining_time": "4:38:03"}
+ {"current_steps": 1690, "total_steps": 4218, "loss": 0.0322, "lr": 7.494024891295075e-06, "epoch": 1.20199146514936, "percentage": 40.07, "elapsed_time": "3:05:22", "remaining_time": "4:37:17"}
+ {"current_steps": 1700, "total_steps": 4218, "loss": 0.0293, "lr": 7.458074990440363e-06, "epoch": 1.209103840682788, "percentage": 40.3, "elapsed_time": "3:06:08", "remaining_time": "4:35:43"}
+ {"current_steps": 1710, "total_steps": 4218, "loss": 0.0344, "lr": 7.421956729170823e-06, "epoch": 1.2162162162162162, "percentage": 40.54, "elapsed_time": "3:07:09", "remaining_time": "4:34:29"}
+ {"current_steps": 1720, "total_steps": 4218, "loss": 0.0351, "lr": 7.385672581326954e-06, "epoch": 1.2233285917496444, "percentage": 40.78, "elapsed_time": "3:08:15", "remaining_time": "4:33:24"}
+ {"current_steps": 1730, "total_steps": 4218, "loss": 0.0353, "lr": 7.34922503211128e-06, "epoch": 1.2304409672830725, "percentage": 41.01, "elapsed_time": "3:09:18", "remaining_time": "4:32:15"}
+ {"current_steps": 1740, "total_steps": 4218, "loss": 0.03, "lr": 7.312616577918149e-06, "epoch": 1.2375533428165006, "percentage": 41.25, "elapsed_time": "3:10:26", "remaining_time": "4:31:13"}
+ {"current_steps": 1750, "total_steps": 4218, "loss": 0.0267, "lr": 7.2758497261627345e-06, "epoch": 1.2446657183499288, "percentage": 41.49, "elapsed_time": "3:11:41", "remaining_time": "4:30:20"}
+ {"current_steps": 1760, "total_steps": 4218, "loss": 0.0288, "lr": 7.238926995109306e-06, "epoch": 1.251778093883357, "percentage": 41.73, "elapsed_time": "3:12:57", "remaining_time": "4:29:29"}
+ {"current_steps": 1770, "total_steps": 4218, "loss": 0.0364, "lr": 7.201850913698736e-06, "epoch": 1.2588904694167853, "percentage": 41.96, "elapsed_time": "3:13:57", "remaining_time": "4:28:14"}
+ {"current_steps": 1780, "total_steps": 4218, "loss": 0.0252, "lr": 7.164624021375294e-06, "epoch": 1.2660028449502134, "percentage": 42.2, "elapsed_time": "3:15:14", "remaining_time": "4:27:24"}
+ {"current_steps": 1790, "total_steps": 4218, "loss": 0.0266, "lr": 7.12724886791271e-06, "epoch": 1.2731152204836416, "percentage": 42.44, "elapsed_time": "3:16:19", "remaining_time": "4:26:17"}
+ {"current_steps": 1800, "total_steps": 4218, "loss": 0.03, "lr": 7.08972801323953e-06, "epoch": 1.2802275960170697, "percentage": 42.67, "elapsed_time": "3:17:17", "remaining_time": "4:25:01"}
+ {"current_steps": 1810, "total_steps": 4218, "loss": 0.0235, "lr": 7.052064027263785e-06, "epoch": 1.2873399715504978, "percentage": 42.91, "elapsed_time": "3:18:17", "remaining_time": "4:23:48"}
+ {"current_steps": 1820, "total_steps": 4218, "loss": 0.0243, "lr": 7.014259489696968e-06, "epoch": 1.294452347083926, "percentage": 43.15, "elapsed_time": "3:19:19", "remaining_time": "4:22:37"}
+ {"current_steps": 1830, "total_steps": 4218, "loss": 0.0249, "lr": 6.976316989877343e-06, "epoch": 1.3015647226173541, "percentage": 43.39, "elapsed_time": "3:20:34", "remaining_time": "4:21:43"}
+ {"current_steps": 1840, "total_steps": 4218, "loss": 0.0263, "lr": 6.938239126592592e-06, "epoch": 1.3086770981507825, "percentage": 43.62, "elapsed_time": "3:21:53", "remaining_time": "4:20:55"}
+ {"current_steps": 1850, "total_steps": 4218, "loss": 0.0298, "lr": 6.90002850790182e-06, "epoch": 1.3157894736842106, "percentage": 43.86, "elapsed_time": "3:22:53", "remaining_time": "4:19:42"}
+ {"current_steps": 1860, "total_steps": 4218, "loss": 0.027, "lr": 6.861687750956922e-06, "epoch": 1.3229018492176388, "percentage": 44.1, "elapsed_time": "3:24:04", "remaining_time": "4:18:43"}
+ {"current_steps": 1870, "total_steps": 4218, "loss": 0.0245, "lr": 6.823219481823318e-06, "epoch": 1.330014224751067, "percentage": 44.33, "elapsed_time": "3:25:07", "remaining_time": "4:17:33"}
+ {"current_steps": 1880, "total_steps": 4218, "loss": 0.0198, "lr": 6.784626335300102e-06, "epoch": 1.337126600284495, "percentage": 44.57, "elapsed_time": "3:26:18", "remaining_time": "4:16:34"}
+ {"current_steps": 1890, "total_steps": 4218, "loss": 0.0274, "lr": 6.745910954739563e-06, "epoch": 1.3442389758179232, "percentage": 44.81, "elapsed_time": "3:27:28", "remaining_time": "4:15:33"}
+ {"current_steps": 1900, "total_steps": 4218, "loss": 0.0268, "lr": 6.707075991866143e-06, "epoch": 1.3513513513513513, "percentage": 45.05, "elapsed_time": "3:28:46", "remaining_time": "4:14:42"}
+ {"current_steps": 1910, "total_steps": 4218, "loss": 0.0274, "lr": 6.668124106594813e-06, "epoch": 1.3584637268847795, "percentage": 45.28, "elapsed_time": "3:29:54", "remaining_time": "4:13:38"}
+ {"current_steps": 1920, "total_steps": 4218, "loss": 0.0244, "lr": 6.629057966848879e-06, "epoch": 1.3655761024182076, "percentage": 45.52, "elapsed_time": "3:31:09", "remaining_time": "4:12:44"}
+ {"current_steps": 1930, "total_steps": 4218, "loss": 0.023, "lr": 6.589880248377258e-06, "epoch": 1.3726884779516357, "percentage": 45.76, "elapsed_time": "3:32:23", "remaining_time": "4:11:47"}
+ {"current_steps": 1940, "total_steps": 4218, "loss": 0.018, "lr": 6.550593634571205e-06, "epoch": 1.379800853485064, "percentage": 45.99, "elapsed_time": "3:33:31", "remaining_time": "4:10:44"}
+ {"current_steps": 1950, "total_steps": 4218, "loss": 0.021, "lr": 6.511200816280523e-06, "epoch": 1.3869132290184922, "percentage": 46.23, "elapsed_time": "3:34:43", "remaining_time": "4:09:44"}
+ {"current_steps": 1960, "total_steps": 4218, "loss": 0.0285, "lr": 6.471704491629251e-06, "epoch": 1.3940256045519204, "percentage": 46.47, "elapsed_time": "3:35:44", "remaining_time": "4:08:32"}
+ {"current_steps": 1970, "total_steps": 4218, "loss": 0.0198, "lr": 6.432107365830872e-06, "epoch": 1.4011379800853485, "percentage": 46.7, "elapsed_time": "3:36:47", "remaining_time": "4:07:22"}
+ {"current_steps": 1980, "total_steps": 4218, "loss": 0.0244, "lr": 6.392412151003019e-06, "epoch": 1.4082503556187767, "percentage": 46.94, "elapsed_time": "3:38:00", "remaining_time": "4:06:24"}
+ {"current_steps": 1990, "total_steps": 4218, "loss": 0.0226, "lr": 6.3526215659817156e-06, "epoch": 1.4153627311522048, "percentage": 47.18, "elapsed_time": "3:39:02", "remaining_time": "4:05:14"}
+ {"current_steps": 2000, "total_steps": 4218, "loss": 0.019, "lr": 6.312738336135159e-06, "epoch": 1.422475106685633, "percentage": 47.42, "elapsed_time": "3:40:20", "remaining_time": "4:04:21"}
+ {"current_steps": 2010, "total_steps": 4218, "loss": 0.0196, "lr": 6.272765193177044e-06, "epoch": 1.4295874822190613, "percentage": 47.65, "elapsed_time": "3:41:20", "remaining_time": "4:03:08"}
+ {"current_steps": 2020, "total_steps": 4218, "loss": 0.0189, "lr": 6.23270487497947e-06, "epoch": 1.4366998577524894, "percentage": 47.89, "elapsed_time": "3:42:31", "remaining_time": "4:02:08"}
+ {"current_steps": 2030, "total_steps": 4218, "loss": 0.025, "lr": 6.192560125385412e-06, "epoch": 1.4438122332859176, "percentage": 48.13, "elapsed_time": "3:43:30", "remaining_time": "4:00:54"}
+ {"current_steps": 2040, "total_steps": 4218, "loss": 0.0184, "lr": 6.152333694020781e-06, "epoch": 1.4509246088193457, "percentage": 48.36, "elapsed_time": "3:44:26", "remaining_time": "3:59:37"}
+ {"current_steps": 2050, "total_steps": 4218, "loss": 0.023, "lr": 6.112028336106108e-06, "epoch": 1.4580369843527738, "percentage": 48.6, "elapsed_time": "3:45:23", "remaining_time": "3:58:21"}
+ {"current_steps": 2060, "total_steps": 4218, "loss": 0.0167, "lr": 6.071646812267817e-06, "epoch": 1.465149359886202, "percentage": 48.84, "elapsed_time": "3:46:38", "remaining_time": "3:57:25"}
+ {"current_steps": 2070, "total_steps": 4218, "loss": 0.0202, "lr": 6.031191888349155e-06, "epoch": 1.4722617354196301, "percentage": 49.08, "elapsed_time": "3:47:46", "remaining_time": "3:56:21"}
+ {"current_steps": 2080, "total_steps": 4218, "loss": 0.0178, "lr": 5.990666335220738e-06, "epoch": 1.4793741109530583, "percentage": 49.31, "elapsed_time": "3:48:43", "remaining_time": "3:55:06"}
+ {"current_steps": 2090, "total_steps": 4218, "loss": 0.018, "lr": 5.950072928590781e-06, "epoch": 1.4864864864864864, "percentage": 49.55, "elapsed_time": "3:49:48", "remaining_time": "3:53:59"}
+ {"current_steps": 2100, "total_steps": 4218, "loss": 0.0209, "lr": 5.909414448814971e-06, "epoch": 1.4935988620199145, "percentage": 49.79, "elapsed_time": "3:50:49", "remaining_time": "3:52:48"}
+ {"current_steps": 2110, "total_steps": 4218, "loss": 0.0192, "lr": 5.8686936807060335e-06, "epoch": 1.5007112375533427, "percentage": 50.02, "elapsed_time": "3:51:56", "remaining_time": "3:51:43"}
+ {"current_steps": 2120, "total_steps": 4218, "loss": 0.018, "lr": 5.827913413343003e-06, "epoch": 1.5078236130867708, "percentage": 50.26, "elapsed_time": "3:53:02", "remaining_time": "3:50:37"}
+ {"current_steps": 2130, "total_steps": 4218, "loss": 0.0179, "lr": 5.787076439880177e-06, "epoch": 1.5149359886201992, "percentage": 50.5, "elapsed_time": "3:54:06", "remaining_time": "3:49:29"}
+ {"current_steps": 2140, "total_steps": 4218, "loss": 0.0211, "lr": 5.746185557355814e-06, "epoch": 1.5220483641536273, "percentage": 50.73, "elapsed_time": "3:55:22", "remaining_time": "3:48:33"}
+ {"current_steps": 2150, "total_steps": 4218, "loss": 0.0168, "lr": 5.70524356650056e-06, "epoch": 1.5291607396870555, "percentage": 50.97, "elapsed_time": "3:56:25", "remaining_time": "3:47:24"}
+ {"current_steps": 2160, "total_steps": 4218, "loss": 0.0172, "lr": 5.664253271545603e-06, "epoch": 1.5362731152204836, "percentage": 51.21, "elapsed_time": "3:57:27", "remaining_time": "3:46:15"}
+ {"current_steps": 2170, "total_steps": 4218, "loss": 0.0178, "lr": 5.623217480030622e-06, "epoch": 1.543385490753912, "percentage": 51.45, "elapsed_time": "3:58:30", "remaining_time": "3:45:06"}
+ {"current_steps": 2180, "total_steps": 4218, "loss": 0.0135, "lr": 5.58213900261148e-06, "epoch": 1.55049786628734, "percentage": 51.68, "elapsed_time": "3:59:34", "remaining_time": "3:43:58"}
+ {"current_steps": 2190, "total_steps": 4218, "loss": 0.0153, "lr": 5.541020652867713e-06, "epoch": 1.5576102418207682, "percentage": 51.92, "elapsed_time": "4:00:49", "remaining_time": "3:43:00"}
+ {"current_steps": 2200, "total_steps": 4218, "loss": 0.0143, "lr": 5.49986524710983e-06, "epoch": 1.5647226173541964, "percentage": 52.16, "elapsed_time": "4:01:53", "remaining_time": "3:41:52"}
+ {"current_steps": 2210, "total_steps": 4218, "loss": 0.016, "lr": 5.4586756041864065e-06, "epoch": 1.5718349928876245, "percentage": 52.39, "elapsed_time": "4:03:01", "remaining_time": "3:40:49"}
+ {"current_steps": 2220, "total_steps": 4218, "loss": 0.0168, "lr": 5.417454545291017e-06, "epoch": 1.5789473684210527, "percentage": 52.63, "elapsed_time": "4:04:16", "remaining_time": "3:39:51"}
+ {"current_steps": 2230, "total_steps": 4218, "loss": 0.0198, "lr": 5.376204893769e-06, "epoch": 1.5860597439544808, "percentage": 52.87, "elapsed_time": "4:05:23", "remaining_time": "3:38:45"}
+ {"current_steps": 2240, "total_steps": 4218, "loss": 0.0155, "lr": 5.334929474924093e-06, "epoch": 1.593172119487909, "percentage": 53.11, "elapsed_time": "4:06:24", "remaining_time": "3:37:35"}
+ {"current_steps": 2250, "total_steps": 4218, "loss": 0.0138, "lr": 5.293631115824897e-06, "epoch": 1.600284495021337, "percentage": 53.34, "elapsed_time": "4:07:26", "remaining_time": "3:36:25"}
+ {"current_steps": 2260, "total_steps": 4218, "loss": 0.0173, "lr": 5.252312645111266e-06, "epoch": 1.6073968705547652, "percentage": 53.58, "elapsed_time": "4:08:34", "remaining_time": "3:35:21"}
+ {"current_steps": 2270, "total_steps": 4218, "loss": 0.0142, "lr": 5.2109768928005454e-06, "epoch": 1.6145092460881934, "percentage": 53.82, "elapsed_time": "4:09:36", "remaining_time": "3:34:12"}
+ {"current_steps": 2280, "total_steps": 4218, "loss": 0.014, "lr": 5.169626690093751e-06, "epoch": 1.6216216216216215, "percentage": 54.05, "elapsed_time": "4:10:28", "remaining_time": "3:32:54"}
+ {"current_steps": 2290, "total_steps": 4218, "loss": 0.0127, "lr": 5.128264869181646e-06, "epoch": 1.6287339971550496, "percentage": 54.29, "elapsed_time": "4:11:33", "remaining_time": "3:31:47"}
+ {"current_steps": 2300, "total_steps": 4218, "loss": 0.011, "lr": 5.086894263050755e-06, "epoch": 1.635846372688478, "percentage": 54.53, "elapsed_time": "4:12:48", "remaining_time": "3:30:49"}
+ {"current_steps": 2310, "total_steps": 4218, "loss": 0.0111, "lr": 5.045517705289328e-06, "epoch": 1.6429587482219061, "percentage": 54.77, "elapsed_time": "4:13:45", "remaining_time": "3:29:35"}
+ {"current_steps": 2320, "total_steps": 4218, "loss": 0.0138, "lr": 5.004138029893257e-06, "epoch": 1.6500711237553343, "percentage": 55.0, "elapsed_time": "4:14:51", "remaining_time": "3:28:30"}
+ {"current_steps": 2330, "total_steps": 4218, "loss": 0.0128, "lr": 4.9627580710719734e-06, "epoch": 1.6571834992887624, "percentage": 55.24, "elapsed_time": "4:15:54", "remaining_time": "3:27:21"}
+ {"current_steps": 2340, "total_steps": 4218, "loss": 0.0128, "lr": 4.921380663054318e-06, "epoch": 1.6642958748221908, "percentage": 55.48, "elapsed_time": "4:16:55", "remaining_time": "3:26:11"}
+ {"current_steps": 2350, "total_steps": 4218, "loss": 0.014, "lr": 4.880008639894421e-06, "epoch": 1.671408250355619, "percentage": 55.71, "elapsed_time": "4:18:05", "remaining_time": "3:25:09"}
+ {"current_steps": 2360, "total_steps": 4218, "loss": 0.0144, "lr": 4.838644835277585e-06, "epoch": 1.678520625889047, "percentage": 55.95, "elapsed_time": "4:19:12", "remaining_time": "3:24:04"}
+ {"current_steps": 2370, "total_steps": 4218, "loss": 0.0109, "lr": 4.79729208232621e-06, "epoch": 1.6856330014224752, "percentage": 56.19, "elapsed_time": "4:20:30", "remaining_time": "3:23:07"}
+ {"current_steps": 2380, "total_steps": 4218, "loss": 0.0122, "lr": 4.75595321340573e-06, "epoch": 1.6927453769559033, "percentage": 56.42, "elapsed_time": "4:21:47", "remaining_time": "3:22:10"}
+ {"current_steps": 2390, "total_steps": 4218, "loss": 0.012, "lr": 4.714631059930622e-06, "epoch": 1.6998577524893315, "percentage": 56.66, "elapsed_time": "4:22:58", "remaining_time": "3:21:07"}
+ {"current_steps": 2400, "total_steps": 4218, "loss": 0.0124, "lr": 4.6733284521704816e-06, "epoch": 1.7069701280227596, "percentage": 56.9, "elapsed_time": "4:24:01", "remaining_time": "3:19:59"}
+ {"current_steps": 2410, "total_steps": 4218, "loss": 0.012, "lr": 4.632048219056159e-06, "epoch": 1.7140825035561877, "percentage": 57.14, "elapsed_time": "4:25:00", "remaining_time": "3:18:48"}
+ {"current_steps": 2420, "total_steps": 4218, "loss": 0.0149, "lr": 4.590793187986003e-06, "epoch": 1.7211948790896159, "percentage": 57.37, "elapsed_time": "4:25:57", "remaining_time": "3:17:35"}
+ {"current_steps": 2430, "total_steps": 4218, "loss": 0.0117, "lr": 4.549566184632206e-06, "epoch": 1.728307254623044, "percentage": 57.61, "elapsed_time": "4:27:03", "remaining_time": "3:16:29"}
+ {"current_steps": 2440, "total_steps": 4218, "loss": 0.0092, "lr": 4.508370032747261e-06, "epoch": 1.7354196301564722, "percentage": 57.85, "elapsed_time": "4:28:12", "remaining_time": "3:15:26"}
+ {"current_steps": 2450, "total_steps": 4218, "loss": 0.012, "lr": 4.467207553970564e-06, "epoch": 1.7425320056899003, "percentage": 58.08, "elapsed_time": "4:29:25", "remaining_time": "3:14:25"}
+ {"current_steps": 2460, "total_steps": 4218, "loss": 0.0092, "lr": 4.426081567635137e-06, "epoch": 1.7496443812233284, "percentage": 58.32, "elapsed_time": "4:30:36", "remaining_time": "3:13:22"}
+ {"current_steps": 2470, "total_steps": 4218, "loss": 0.0137, "lr": 4.3849948905745385e-06, "epoch": 1.7567567567567568, "percentage": 58.56, "elapsed_time": "4:31:48", "remaining_time": "3:12:21"}
+ {"current_steps": 2480, "total_steps": 4218, "loss": 0.0095, "lr": 4.343950336929927e-06, "epoch": 1.763869132290185, "percentage": 58.8, "elapsed_time": "4:32:53", "remaining_time": "3:11:14"}
+ {"current_steps": 2490, "total_steps": 4218, "loss": 0.0098, "lr": 4.302950717957304e-06, "epoch": 1.770981507823613, "percentage": 59.03, "elapsed_time": "4:33:54", "remaining_time": "3:10:04"}
+ {"current_steps": 2500, "total_steps": 4218, "loss": 0.0101, "lr": 4.261998841834972e-06, "epoch": 1.7780938833570412, "percentage": 59.27, "elapsed_time": "4:34:58", "remaining_time": "3:08:58"}
+ {"current_steps": 2510, "total_steps": 4218, "loss": 0.0094, "lr": 4.221097513471199e-06, "epoch": 1.7852062588904696, "percentage": 59.51, "elapsed_time": "4:36:05", "remaining_time": "3:07:52"}
+ {"current_steps": 2520, "total_steps": 4218, "loss": 0.009, "lr": 4.18024953431209e-06, "epoch": 1.7923186344238977, "percentage": 59.74, "elapsed_time": "4:37:11", "remaining_time": "3:06:46"}
+ {"current_steps": 2530, "total_steps": 4218, "loss": 0.0098, "lr": 4.13945770214971e-06, "epoch": 1.7994310099573259, "percentage": 59.98, "elapsed_time": "4:38:19", "remaining_time": "3:05:41"}
+ {"current_steps": 2540, "total_steps": 4218, "loss": 0.0077, "lr": 4.098724810930472e-06, "epoch": 1.806543385490754, "percentage": 60.22, "elapsed_time": "4:39:09", "remaining_time": "3:04:25"}
+ {"current_steps": 2550, "total_steps": 4218, "loss": 0.0069, "lr": 4.058053650563747e-06, "epoch": 1.8136557610241821, "percentage": 60.46, "elapsed_time": "4:40:22", "remaining_time": "3:03:24"}
+ {"current_steps": 2560, "total_steps": 4218, "loss": 0.0084, "lr": 4.017447006730796e-06, "epoch": 1.8207681365576103, "percentage": 60.69, "elapsed_time": "4:41:44", "remaining_time": "3:02:28"}
+ {"current_steps": 2570, "total_steps": 4218, "loss": 0.0068, "lr": 3.976907660693954e-06, "epoch": 1.8278805120910384, "percentage": 60.93, "elapsed_time": "4:42:38", "remaining_time": "3:01:14"}
+ {"current_steps": 2580, "total_steps": 4218, "loss": 0.0091, "lr": 3.936438389106154e-06, "epoch": 1.8349928876244666, "percentage": 61.17, "elapsed_time": "4:43:58", "remaining_time": "3:00:17"}
+ {"current_steps": 2590, "total_steps": 4218, "loss": 0.0105, "lr": 3.896041963820724e-06, "epoch": 1.8421052631578947, "percentage": 61.4, "elapsed_time": "4:44:57", "remaining_time": "2:59:07"}
+ {"current_steps": 2600, "total_steps": 4218, "loss": 0.0099, "lr": 3.855721151701548e-06, "epoch": 1.8492176386913228, "percentage": 61.64, "elapsed_time": "4:46:06", "remaining_time": "2:58:03"}
+ {"current_steps": 2610, "total_steps": 4218, "loss": 0.0095, "lr": 3.815478714433559e-06, "epoch": 1.856330014224751, "percentage": 61.88, "elapsed_time": "4:47:22", "remaining_time": "2:57:03"}
+ {"current_steps": 2620, "total_steps": 4218, "loss": 0.0105, "lr": 3.775317408333571e-06, "epoch": 1.863442389758179, "percentage": 62.11, "elapsed_time": "4:48:21", "remaining_time": "2:55:52"}
+ {"current_steps": 2630, "total_steps": 4218, "loss": 0.0082, "lr": 3.7352399841614996e-06, "epoch": 1.8705547652916072, "percentage": 62.35, "elapsed_time": "4:49:33", "remaining_time": "2:54:50"}
+ {"current_steps": 2640, "total_steps": 4218, "loss": 0.0087, "lr": 3.695249186931954e-06, "epoch": 1.8776671408250356, "percentage": 62.59, "elapsed_time": "4:50:36", "remaining_time": "2:53:42"}
+ {"current_steps": 2650, "total_steps": 4218, "loss": 0.0076, "lr": 3.655347755726224e-06, "epoch": 1.8847795163584637, "percentage": 62.83, "elapsed_time": "4:51:40", "remaining_time": "2:52:34"}
+ {"current_steps": 2660, "total_steps": 4218, "loss": 0.0086, "lr": 3.6155384235046674e-06, "epoch": 1.8918918918918919, "percentage": 63.06, "elapsed_time": "4:52:46", "remaining_time": "2:51:28"}
+ {"current_steps": 2670, "total_steps": 4218, "loss": 0.005, "lr": 3.5758239169195276e-06, "epoch": 1.89900426742532, "percentage": 63.3, "elapsed_time": "4:53:49", "remaining_time": "2:50:21"}
+ {"current_steps": 2680, "total_steps": 4218, "loss": 0.0072, "lr": 3.5362069561281764e-06, "epoch": 1.9061166429587484, "percentage": 63.54, "elapsed_time": "4:54:49", "remaining_time": "2:49:11"}
+ {"current_steps": 2690, "total_steps": 4218, "loss": 0.0072, "lr": 3.4966902546068016e-06, "epoch": 1.9132290184921765, "percentage": 63.77, "elapsed_time": "4:55:55", "remaining_time": "2:48:05"}
+ {"current_steps": 2700, "total_steps": 4218, "loss": 0.0073, "lr": 3.4572765189645516e-06, "epoch": 1.9203413940256047, "percentage": 64.01, "elapsed_time": "4:57:07", "remaining_time": "2:47:03"}
+ {"current_steps": 2710, "total_steps": 4218, "loss": 0.0067, "lr": 3.4179684487581555e-06, "epoch": 1.9274537695590328, "percentage": 64.25, "elapsed_time": "4:58:21", "remaining_time": "2:46:01"}
+ {"current_steps": 2720, "total_steps": 4218, "loss": 0.0075, "lr": 3.3787687363070256e-06, "epoch": 1.934566145092461, "percentage": 64.49, "elapsed_time": "4:59:38", "remaining_time": "2:45:01"}
+ {"current_steps": 2730, "total_steps": 4218, "loss": 0.0069, "lr": 3.3396800665088435e-06, "epoch": 1.941678520625889, "percentage": 64.72, "elapsed_time": "5:00:38", "remaining_time": "2:43:51"}
+ {"current_steps": 2740, "total_steps": 4218, "loss": 0.0058, "lr": 3.300705116655672e-06, "epoch": 1.9487908961593172, "percentage": 64.96, "elapsed_time": "5:01:37", "remaining_time": "2:42:41"}
+ {"current_steps": 2750, "total_steps": 4218, "loss": 0.0055, "lr": 3.26184655625058e-06, "epoch": 1.9559032716927454, "percentage": 65.2, "elapsed_time": "5:02:38", "remaining_time": "2:41:33"}
+ {"current_steps": 2760, "total_steps": 4218, "loss": 0.0062, "lr": 3.2231070468247954e-06, "epoch": 1.9630156472261735, "percentage": 65.43, "elapsed_time": "5:03:29", "remaining_time": "2:40:19"}
+ {"current_steps": 2770, "total_steps": 4218, "loss": 0.0063, "lr": 3.1844892417554102e-06, "epoch": 1.9701280227596016, "percentage": 65.67, "elapsed_time": "5:04:39", "remaining_time": "2:39:15"}
+ {"current_steps": 2780, "total_steps": 4218, "loss": 0.0065, "lr": 3.1459957860836528e-06, "epoch": 1.9772403982930298, "percentage": 65.91, "elapsed_time": "5:05:44", "remaining_time": "2:38:08"}
+ {"current_steps": 2790, "total_steps": 4218, "loss": 0.0068, "lr": 3.1076293163337074e-06, "epoch": 1.984352773826458, "percentage": 66.15, "elapsed_time": "5:06:48", "remaining_time": "2:37:01"}
+ {"current_steps": 2800, "total_steps": 4218, "loss": 0.0057, "lr": 3.069392460332141e-06, "epoch": 1.991465149359886, "percentage": 66.38, "elapsed_time": "5:07:49", "remaining_time": "2:35:53"}
+ {"current_steps": 2810, "total_steps": 4218, "loss": 0.0066, "lr": 3.031287837027911e-06, "epoch": 1.9985775248933144, "percentage": 66.62, "elapsed_time": "5:08:56", "remaining_time": "2:34:47"}
+ {"current_steps": 2820, "total_steps": 4218, "loss": 0.0041, "lr": 2.9933180563129936e-06, "epoch": 2.0056899004267423, "percentage": 66.86, "elapsed_time": "5:10:06", "remaining_time": "2:33:44"}
+ {"current_steps": 2830, "total_steps": 4218, "loss": 0.0056, "lr": 2.955485718843616e-06, "epoch": 2.012802275960171, "percentage": 67.09, "elapsed_time": "5:11:23", "remaining_time": "2:32:43"}
+ {"current_steps": 2840, "total_steps": 4218, "loss": 0.0048, "lr": 2.917793415862129e-06, "epoch": 2.019914651493599, "percentage": 67.33, "elapsed_time": "5:12:25", "remaining_time": "2:31:35"}
+ {"current_steps": 2850, "total_steps": 4218, "loss": 0.0038, "lr": 2.880243729019546e-06, "epoch": 2.027027027027027, "percentage": 67.57, "elapsed_time": "5:13:28", "remaining_time": "2:30:28"}
+ {"current_steps": 2860, "total_steps": 4218, "loss": 0.0052, "lr": 2.842839230198685e-06, "epoch": 2.0341394025604553, "percentage": 67.8, "elapsed_time": "5:14:31", "remaining_time": "2:29:20"}
+ {"current_steps": 2870, "total_steps": 4218, "loss": 0.0047, "lr": 2.805582481338044e-06, "epoch": 2.0412517780938835, "percentage": 68.04, "elapsed_time": "5:15:26", "remaining_time": "2:28:09"}
+ {"current_steps": 2880, "total_steps": 4218, "loss": 0.0047, "lr": 2.7684760342563045e-06, "epoch": 2.0483641536273116, "percentage": 68.28, "elapsed_time": "5:16:29", "remaining_time": "2:27:02"}
+ {"current_steps": 2890, "total_steps": 4218, "loss": 0.0056, "lr": 2.731522430477571e-06, "epoch": 2.0554765291607398, "percentage": 68.52, "elapsed_time": "5:17:23", "remaining_time": "2:25:50"}
+ {"current_steps": 2900, "total_steps": 4218, "loss": 0.0048, "lr": 2.694724201057273e-06, "epoch": 2.062588904694168, "percentage": 68.75, "elapsed_time": "5:18:22", "remaining_time": "2:24:41"}
+ {"current_steps": 2910, "total_steps": 4218, "loss": 0.0042, "lr": 2.6580838664088214e-06, "epoch": 2.069701280227596, "percentage": 68.99, "elapsed_time": "5:19:16", "remaining_time": "2:23:30"}
+ {"current_steps": 2920, "total_steps": 4218, "loss": 0.0044, "lr": 2.6216039361309753e-06, "epoch": 2.076813655761024, "percentage": 69.23, "elapsed_time": "5:20:21", "remaining_time": "2:22:24"}
+ {"current_steps": 2930, "total_steps": 4218, "loss": 0.0041, "lr": 2.5852869088359495e-06, "epoch": 2.0839260312944523, "percentage": 69.46, "elapsed_time": "5:21:40", "remaining_time": "2:21:24"}
+ {"current_steps": 2940, "total_steps": 4218, "loss": 0.0032, "lr": 2.549135271978275e-06, "epoch": 2.0910384068278804, "percentage": 69.7, "elapsed_time": "5:23:03", "remaining_time": "2:20:26"}
+ {"current_steps": 2950, "total_steps": 4218, "loss": 0.0046, "lr": 2.5131515016844345e-06, "epoch": 2.0981507823613086, "percentage": 69.94, "elapsed_time": "5:24:12", "remaining_time": "2:19:21"}
+ {"current_steps": 2960, "total_steps": 4218, "loss": 0.0047, "lr": 2.4773380625832603e-06, "epoch": 2.1052631578947367, "percentage": 70.18, "elapsed_time": "5:25:20", "remaining_time": "2:18:16"}
+ {"current_steps": 2970, "total_steps": 4218, "loss": 0.0039, "lr": 2.4416974076371304e-06, "epoch": 2.112375533428165, "percentage": 70.41, "elapsed_time": "5:26:20", "remaining_time": "2:17:07"}
298
+ {"current_steps": 2980, "total_steps": 4218, "loss": 0.0037, "lr": 2.406231977973942e-06, "epoch": 2.119487908961593, "percentage": 70.65, "elapsed_time": "5:27:19", "remaining_time": "2:15:59"}
299
+ {"current_steps": 2990, "total_steps": 4218, "loss": 0.0049, "lr": 2.3709442027199387e-06, "epoch": 2.126600284495021, "percentage": 70.89, "elapsed_time": "5:28:22", "remaining_time": "2:14:51"}
300
+ {"current_steps": 3000, "total_steps": 4218, "loss": 0.0045, "lr": 2.3358364988333066e-06, "epoch": 2.1337126600284497, "percentage": 71.12, "elapsed_time": "5:29:06", "remaining_time": "2:13:37"}
301
+ {"current_steps": 3010, "total_steps": 4218, "loss": 0.0052, "lr": 2.3009112709386454e-06, "epoch": 2.140825035561878, "percentage": 71.36, "elapsed_time": "5:30:18", "remaining_time": "2:12:33"}
302
+ {"current_steps": 3020, "total_steps": 4218, "loss": 0.0047, "lr": 2.2661709111622666e-06, "epoch": 2.147937411095306, "percentage": 71.6, "elapsed_time": "5:31:31", "remaining_time": "2:11:30"}
303
+ {"current_steps": 3030, "total_steps": 4218, "loss": 0.004, "lr": 2.2316177989683458e-06, "epoch": 2.155049786628734, "percentage": 71.83, "elapsed_time": "5:32:46", "remaining_time": "2:10:28"}
304
+ {"current_steps": 3040, "total_steps": 4218, "loss": 0.0052, "lr": 2.197254300995953e-06, "epoch": 2.1621621621621623, "percentage": 72.07, "elapsed_time": "5:33:47", "remaining_time": "2:09:20"}
305
+ {"current_steps": 3050, "total_steps": 4218, "loss": 0.0043, "lr": 2.163082770896943e-06, "epoch": 2.1692745376955904, "percentage": 72.31, "elapsed_time": "5:35:02", "remaining_time": "2:08:18"}
306
+ {"current_steps": 3060, "total_steps": 4218, "loss": 0.0034, "lr": 2.1291055491747643e-06, "epoch": 2.1763869132290186, "percentage": 72.55, "elapsed_time": "5:36:16", "remaining_time": "2:07:15"}
307
+ {"current_steps": 3070, "total_steps": 4218, "loss": 0.0039, "lr": 2.095324963024137e-06, "epoch": 2.1834992887624467, "percentage": 72.78, "elapsed_time": "5:37:01", "remaining_time": "2:06:01"}
308
+ {"current_steps": 3080, "total_steps": 4218, "loss": 0.0038, "lr": 2.061743326171668e-06, "epoch": 2.190611664295875, "percentage": 73.02, "elapsed_time": "5:38:11", "remaining_time": "2:04:57"}
309
+ {"current_steps": 3090, "total_steps": 4218, "loss": 0.0035, "lr": 2.02836293871736e-06, "epoch": 2.197724039829303, "percentage": 73.26, "elapsed_time": "5:39:11", "remaining_time": "2:03:49"}
310
+ {"current_steps": 3100, "total_steps": 4218, "loss": 0.0038, "lr": 1.9951860869771e-06, "epoch": 2.204836415362731, "percentage": 73.49, "elapsed_time": "5:40:06", "remaining_time": "2:02:39"}
311
+ {"current_steps": 3110, "total_steps": 4218, "loss": 0.004, "lr": 1.962215043326029e-06, "epoch": 2.2119487908961593, "percentage": 73.73, "elapsed_time": "5:41:22", "remaining_time": "2:01:37"}
312
+ {"current_steps": 3120, "total_steps": 4218, "loss": 0.0036, "lr": 1.9294520660429284e-06, "epoch": 2.2190611664295874, "percentage": 73.97, "elapsed_time": "5:42:29", "remaining_time": "2:00:31"}
313
+ {"current_steps": 3130, "total_steps": 4218, "loss": 0.0045, "lr": 1.8968993991555301e-06, "epoch": 2.2261735419630155, "percentage": 74.21, "elapsed_time": "5:43:31", "remaining_time": "1:59:24"}
314
+ {"current_steps": 3140, "total_steps": 4218, "loss": 0.0041, "lr": 1.8645592722868223e-06, "epoch": 2.2332859174964437, "percentage": 74.44, "elapsed_time": "5:44:32", "remaining_time": "1:58:17"}
315
+ {"current_steps": 3150, "total_steps": 4218, "loss": 0.0042, "lr": 1.8324339005023273e-06, "epoch": 2.240398293029872, "percentage": 74.68, "elapsed_time": "5:45:27", "remaining_time": "1:57:07"}
316
+ {"current_steps": 3160, "total_steps": 4218, "loss": 0.0032, "lr": 1.8005254841584035e-06, "epoch": 2.2475106685633, "percentage": 74.92, "elapsed_time": "5:46:31", "remaining_time": "1:56:01"}
317
+ {"current_steps": 3170, "total_steps": 4218, "loss": 0.0039, "lr": 1.768836208751516e-06, "epoch": 2.2546230440967285, "percentage": 75.15, "elapsed_time": "5:47:32", "remaining_time": "1:54:53"}
318
+ {"current_steps": 3180, "total_steps": 4218, "loss": 0.004, "lr": 1.7373682447685624e-06, "epoch": 2.2617354196301562, "percentage": 75.39, "elapsed_time": "5:48:43", "remaining_time": "1:53:49"}
319
+ {"current_steps": 3190, "total_steps": 4218, "loss": 0.0035, "lr": 1.706123747538196e-06, "epoch": 2.268847795163585, "percentage": 75.63, "elapsed_time": "5:49:50", "remaining_time": "1:52:44"}
320
+ {"current_steps": 3200, "total_steps": 4218, "loss": 0.0041, "lr": 1.6751048570832184e-06, "epoch": 2.275960170697013, "percentage": 75.87, "elapsed_time": "5:50:41", "remaining_time": "1:51:33"}
321
+ {"current_steps": 3210, "total_steps": 4218, "loss": 0.003, "lr": 1.6443136979739855e-06, "epoch": 2.283072546230441, "percentage": 76.1, "elapsed_time": "5:51:38", "remaining_time": "1:50:25"}
322
+ {"current_steps": 3220, "total_steps": 4218, "loss": 0.0034, "lr": 1.6137523791829007e-06, "epoch": 2.2901849217638692, "percentage": 76.34, "elapsed_time": "5:52:51", "remaining_time": "1:49:21"}
323
+ {"current_steps": 3230, "total_steps": 4218, "loss": 0.0034, "lr": 1.5834229939399637e-06, "epoch": 2.2972972972972974, "percentage": 76.58, "elapsed_time": "5:53:52", "remaining_time": "1:48:14"}
324
+ {"current_steps": 3240, "total_steps": 4218, "loss": 0.0037, "lr": 1.5533276195893987e-06, "epoch": 2.3044096728307255, "percentage": 76.81, "elapsed_time": "5:55:00", "remaining_time": "1:47:09"}
325
+ {"current_steps": 3250, "total_steps": 4218, "loss": 0.0039, "lr": 1.5234683174473669e-06, "epoch": 2.3115220483641536, "percentage": 77.05, "elapsed_time": "5:56:20", "remaining_time": "1:46:08"}
326
+ {"current_steps": 3260, "total_steps": 4218, "loss": 0.0034, "lr": 1.493847132660789e-06, "epoch": 2.318634423897582, "percentage": 77.29, "elapsed_time": "5:57:30", "remaining_time": "1:45:03"}
327
+ {"current_steps": 3270, "total_steps": 4218, "loss": 0.0044, "lr": 1.4644660940672628e-06, "epoch": 2.32574679943101, "percentage": 77.52, "elapsed_time": "5:58:24", "remaining_time": "1:43:54"}
328
+ {"current_steps": 3280, "total_steps": 4218, "loss": 0.0036, "lr": 1.435327214056103e-06, "epoch": 2.332859174964438, "percentage": 77.76, "elapsed_time": "5:59:46", "remaining_time": "1:42:53"}
329
+ {"current_steps": 3290, "total_steps": 4218, "loss": 0.0041, "lr": 1.406432488430508e-06, "epoch": 2.339971550497866, "percentage": 78.0, "elapsed_time": "6:01:02", "remaining_time": "1:41:50"}
330
+ {"current_steps": 3300, "total_steps": 4218, "loss": 0.0035, "lr": 1.3777838962708602e-06, "epoch": 2.3470839260312943, "percentage": 78.24, "elapsed_time": "6:01:54", "remaining_time": "1:40:40"}
331
+ {"current_steps": 3310, "total_steps": 4218, "loss": 0.0033, "lr": 1.3493833997991745e-06, "epoch": 2.3541963015647225, "percentage": 78.47, "elapsed_time": "6:02:48", "remaining_time": "1:39:31"}
332
+ {"current_steps": 3320, "total_steps": 4218, "loss": 0.0042, "lr": 1.3212329442446985e-06, "epoch": 2.3613086770981506, "percentage": 78.71, "elapsed_time": "6:03:54", "remaining_time": "1:38:25"}
333
+ {"current_steps": 3330, "total_steps": 4218, "loss": 0.0032, "lr": 1.2933344577106822e-06, "epoch": 2.3684210526315788, "percentage": 78.95, "elapsed_time": "6:05:09", "remaining_time": "1:37:22"}
334
+ {"current_steps": 3340, "total_steps": 4218, "loss": 0.0031, "lr": 1.2656898510423122e-06, "epoch": 2.3755334281650073, "percentage": 79.18, "elapsed_time": "6:06:12", "remaining_time": "1:36:16"}
335
+ {"current_steps": 3350, "total_steps": 4218, "loss": 0.0033, "lr": 1.2383010176958372e-06, "epoch": 2.382645803698435, "percentage": 79.42, "elapsed_time": "6:07:26", "remaining_time": "1:35:12"}
336
+ {"current_steps": 3360, "total_steps": 4218, "loss": 0.0039, "lr": 1.2111698336088717e-06, "epoch": 2.3897581792318636, "percentage": 79.66, "elapsed_time": "6:08:25", "remaining_time": "1:34:04"}
337
+ {"current_steps": 3370, "total_steps": 4218, "loss": 0.0034, "lr": 1.1842981570719237e-06, "epoch": 2.3968705547652918, "percentage": 79.9, "elapsed_time": "6:09:39", "remaining_time": "1:33:01"}
338
+ {"current_steps": 3380, "total_steps": 4218, "loss": 0.0039, "lr": 1.157687828601094e-06, "epoch": 2.40398293029872, "percentage": 80.13, "elapsed_time": "6:10:46", "remaining_time": "1:31:55"}
339
+ {"current_steps": 3390, "total_steps": 4218, "loss": 0.0033, "lr": 1.1313406708120327e-06, "epoch": 2.411095305832148, "percentage": 80.37, "elapsed_time": "6:11:55", "remaining_time": "1:30:50"}
340
+ {"current_steps": 3400, "total_steps": 4218, "loss": 0.0032, "lr": 1.1052584882950896e-06, "epoch": 2.418207681365576, "percentage": 80.61, "elapsed_time": "6:13:21", "remaining_time": "1:29:49"}
341
+ {"current_steps": 3410, "total_steps": 4218, "loss": 0.0029, "lr": 1.0794430674917262e-06, "epoch": 2.4253200568990043, "percentage": 80.84, "elapsed_time": "6:14:28", "remaining_time": "1:28:43"}
342
+ {"current_steps": 3420, "total_steps": 4218, "loss": 0.0034, "lr": 1.0538961765721429e-06, "epoch": 2.4324324324324325, "percentage": 81.08, "elapsed_time": "6:15:22", "remaining_time": "1:27:35"}
343
+ {"current_steps": 3430, "total_steps": 4218, "loss": 0.0033, "lr": 1.0286195653141822e-06, "epoch": 2.4395448079658606, "percentage": 81.32, "elapsed_time": "6:16:22", "remaining_time": "1:26:27"}
344
+ {"current_steps": 3440, "total_steps": 4218, "loss": 0.0033, "lr": 1.0036149649834786e-06, "epoch": 2.4466571834992887, "percentage": 81.56, "elapsed_time": "6:17:21", "remaining_time": "1:25:20"}
345
+ {"current_steps": 3450, "total_steps": 4218, "loss": 0.0032, "lr": 9.788840882148803e-07, "epoch": 2.453769559032717, "percentage": 81.79, "elapsed_time": "6:18:35", "remaining_time": "1:24:16"}
346
+ {"current_steps": 3460, "total_steps": 4218, "loss": 0.0028, "lr": 9.544286288951393e-07, "epoch": 2.460881934566145, "percentage": 82.03, "elapsed_time": "6:19:35", "remaining_time": "1:23:09"}
347
+ {"current_steps": 3470, "total_steps": 4218, "loss": 0.003, "lr": 9.302502620469073e-07, "epoch": 2.467994310099573, "percentage": 82.27, "elapsed_time": "6:20:42", "remaining_time": "1:22:03"}
348
+ {"current_steps": 3480, "total_steps": 4218, "loss": 0.0033, "lr": 9.063506437139901e-07, "epoch": 2.4751066856330013, "percentage": 82.5, "elapsed_time": "6:21:49", "remaining_time": "1:20:58"}
349
+ {"current_steps": 3490, "total_steps": 4218, "loss": 0.0035, "lr": 8.827314108479357e-07, "epoch": 2.4822190611664294, "percentage": 82.74, "elapsed_time": "6:23:03", "remaining_time": "1:19:54"}
350
+ {"current_steps": 3500, "total_steps": 4218, "loss": 0.0037, "lr": 8.593941811959078e-07, "epoch": 2.4893314366998576, "percentage": 82.98, "elapsed_time": "6:24:00", "remaining_time": "1:18:46"}
351
+ {"current_steps": 3510, "total_steps": 4218, "loss": 0.0035, "lr": 8.363405531898833e-07, "epoch": 2.496443812233286, "percentage": 83.21, "elapsed_time": "6:24:53", "remaining_time": "1:17:38"}
352
+ {"current_steps": 3520, "total_steps": 4218, "loss": 0.0038, "lr": 8.135721058371681e-07, "epoch": 2.503556187766714, "percentage": 83.45, "elapsed_time": "6:25:57", "remaining_time": "1:16:31"}
353
+ {"current_steps": 3530, "total_steps": 4218, "loss": 0.0023, "lr": 7.910903986122537e-07, "epoch": 2.5106685633001424, "percentage": 83.69, "elapsed_time": "6:26:57", "remaining_time": "1:15:25"}
354
+ {"current_steps": 3540, "total_steps": 4218, "loss": 0.0033, "lr": 7.688969713499983e-07, "epoch": 2.5177809388335706, "percentage": 83.93, "elapsed_time": "6:28:22", "remaining_time": "1:14:22"}
355
+ {"current_steps": 3550, "total_steps": 4218, "loss": 0.0036, "lr": 7.469933441401606e-07, "epoch": 2.5248933143669987, "percentage": 84.16, "elapsed_time": "6:29:32", "remaining_time": "1:13:18"}
356
+ {"current_steps": 3560, "total_steps": 4218, "loss": 0.0029, "lr": 7.253810172232867e-07, "epoch": 2.532005689900427, "percentage": 84.4, "elapsed_time": "6:30:41", "remaining_time": "1:12:12"}
357
+ {"current_steps": 3570, "total_steps": 4218, "loss": 0.0031, "lr": 7.040614708879489e-07, "epoch": 2.539118065433855, "percentage": 84.64, "elapsed_time": "6:31:37", "remaining_time": "1:11:05"}
358
+ {"current_steps": 3580, "total_steps": 4218, "loss": 0.0031, "lr": 6.830361653693673e-07, "epoch": 2.546230440967283, "percentage": 84.87, "elapsed_time": "6:32:53", "remaining_time": "1:10:01"}
359
+ {"current_steps": 3590, "total_steps": 4218, "loss": 0.0031, "lr": 6.623065407493801e-07, "epoch": 2.5533428165007113, "percentage": 85.11, "elapsed_time": "6:34:02", "remaining_time": "1:08:55"}
360
+ {"current_steps": 3600, "total_steps": 4218, "loss": 0.0029, "lr": 6.418740168578208e-07, "epoch": 2.5604551920341394, "percentage": 85.35, "elapsed_time": "6:35:19", "remaining_time": "1:07:51"}
361
+ {"current_steps": 3610, "total_steps": 4218, "loss": 0.0031, "lr": 6.217399931752627e-07, "epoch": 2.5675675675675675, "percentage": 85.59, "elapsed_time": "6:36:16", "remaining_time": "1:06:44"}
362
+ {"current_steps": 3620, "total_steps": 4218, "loss": 0.0028, "lr": 6.019058487371687e-07, "epoch": 2.5746799431009957, "percentage": 85.82, "elapsed_time": "6:37:08", "remaining_time": "1:05:36"}
363
+ {"current_steps": 3630, "total_steps": 4218, "loss": 0.0037, "lr": 5.82372942039432e-07, "epoch": 2.581792318634424, "percentage": 86.06, "elapsed_time": "6:38:04", "remaining_time": "1:04:28"}
364
+ {"current_steps": 3640, "total_steps": 4218, "loss": 0.003, "lr": 5.631426109453364e-07, "epoch": 2.588904694167852, "percentage": 86.3, "elapsed_time": "6:39:11", "remaining_time": "1:03:23"}
365
+ {"current_steps": 3650, "total_steps": 4218, "loss": 0.0032, "lr": 5.44216172593916e-07, "epoch": 2.59601706970128, "percentage": 86.53, "elapsed_time": "6:40:18", "remaining_time": "1:02:17"}
366
+ {"current_steps": 3660, "total_steps": 4218, "loss": 0.0035, "lr": 5.255949233097451e-07, "epoch": 2.6031294452347082, "percentage": 86.77, "elapsed_time": "6:41:31", "remaining_time": "1:01:12"}
367
+ {"current_steps": 3670, "total_steps": 4218, "loss": 0.0032, "lr": 5.072801385141429e-07, "epoch": 2.6102418207681364, "percentage": 87.01, "elapsed_time": "6:42:56", "remaining_time": "1:00:09"}
368
+ {"current_steps": 3680, "total_steps": 4218, "loss": 0.0027, "lr": 4.89273072637827e-07, "epoch": 2.617354196301565, "percentage": 87.25, "elapsed_time": "6:43:51", "remaining_time": "0:59:02"}
369
+ {"current_steps": 3690, "total_steps": 4218, "loss": 0.0029, "lr": 4.7157495903498105e-07, "epoch": 2.6244665718349927, "percentage": 87.48, "elapsed_time": "6:45:04", "remaining_time": "0:57:57"}
370
+ {"current_steps": 3700, "total_steps": 4218, "loss": 0.0035, "lr": 4.541870098987911e-07, "epoch": 2.6315789473684212, "percentage": 87.72, "elapsed_time": "6:46:07", "remaining_time": "0:56:51"}
371
+ {"current_steps": 3710, "total_steps": 4218, "loss": 0.0039, "lr": 4.371104161784073e-07, "epoch": 2.6386913229018494, "percentage": 87.96, "elapsed_time": "6:47:14", "remaining_time": "0:55:45"}
372
+ {"current_steps": 3720, "total_steps": 4218, "loss": 0.003, "lr": 4.2034634749738623e-07, "epoch": 2.6458036984352775, "percentage": 88.19, "elapsed_time": "6:48:19", "remaining_time": "0:54:39"}
373
+ {"current_steps": 3730, "total_steps": 4218, "loss": 0.0032, "lr": 4.038959520735658e-07, "epoch": 2.6529160739687057, "percentage": 88.43, "elapsed_time": "6:49:17", "remaining_time": "0:53:32"}
374
+ {"current_steps": 3740, "total_steps": 4218, "loss": 0.0033, "lr": 3.8776035664043033e-07, "epoch": 2.660028449502134, "percentage": 88.67, "elapsed_time": "6:50:18", "remaining_time": "0:52:26"}
375
+ {"current_steps": 3750, "total_steps": 4218, "loss": 0.0036, "lr": 3.719406663699349e-07, "epoch": 2.667140825035562, "percentage": 88.9, "elapsed_time": "6:51:10", "remaining_time": "0:51:18"}
376
+ {"current_steps": 3760, "total_steps": 4218, "loss": 0.0034, "lr": 3.564379647968064e-07, "epoch": 2.67425320056899, "percentage": 89.14, "elapsed_time": "6:52:30", "remaining_time": "0:50:14"}
377
+ {"current_steps": 3770, "total_steps": 4218, "loss": 0.0029, "lr": 3.4125331374433414e-07, "epoch": 2.681365576102418, "percentage": 89.38, "elapsed_time": "6:53:20", "remaining_time": "0:49:07"}
378
+ {"current_steps": 3780, "total_steps": 4218, "loss": 0.0027, "lr": 3.2638775325163517e-07, "epoch": 2.6884779516358464, "percentage": 89.62, "elapsed_time": "6:54:17", "remaining_time": "0:48:00"}
379
+ {"current_steps": 3790, "total_steps": 4218, "loss": 0.0026, "lr": 3.1184230150243025e-07, "epoch": 2.6955903271692745, "percentage": 89.85, "elapsed_time": "6:55:21", "remaining_time": "0:46:54"}
380
+ {"current_steps": 3800, "total_steps": 4218, "loss": 0.0027, "lr": 2.9761795475529375e-07, "epoch": 2.7027027027027026, "percentage": 90.09, "elapsed_time": "6:56:28", "remaining_time": "0:45:48"}
381
+ {"current_steps": 3810, "total_steps": 4218, "loss": 0.0032, "lr": 2.8371568727542486e-07, "epoch": 2.7098150782361308, "percentage": 90.33, "elapsed_time": "6:57:42", "remaining_time": "0:44:43"}
382
+ {"current_steps": 3820, "total_steps": 4218, "loss": 0.0027, "lr": 2.7013645126791446e-07, "epoch": 2.716927453769559, "percentage": 90.56, "elapsed_time": "6:58:29", "remaining_time": "0:43:36"}
383
+ {"current_steps": 3830, "total_steps": 4218, "loss": 0.0031, "lr": 2.5688117681252677e-07, "epoch": 2.724039829302987, "percentage": 90.8, "elapsed_time": "6:59:31", "remaining_time": "0:42:30"}
384
+ {"current_steps": 3840, "total_steps": 4218, "loss": 0.0027, "lr": 2.439507717999945e-07, "epoch": 2.731152204836415, "percentage": 91.04, "elapsed_time": "7:00:51", "remaining_time": "0:41:25"}
385
+ {"current_steps": 3850, "total_steps": 4218, "loss": 0.0032, "lr": 2.3134612186983817e-07, "epoch": 2.7382645803698438, "percentage": 91.28, "elapsed_time": "7:01:37", "remaining_time": "0:40:18"}
386
+ {"current_steps": 3860, "total_steps": 4218, "loss": 0.0032, "lr": 2.1906809034970057e-07, "epoch": 2.7453769559032715, "percentage": 91.51, "elapsed_time": "7:02:47", "remaining_time": "0:39:12"}
387
+ {"current_steps": 3870, "total_steps": 4218, "loss": 0.0028, "lr": 2.0711751819622038e-07, "epoch": 2.7524893314367, "percentage": 91.75, "elapsed_time": "7:03:36", "remaining_time": "0:38:05"}
388
+ {"current_steps": 3880, "total_steps": 4218, "loss": 0.0033, "lr": 1.954952239374286e-07, "epoch": 2.759601706970128, "percentage": 91.99, "elapsed_time": "7:04:44", "remaining_time": "0:37:00"}
389
+ {"current_steps": 3890, "total_steps": 4218, "loss": 0.0028, "lr": 1.8420200361669137e-07, "epoch": 2.7667140825035563, "percentage": 92.22, "elapsed_time": "7:05:47", "remaining_time": "0:35:54"}
390
+ {"current_steps": 3900, "total_steps": 4218, "loss": 0.0028, "lr": 1.732386307381767e-07, "epoch": 2.7738264580369845, "percentage": 92.46, "elapsed_time": "7:06:56", "remaining_time": "0:34:48"}
391
+ {"current_steps": 3910, "total_steps": 4218, "loss": 0.0032, "lr": 1.6260585621388604e-07, "epoch": 2.7809388335704126, "percentage": 92.7, "elapsed_time": "7:07:56", "remaining_time": "0:33:42"}
392
+ {"current_steps": 3920, "total_steps": 4218, "loss": 0.0033, "lr": 1.523044083122138e-07, "epoch": 2.7880512091038407, "percentage": 92.94, "elapsed_time": "7:09:09", "remaining_time": "0:32:37"}
393
+ {"current_steps": 3930, "total_steps": 4218, "loss": 0.0034, "lr": 1.4233499260807194e-07, "epoch": 2.795163584637269, "percentage": 93.17, "elapsed_time": "7:10:04", "remaining_time": "0:31:31"}
394
+ {"current_steps": 3940, "total_steps": 4218, "loss": 0.003, "lr": 1.326982919345582e-07, "epoch": 2.802275960170697, "percentage": 93.41, "elapsed_time": "7:11:11", "remaining_time": "0:30:25"}
395
+ {"current_steps": 3950, "total_steps": 4218, "loss": 0.0026, "lr": 1.2339496633619218e-07, "epoch": 2.809388335704125, "percentage": 93.65, "elapsed_time": "7:12:14", "remaining_time": "0:29:19"}
396
+ {"current_steps": 3960, "total_steps": 4218, "loss": 0.0026, "lr": 1.1442565302370146e-07, "epoch": 2.8165007112375533, "percentage": 93.88, "elapsed_time": "7:13:30", "remaining_time": "0:28:14"}
397
+ {"current_steps": 3970, "total_steps": 4218, "loss": 0.0033, "lr": 1.0579096633038411e-07, "epoch": 2.8236130867709814, "percentage": 94.12, "elapsed_time": "7:14:32", "remaining_time": "0:27:08"}
398
+ {"current_steps": 3980, "total_steps": 4218, "loss": 0.0029, "lr": 9.749149767002197e-08, "epoch": 2.8307254623044096, "percentage": 94.36, "elapsed_time": "7:15:35", "remaining_time": "0:26:02"}
399
+ {"current_steps": 3990, "total_steps": 4218, "loss": 0.0038, "lr": 8.952781549638412e-08, "epoch": 2.8378378378378377, "percentage": 94.59, "elapsed_time": "7:16:39", "remaining_time": "0:24:57"}
400
+ {"current_steps": 4000, "total_steps": 4218, "loss": 0.0028, "lr": 8.190046526428241e-08, "epoch": 2.844950213371266, "percentage": 94.83, "elapsed_time": "7:17:50", "remaining_time": "0:23:51"}
401
+ {"current_steps": 4010, "total_steps": 4218, "loss": 0.0032, "lr": 7.460996939221643e-08, "epoch": 2.852062588904694, "percentage": 95.07, "elapsed_time": "7:19:05", "remaining_time": "0:22:46"}
402
+ {"current_steps": 4020, "total_steps": 4218, "loss": 0.0034, "lr": 6.765682722659151e-08, "epoch": 2.8591749644381226, "percentage": 95.31, "elapsed_time": "7:20:09", "remaining_time": "0:21:40"}
403
+ {"current_steps": 4030, "total_steps": 4218, "loss": 0.0026, "lr": 6.104151500751609e-08, "epoch": 2.8662873399715503, "percentage": 95.54, "elapsed_time": "7:21:25", "remaining_time": "0:20:35"}
404
+ {"current_steps": 4040, "total_steps": 4218, "loss": 0.0035, "lr": 5.476448583618288e-08, "epoch": 2.873399715504979, "percentage": 95.78, "elapsed_time": "7:22:37", "remaining_time": "0:19:30"}
405
+ {"current_steps": 4050, "total_steps": 4218, "loss": 0.0026, "lr": 4.8826169643832464e-08, "epoch": 2.8805120910384066, "percentage": 96.02, "elapsed_time": "7:23:44", "remaining_time": "0:18:24"}
406
+ {"current_steps": 4060, "total_steps": 4218, "loss": 0.0032, "lr": 4.322697316231361e-08, "epoch": 2.887624466571835, "percentage": 96.25, "elapsed_time": "7:24:46", "remaining_time": "0:17:18"}
407
+ {"current_steps": 4070, "total_steps": 4218, "loss": 0.0024, "lr": 3.796727989621385e-08, "epoch": 2.8947368421052633, "percentage": 96.49, "elapsed_time": "7:25:54", "remaining_time": "0:16:12"}
408
+ {"current_steps": 4080, "total_steps": 4218, "loss": 0.003, "lr": 3.304745009660326e-08, "epoch": 2.9018492176386914, "percentage": 96.73, "elapsed_time": "7:27:13", "remaining_time": "0:15:07"}
409
+ {"current_steps": 4090, "total_steps": 4218, "loss": 0.0028, "lr": 2.8467820736350903e-08, "epoch": 2.9089615931721196, "percentage": 96.97, "elapsed_time": "7:28:35", "remaining_time": "0:14:02"}
410
+ {"current_steps": 4100, "total_steps": 4218, "loss": 0.003, "lr": 2.422870548705103e-08, "epoch": 2.9160739687055477, "percentage": 97.2, "elapsed_time": "7:29:40", "remaining_time": "0:12:56"}
411
+ {"current_steps": 4110, "total_steps": 4218, "loss": 0.0032, "lr": 2.0330394697534726e-08, "epoch": 2.923186344238976, "percentage": 97.44, "elapsed_time": "7:30:40", "remaining_time": "0:11:50"}
412
+ {"current_steps": 4120, "total_steps": 4218, "loss": 0.0033, "lr": 1.677315537398583e-08, "epoch": 2.930298719772404, "percentage": 97.68, "elapsed_time": "7:31:34", "remaining_time": "0:10:44"}
413
+ {"current_steps": 4130, "total_steps": 4218, "loss": 0.003, "lr": 1.355723116165164e-08, "epoch": 2.937411095305832, "percentage": 97.91, "elapsed_time": "7:32:45", "remaining_time": "0:09:38"}
414
+ {"current_steps": 4140, "total_steps": 4218, "loss": 0.003, "lr": 1.0682842328154086e-08, "epoch": 2.9445234708392602, "percentage": 98.15, "elapsed_time": "7:33:55", "remaining_time": "0:08:33"}
415
+ {"current_steps": 4150, "total_steps": 4218, "loss": 0.0034, "lr": 8.150185748405092e-09, "epoch": 2.9516358463726884, "percentage": 98.39, "elapsed_time": "7:34:57", "remaining_time": "0:07:27"}
416
+ {"current_steps": 4160, "total_steps": 4218, "loss": 0.0031, "lr": 5.959434891121274e-09, "epoch": 2.9587482219061165, "percentage": 98.62, "elapsed_time": "7:36:01", "remaining_time": "0:06:21"}
417
+ {"current_steps": 4170, "total_steps": 4218, "loss": 0.0028, "lr": 4.110739806940656e-09, "epoch": 2.9658605974395447, "percentage": 98.86, "elapsed_time": "7:37:04", "remaining_time": "0:05:15"}
418
+ {"current_steps": 4180, "total_steps": 4218, "loss": 0.0025, "lr": 2.604227118148117e-09, "epoch": 2.972972972972973, "percentage": 99.1, "elapsed_time": "7:38:17", "remaining_time": "0:04:09"}
419
+ {"current_steps": 4190, "total_steps": 4218, "loss": 0.0028, "lr": 1.4400000100017741e-09, "epoch": 2.9800853485064014, "percentage": 99.34, "elapsed_time": "7:39:25", "remaining_time": "0:03:04"}
420
+ {"current_steps": 4200, "total_steps": 4218, "loss": 0.0027, "lr": 6.181382236641887e-10, "epoch": 2.987197724039829, "percentage": 99.57, "elapsed_time": "7:40:25", "remaining_time": "0:01:58"}
421
+ {"current_steps": 4210, "total_steps": 4218, "loss": 0.003, "lr": 1.3869805074284704e-10, "epoch": 2.9943100995732577, "percentage": 99.81, "elapsed_time": "7:41:31", "remaining_time": "0:00:52"}
422
+ {"current_steps": 4218, "total_steps": 4218, "epoch": 3.0, "percentage": 100.0, "elapsed_time": "7:42:27", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2990 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 3.0,
6
+ "eval_steps": 500,
7
+ "global_step": 4218,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.007112375533428165,
14
+ "grad_norm": 45.94488445688017,
15
+ "learning_rate": 2.132701421800948e-07,
16
+ "loss": 4.9235,
17
+ "step": 10
18
+ },
19
+ {
20
+ "epoch": 0.01422475106685633,
21
+ "grad_norm": 56.45310643883483,
22
+ "learning_rate": 4.502369668246446e-07,
23
+ "loss": 4.7616,
24
+ "step": 20
25
+ },
26
+ {
27
+ "epoch": 0.021337126600284494,
28
+ "grad_norm": 47.57072569736425,
29
+ "learning_rate": 6.872037914691944e-07,
30
+ "loss": 4.0518,
31
+ "step": 30
32
+ },
33
+ {
34
+ "epoch": 0.02844950213371266,
35
+ "grad_norm": 14.9615219454182,
36
+ "learning_rate": 9.241706161137441e-07,
37
+ "loss": 3.1168,
38
+ "step": 40
39
+ },
40
+ {
41
+ "epoch": 0.03556187766714083,
42
+ "grad_norm": 9.209691028948875,
43
+ "learning_rate": 1.161137440758294e-06,
44
+ "loss": 2.408,
45
+ "step": 50
46
+ },
47
+ {
48
+ "epoch": 0.04267425320056899,
49
+ "grad_norm": 3.738856271681981,
50
+ "learning_rate": 1.3981042654028437e-06,
51
+ "loss": 2.0996,
52
+ "step": 60
53
+ },
54
+ {
55
+ "epoch": 0.049786628733997154,
56
+ "grad_norm": 4.299210545328982,
57
+ "learning_rate": 1.6350710900473934e-06,
58
+ "loss": 1.961,
59
+ "step": 70
60
+ },
61
+ {
62
+ "epoch": 0.05689900426742532,
63
+ "grad_norm": 4.288339811445908,
64
+ "learning_rate": 1.8720379146919433e-06,
65
+ "loss": 1.8454,
66
+ "step": 80
67
+ },
68
+ {
69
+ "epoch": 0.06401137980085349,
70
+ "grad_norm": 4.487588443815648,
71
+ "learning_rate": 2.109004739336493e-06,
72
+ "loss": 1.8,
73
+ "step": 90
74
+ },
75
+ {
76
+ "epoch": 0.07112375533428165,
77
+ "grad_norm": 5.903658522691362,
78
+ "learning_rate": 2.345971563981043e-06,
79
+ "loss": 1.7189,
80
+ "step": 100
81
+ },
82
+ {
83
+ "epoch": 0.07823613086770982,
84
+ "grad_norm": 6.538803049950138,
85
+ "learning_rate": 2.5829383886255925e-06,
86
+ "loss": 1.6861,
87
+ "step": 110
88
+ },
89
+ {
90
+ "epoch": 0.08534850640113797,
91
+ "grad_norm": 6.420212036240461,
92
+ "learning_rate": 2.8199052132701426e-06,
93
+ "loss": 1.6933,
94
+ "step": 120
95
+ },
96
+ {
97
+ "epoch": 0.09246088193456614,
98
+ "grad_norm": 6.08601994925446,
99
+ "learning_rate": 3.0568720379146923e-06,
100
+ "loss": 1.6477,
101
+ "step": 130
102
+ },
103
+ {
104
+ "epoch": 0.09957325746799431,
105
+ "grad_norm": 6.641158507404538,
106
+ "learning_rate": 3.293838862559242e-06,
107
+ "loss": 1.5837,
108
+ "step": 140
109
+ },
110
+ {
111
+ "epoch": 0.10668563300142248,
112
+ "grad_norm": 5.6671416215198445,
113
+ "learning_rate": 3.5308056872037916e-06,
114
+ "loss": 1.553,
115
+ "step": 150
116
+ },
117
+ {
118
+ "epoch": 0.11379800853485064,
119
+ "grad_norm": 4.895576620125158,
120
+ "learning_rate": 3.7677725118483417e-06,
121
+ "loss": 1.601,
122
+ "step": 160
123
+ },
124
+ {
125
+ "epoch": 0.12091038406827881,
126
+ "grad_norm": 5.00629870941093,
127
+ "learning_rate": 4.004739336492891e-06,
128
+ "loss": 1.4953,
129
+ "step": 170
130
+ },
131
+ {
132
+ "epoch": 0.12802275960170698,
133
+ "grad_norm": 4.098161366916081,
134
+ "learning_rate": 4.2417061611374415e-06,
135
+ "loss": 1.4986,
136
+ "step": 180
137
+ },
138
+ {
139
+ "epoch": 0.13513513513513514,
140
+ "grad_norm": 4.279942094132115,
141
+ "learning_rate": 4.478672985781991e-06,
142
+ "loss": 1.4177,
143
+ "step": 190
144
+ },
145
+ {
146
+ "epoch": 0.1422475106685633,
147
+ "grad_norm": 2.9957264584301506,
148
+ "learning_rate": 4.715639810426541e-06,
149
+ "loss": 1.4234,
150
+ "step": 200
151
+ },
152
+ {
153
+ "epoch": 0.14935988620199148,
154
+ "grad_norm": 2.960846105003115,
155
+ "learning_rate": 4.952606635071091e-06,
156
+ "loss": 1.4034,
157
+ "step": 210
158
+ },
159
+ {
160
+ "epoch": 0.15647226173541964,
161
+ "grad_norm": 2.8142395090714207,
162
+ "learning_rate": 5.18957345971564e-06,
163
+ "loss": 1.4172,
164
+ "step": 220
165
+ },
166
+ {
167
+ "epoch": 0.16358463726884778,
168
+ "grad_norm": 3.1953820486960938,
169
+ "learning_rate": 5.42654028436019e-06,
170
+ "loss": 1.3695,
171
+ "step": 230
172
+ },
173
+ {
174
+ "epoch": 0.17069701280227595,
175
+ "grad_norm": 3.0329786581569813,
176
+ "learning_rate": 5.66350710900474e-06,
177
+ "loss": 1.3815,
178
+ "step": 240
179
+ },
180
+ {
181
+ "epoch": 0.17780938833570412,
182
+ "grad_norm": 3.023917167954777,
183
+ "learning_rate": 5.90047393364929e-06,
184
+ "loss": 1.3494,
185
+ "step": 250
186
+ },
187
+ {
188
+ "epoch": 0.18492176386913228,
189
+ "grad_norm": 3.3061969408501186,
190
+ "learning_rate": 6.137440758293839e-06,
191
+ "loss": 1.351,
192
+ "step": 260
193
+ },
194
+ {
195
+ "epoch": 0.19203413940256045,
196
+ "grad_norm": 3.0703691360984116,
197
+ "learning_rate": 6.374407582938389e-06,
198
+ "loss": 1.3007,
199
+ "step": 270
200
+ },
201
+ {
202
+ "epoch": 0.19914651493598862,
203
+ "grad_norm": 2.6510030082143072,
204
+ "learning_rate": 6.611374407582939e-06,
205
+ "loss": 1.2318,
206
+ "step": 280
207
+ },
208
+ {
209
+ "epoch": 0.20625889046941678,
210
+ "grad_norm": 2.781634197302321,
211
+ "learning_rate": 6.848341232227489e-06,
212
+ "loss": 1.2452,
213
+ "step": 290
214
+ },
215
+ {
216
+ "epoch": 0.21337126600284495,
217
+ "grad_norm": 2.889926592158047,
218
+ "learning_rate": 7.085308056872039e-06,
219
+ "loss": 1.2299,
220
+ "step": 300
221
+ },
222
+ {
223
+ "epoch": 0.22048364153627312,
224
+ "grad_norm": 2.9460513709926546,
225
+ "learning_rate": 7.322274881516588e-06,
226
+ "loss": 1.2481,
227
+ "step": 310
228
+ },
229
+ {
230
+ "epoch": 0.22759601706970128,
231
+ "grad_norm": 3.117250263470296,
232
+ "learning_rate": 7.559241706161138e-06,
233
+ "loss": 1.1874,
234
+ "step": 320
235
+ },
236
+ {
237
+ "epoch": 0.23470839260312945,
238
+ "grad_norm": 3.1068660585891443,
239
+ "learning_rate": 7.796208530805689e-06,
240
+ "loss": 1.242,
241
+ "step": 330
242
+ },
243
+ {
244
+ "epoch": 0.24182076813655762,
245
+ "grad_norm": 3.2303235755610458,
246
+ "learning_rate": 8.033175355450237e-06,
247
+ "loss": 1.1656,
248
+ "step": 340
249
+ },
250
+ {
251
+ "epoch": 0.24893314366998578,
252
+ "grad_norm": 3.380471682074544,
253
+ "learning_rate": 8.270142180094787e-06,
254
+ "loss": 1.1626,
255
+ "step": 350
256
+ },
257
+ {
258
+ "epoch": 0.25604551920341395,
259
+ "grad_norm": 3.0003799025494455,
260
+ "learning_rate": 8.507109004739337e-06,
261
+ "loss": 1.1136,
262
+ "step": 360
263
+ },
264
+ {
265
+ "epoch": 0.2631578947368421,
266
+ "grad_norm": 3.3507131315688037,
267
+ "learning_rate": 8.744075829383887e-06,
268
+ "loss": 1.109,
269
+ "step": 370
270
+ },
271
+ {
272
+ "epoch": 0.2702702702702703,
273
+ "grad_norm": 3.286430938699654,
274
+ "learning_rate": 8.981042654028437e-06,
275
+ "loss": 1.0926,
276
+ "step": 380
277
+ },
278
+ {
279
+ "epoch": 0.2773826458036984,
280
+ "grad_norm": 3.543025306575121,
281
+ "learning_rate": 9.218009478672988e-06,
282
+ "loss": 0.9856,
283
+ "step": 390
284
+ },
285
+ {
286
+ "epoch": 0.2844950213371266,
287
+ "grad_norm": 2.9641151250477,
288
+ "learning_rate": 9.454976303317538e-06,
289
+ "loss": 1.0438,
290
+ "step": 400
291
+ },
292
+ {
293
+ "epoch": 0.29160739687055476,
294
+ "grad_norm": 3.0879210891464175,
295
+ "learning_rate": 9.691943127962086e-06,
296
+ "loss": 0.9834,
297
+ "step": 410
298
+ },
299
+ {
300
+ "epoch": 0.29871977240398295,
301
+ "grad_norm": 3.5828764512704274,
302
+ "learning_rate": 9.928909952606636e-06,
303
+ "loss": 1.0355,
304
+ "step": 420
305
+ },
306
+ {
307
+ "epoch": 0.3058321479374111,
308
+ "grad_norm": 3.0432346994349944,
309
+ "learning_rate": 9.99991609608766e-06,
310
+ "loss": 0.9973,
311
+ "step": 430
312
+ },
313
+ {
314
+ "epoch": 0.3129445234708393,
315
+ "grad_norm": 3.6852442122283384,
316
+ "learning_rate": 9.999505144928566e-06,
317
+ "loss": 1.0118,
318
+ "step": 440
319
+ },
320
+ {
321
+ "epoch": 0.3200568990042674,
322
+ "grad_norm": 3.4571934113589893,
323
+ "learning_rate": 9.998751763712045e-06,
324
+ "loss": 0.915,
325
+ "step": 450
326
+ },
327
+ {
328
+ "epoch": 0.32716927453769556,
329
+ "grad_norm": 3.3733896978659215,
330
+ "learning_rate": 9.997656004039284e-06,
331
+ "loss": 0.8872,
332
+ "step": 460
333
+ },
334
+ {
335
+ "epoch": 0.33428165007112376,
336
+ "grad_norm": 3.1986482463279344,
337
+ "learning_rate": 9.99621794096192e-06,
338
+ "loss": 0.9233,
339
+ "step": 470
340
+ },
341
+ {
342
+ "epoch": 0.3413940256045519,
343
+ "grad_norm": 3.3781480125146217,
344
+ "learning_rate": 9.994437672976904e-06,
345
+ "loss": 0.8156,
346
+ "step": 480
347
+ },
348
+ {
349
+ "epoch": 0.3485064011379801,
350
+ "grad_norm": 3.6561286544224516,
351
+ "learning_rate": 9.99231532201976e-06,
352
+ "loss": 0.8749,
353
+ "step": 490
354
+ },
355
+ {
356
+ "epoch": 0.35561877667140823,
357
+ "grad_norm": 4.142627644307138,
358
+ "learning_rate": 9.989851033456224e-06,
359
+ "loss": 0.8598,
360
+ "step": 500
361
+ },
362
+ {
363
+ "epoch": 0.3627311522048364,
364
+ "grad_norm": 3.7494771233239828,
365
+ "learning_rate": 9.987044976072298e-06,
366
+ "loss": 0.8118,
367
+ "step": 510
368
+ },
369
+ {
370
+ "epoch": 0.36984352773826457,
371
+ "grad_norm": 3.6547956812812123,
372
+ "learning_rate": 9.983897342062681e-06,
373
+ "loss": 0.8227,
374
+ "step": 520
375
+ },
376
+ {
377
+ "epoch": 0.37695590327169276,
378
+ "grad_norm": 3.679890083139226,
379
+ "learning_rate": 9.98040834701761e-06,
380
+ "loss": 0.8132,
381
+ "step": 530
382
+ },
383
+ {
384
+ "epoch": 0.3840682788051209,
385
+ "grad_norm": 3.252191257909053,
386
+ "learning_rate": 9.97657822990809e-06,
387
+ "loss": 0.7806,
388
+ "step": 540
389
+ },
390
+ {
391
+ "epoch": 0.3911806543385491,
392
+ "grad_norm": 3.614922960561001,
393
+ "learning_rate": 9.972407253069527e-06,
394
+ "loss": 0.8095,
395
+ "step": 550
396
+ },
397
+ {
398
+ "epoch": 0.39829302987197723,
399
+ "grad_norm": 3.793537378483368,
400
+ "learning_rate": 9.967895702183767e-06,
401
+ "loss": 0.7911,
402
+ "step": 560
403
+ },
404
+ {
405
+ "epoch": 0.40540540540540543,
406
+ "grad_norm": 3.65980827340659,
407
+ "learning_rate": 9.963043886259518e-06,
408
+ "loss": 0.7712,
409
+ "step": 570
410
+ },
411
+ {
412
+ "epoch": 0.41251778093883357,
413
+ "grad_norm": 3.5164539759645037,
414
+ "learning_rate": 9.957852137611187e-06,
415
+ "loss": 0.7634,
416
+ "step": 580
417
+ },
418
+ {
419
+ "epoch": 0.41963015647226176,
420
+ "grad_norm": 3.3236842648189633,
421
+ "learning_rate": 9.952320811836129e-06,
422
+ "loss": 0.6903,
423
+ "step": 590
424
+ },
425
+ {
426
+ "epoch": 0.4267425320056899,
427
+ "grad_norm": 3.294343434220933,
428
+ "learning_rate": 9.94645028779028e-06,
429
+ "loss": 0.7238,
430
+ "step": 600
431
+ },
432
+ {
433
+ "epoch": 0.43385490753911804,
434
+ "grad_norm": 3.4974393759929208,
435
+ "learning_rate": 9.94024096756221e-06,
436
+ "loss": 0.694,
437
+ "step": 610
438
+ },
439
+ {
440
+ "epoch": 0.44096728307254623,
441
+ "grad_norm": 4.433758888856019,
442
+ "learning_rate": 9.933693276445588e-06,
443
+ "loss": 0.7057,
444
+ "step": 620
445
+ },
446
+ {
447
+ "epoch": 0.4480796586059744,
448
+ "grad_norm": 3.3896425434092503,
449
+ "learning_rate": 9.92680766291005e-06,
450
+ "loss": 0.7001,
451
+ "step": 630
452
+ },
453
+ {
454
+ "epoch": 0.45519203413940257,
455
+ "grad_norm": 3.2995707993625834,
456
+ "learning_rate": 9.91958459857048e-06,
457
+ "loss": 0.6451,
458
+ "step": 640
459
+ },
460
+ {
461
+ "epoch": 0.4623044096728307,
462
+ "grad_norm": 3.5589453987217805,
463
+ "learning_rate": 9.912024578154706e-06,
464
+ "loss": 0.6539,
465
+ "step": 650
466
+ },
467
+ {
468
+ "epoch": 0.4694167852062589,
469
+ "grad_norm": 3.457156793924661,
470
+ "learning_rate": 9.904128119469625e-06,
471
+ "loss": 0.6383,
472
+ "step": 660
473
+ },
474
+ {
475
+ "epoch": 0.47652916073968704,
476
+ "grad_norm": 3.791061357289613,
477
+ "learning_rate": 9.895895763365722e-06,
478
+ "loss": 0.6319,
479
+ "step": 670
480
+ },
481
+ {
482
+ "epoch": 0.48364153627311524,
483
+ "grad_norm": 3.7253719001786307,
484
+ "learning_rate": 9.88732807370004e-06,
485
+ "loss": 0.589,
486
+ "step": 680
487
+ },
488
+ {
489
+ "epoch": 0.4907539118065434,
490
+ "grad_norm": 3.8753257386340167,
491
+ "learning_rate": 9.878425637297549e-06,
492
+ "loss": 0.5236,
493
+ "step": 690
494
+ },
495
+ {
496
+ "epoch": 0.49786628733997157,
497
+ "grad_norm": 3.810036186400155,
498
+ "learning_rate": 9.869189063910959e-06,
499
+ "loss": 0.524,
500
+ "step": 700
501
+ },
502
+ {
503
+ "epoch": 0.5049786628733998,
504
+ "grad_norm": 4.2180281642967365,
505
+ "learning_rate": 9.859618986178953e-06,
506
+ "loss": 0.5336,
507
+ "step": 710
508
+ },
509
+ {
510
+ "epoch": 0.5120910384068279,
511
+ "grad_norm": 3.938273345051735,
512
+ "learning_rate": 9.84971605958286e-06,
513
+ "loss": 0.5202,
514
+ "step": 720
515
+ },
516
+ {
517
+ "epoch": 0.519203413940256,
518
+ "grad_norm": 3.5712127017141397,
519
+ "learning_rate": 9.839480962401753e-06,
520
+ "loss": 0.4938,
521
+ "step": 730
522
+ },
523
+ {
524
+ "epoch": 0.5263157894736842,
525
+ "grad_norm": 3.383580945232286,
526
+ "learning_rate": 9.828914395665996e-06,
527
+ "loss": 0.4503,
528
+ "step": 740
529
+ },
530
+ {
531
+ "epoch": 0.5334281650071123,
532
+ "grad_norm": 3.850151538007975,
533
+ "learning_rate": 9.818017083109233e-06,
534
+ "loss": 0.5067,
535
+ "step": 750
536
+ },
537
+ {
538
+ "epoch": 0.5405405405405406,
539
+ "grad_norm": 3.579242735091459,
540
+ "learning_rate": 9.8067897711188e-06,
541
+ "loss": 0.4296,
542
+ "step": 760
543
+ },
544
+ {
545
+ "epoch": 0.5476529160739687,
546
+ "grad_norm": 3.33637898169204,
547
+ "learning_rate": 9.795233228684631e-06,
548
+ "loss": 0.422,
549
+ "step": 770
550
+ },
551
+ {
552
+ "epoch": 0.5547652916073968,
553
+ "grad_norm": 3.3180173487560998,
554
+ "learning_rate": 9.783348247346558e-06,
555
+ "loss": 0.4352,
556
+ "step": 780
557
+ },
558
+ {
559
+ "epoch": 0.561877667140825,
560
+ "grad_norm": 3.3074859328364172,
561
+ "learning_rate": 9.771135641140117e-06,
562
+ "loss": 0.3788,
563
+ "step": 790
564
+ },
565
+ {
566
+ "epoch": 0.5689900426742532,
567
+ "grad_norm": 3.935128904527344,
568
+ "learning_rate": 9.758596246540782e-06,
569
+ "loss": 0.4512,
570
+ "step": 800
571
+ },
572
+ {
573
+ "epoch": 0.5761024182076814,
574
+ "grad_norm": 3.130800872692149,
575
+ "learning_rate": 9.74573092240668e-06,
576
+ "loss": 0.4286,
577
+ "step": 810
578
+ },
579
+ {
580
+ "epoch": 0.5832147937411095,
581
+ "grad_norm": 3.4818017716980076,
582
+ "learning_rate": 9.732540549919758e-06,
583
+ "loss": 0.3976,
584
+ "step": 820
585
+ },
586
+ {
587
+ "epoch": 0.5903271692745377,
588
+ "grad_norm": 3.7176422056718708,
589
+ "learning_rate": 9.719026032525432e-06,
590
+ "loss": 0.3845,
591
+ "step": 830
592
+ },
593
+ {
594
+ "epoch": 0.5974395448079659,
595
+ "grad_norm": 4.0428367587373115,
596
+ "learning_rate": 9.70518829587071e-06,
597
+ "loss": 0.3761,
598
+ "step": 840
599
+ },
600
+ {
601
+ "epoch": 0.604551920341394,
602
+ "grad_norm": 3.32333703731893,
603
+ "learning_rate": 9.691028287740783e-06,
604
+ "loss": 0.3663,
605
+ "step": 850
606
+ },
607
+ {
608
+ "epoch": 0.6116642958748222,
609
+ "grad_norm": 4.055447477108677,
610
+ "learning_rate": 9.67654697799412e-06,
611
+ "loss": 0.3683,
612
+ "step": 860
613
+ },
614
+ {
615
+ "epoch": 0.6187766714082503,
616
+ "grad_norm": 2.801736293850873,
617
+ "learning_rate": 9.661745358496033e-06,
618
+ "loss": 0.3302,
619
+ "step": 870
620
+ },
621
+ {
622
+ "epoch": 0.6258890469416786,
623
+ "grad_norm": 2.9454979478833576,
624
+ "learning_rate": 9.64662444305074e-06,
625
+ "loss": 0.3714,
626
+ "step": 880
627
+ },
628
+ {
629
+ "epoch": 0.6330014224751067,
630
+ "grad_norm": 3.933969741535959,
631
+ "learning_rate": 9.631185267331937e-06,
632
+ "loss": 0.3214,
633
+ "step": 890
634
+ },
635
+ {
636
+ "epoch": 0.6401137980085349,
637
+ "grad_norm": 3.0707180797561398,
638
+ "learning_rate": 9.615428888811842e-06,
639
+ "loss": 0.3151,
640
+ "step": 900
641
+ },
642
+ {
643
+ "epoch": 0.647226173541963,
644
+ "grad_norm": 3.6006782352295095,
645
+ "learning_rate": 9.59935638668879e-06,
646
+ "loss": 0.3134,
647
+ "step": 910
648
+ },
649
+ {
650
+ "epoch": 0.6543385490753911,
651
+ "grad_norm": 4.528381319074012,
652
+ "learning_rate": 9.582968861813295e-06,
653
+ "loss": 0.2826,
654
+ "step": 920
655
+ },
656
+ {
657
+ "epoch": 0.6614509246088194,
658
+ "grad_norm": 3.084970600037643,
659
+ "learning_rate": 9.566267436612662e-06,
660
+ "loss": 0.3272,
661
+ "step": 930
662
+ },
663
+ {
664
+ "epoch": 0.6685633001422475,
665
+ "grad_norm": 3.1926454881670008,
666
+ "learning_rate": 9.549253255014105e-06,
667
+ "loss": 0.2838,
668
+ "step": 940
669
+ },
670
+ {
671
+ "epoch": 0.6756756756756757,
672
+ "grad_norm": 3.3232334022391083,
673
+ "learning_rate": 9.531927482366398e-06,
674
+ "loss": 0.2676,
675
+ "step": 950
676
+ },
677
+ {
678
+ "epoch": 0.6827880512091038,
679
+ "grad_norm": 3.373450413027547,
680
+ "learning_rate": 9.514291305360053e-06,
681
+ "loss": 0.2615,
682
+ "step": 960
683
+ },
684
+ {
685
+ "epoch": 0.689900426742532,
686
+ "grad_norm": 3.298511219641843,
687
+ "learning_rate": 9.496345931946039e-06,
688
+ "loss": 0.2232,
689
+ "step": 970
690
+ },
691
+ {
692
+ "epoch": 0.6970128022759602,
693
+ "grad_norm": 2.8709213001564726,
694
+ "learning_rate": 9.47809259125306e-06,
695
+ "loss": 0.2628,
696
+ "step": 980
697
+ },
698
+ {
699
+ "epoch": 0.7041251778093883,
700
+ "grad_norm": 3.0027633203506,
701
+ "learning_rate": 9.459532533503347e-06,
702
+ "loss": 0.2404,
703
+ "step": 990
704
+ },
705
+ {
706
+ "epoch": 0.7112375533428165,
707
+ "grad_norm": 3.0886670354052823,
708
+ "learning_rate": 9.440667029927043e-06,
709
+ "loss": 0.2259,
710
+ "step": 1000
711
+ },
712
+ {
713
+ "epoch": 0.7183499288762447,
714
+ "grad_norm": 3.413560155663082,
715
+ "learning_rate": 9.421497372675133e-06,
716
+ "loss": 0.208,
717
+ "step": 1010
718
+ },
719
+ {
720
+ "epoch": 0.7254623044096729,
721
+ "grad_norm": 2.26900305381711,
722
+ "learning_rate": 9.402024874730928e-06,
723
+ "loss": 0.2277,
724
+ "step": 1020
725
+ },
726
+ {
727
+ "epoch": 0.732574679943101,
728
+ "grad_norm": 3.5894430284698315,
729
+ "learning_rate": 9.382250869820146e-06,
730
+ "loss": 0.1926,
731
+ "step": 1030
732
+ },
733
+ {
734
+ "epoch": 0.7396870554765291,
735
+ "grad_norm": 3.267737905170995,
736
+ "learning_rate": 9.36217671231956e-06,
737
+ "loss": 0.2299,
738
+ "step": 1040
739
+ },
740
+ {
741
+ "epoch": 0.7467994310099573,
742
+ "grad_norm": 2.7538943048992737,
743
+ "learning_rate": 9.341803777164228e-06,
744
+ "loss": 0.1708,
745
+ "step": 1050
746
+ },
747
+ {
748
+ "epoch": 0.7539118065433855,
749
+ "grad_norm": 3.867540040555883,
750
+ "learning_rate": 9.321133459753322e-06,
751
+ "loss": 0.2072,
752
+ "step": 1060
753
+ },
754
+ {
755
+ "epoch": 0.7610241820768137,
756
+ "grad_norm": 2.3384449104832226,
757
+ "learning_rate": 9.300167175854564e-06,
758
+ "loss": 0.1875,
759
+ "step": 1070
760
+ },
761
+ {
762
+ "epoch": 0.7681365576102418,
763
+ "grad_norm": 3.6436777076779348,
764
+ "learning_rate": 9.278906361507238e-06,
765
+ "loss": 0.173,
766
+ "step": 1080
767
+ },
768
+ {
769
+ "epoch": 0.7752489331436699,
770
+ "grad_norm": 2.623342004246653,
771
+ "learning_rate": 9.257352472923842e-06,
772
+ "loss": 0.1489,
773
+ "step": 1090
774
+ },
775
+ {
776
+ "epoch": 0.7823613086770982,
777
+ "grad_norm": 2.9293688128652606,
778
+ "learning_rate": 9.235506986390346e-06,
779
+ "loss": 0.1423,
780
+ "step": 1100
781
+ },
782
+ {
783
+ "epoch": 0.7894736842105263,
784
+ "grad_norm": 3.1229986788680653,
785
+ "learning_rate": 9.213371398165077e-06,
786
+ "loss": 0.1564,
787
+ "step": 1110
788
+ },
789
+ {
790
+ "epoch": 0.7965860597439545,
791
+ "grad_norm": 3.5638406658438826,
792
+ "learning_rate": 9.190947224376238e-06,
793
+ "loss": 0.1872,
794
+ "step": 1120
795
+ },
796
+ {
797
+ "epoch": 0.8036984352773826,
798
+ "grad_norm": 3.754826640146973,
799
+ "learning_rate": 9.168236000918063e-06,
800
+ "loss": 0.1483,
801
+ "step": 1130
802
+ },
803
+ {
804
+ "epoch": 0.8108108108108109,
805
+ "grad_norm": 2.494125324383473,
806
+ "learning_rate": 9.145239283345618e-06,
807
+ "loss": 0.1272,
808
+ "step": 1140
809
+ },
810
+ {
811
+ "epoch": 0.817923186344239,
812
+ "grad_norm": 2.1750463421723003,
813
+ "learning_rate": 9.121958646768251e-06,
814
+ "loss": 0.1361,
815
+ "step": 1150
816
+ },
817
+ {
818
+ "epoch": 0.8250355618776671,
819
+ "grad_norm": 2.6835693031385035,
820
+ "learning_rate": 9.09839568574173e-06,
821
+ "loss": 0.1001,
822
+ "step": 1160
823
+ },
824
+ {
825
+ "epoch": 0.8321479374110953,
826
+ "grad_norm": 2.520530993255376,
827
+ "learning_rate": 9.074552014158994e-06,
828
+ "loss": 0.1193,
829
+ "step": 1170
830
+ },
831
+ {
832
+ "epoch": 0.8392603129445235,
833
+ "grad_norm": 2.583475505711053,
834
+ "learning_rate": 9.050429265139647e-06,
835
+ "loss": 0.1122,
836
+ "step": 1180
837
+ },
838
+ {
839
+ "epoch": 0.8463726884779517,
840
+ "grad_norm": 3.0551608668064736,
841
+ "learning_rate": 9.026029090918076e-06,
842
+ "loss": 0.1345,
843
+ "step": 1190
844
+ },
845
+ {
846
+ "epoch": 0.8534850640113798,
847
+ "grad_norm": 2.7079152732306917,
848
+ "learning_rate": 9.001353162730297e-06,
849
+ "loss": 0.1134,
850
+ "step": 1200
851
+ },
852
+ {
853
+ "epoch": 0.8605974395448079,
854
+ "grad_norm": 2.2611924634890075,
855
+ "learning_rate": 8.976403170699486e-06,
856
+ "loss": 0.1026,
857
+ "step": 1210
858
+ },
859
+ {
860
+ "epoch": 0.8677098150782361,
861
+ "grad_norm": 1.987002883566529,
862
+ "learning_rate": 8.951180823720212e-06,
863
+ "loss": 0.0967,
864
+ "step": 1220
865
+ },
866
+ {
867
+ "epoch": 0.8748221906116643,
868
+ "grad_norm": 2.7967317585114615,
869
+ "learning_rate": 8.925687849341398e-06,
870
+ "loss": 0.0819,
871
+ "step": 1230
872
+ },
873
+ {
874
+ "epoch": 0.8819345661450925,
875
+ "grad_norm": 2.65859268119004,
876
+ "learning_rate": 8.899925993647994e-06,
877
+ "loss": 0.0931,
878
+ "step": 1240
879
+ },
880
+ {
881
+ "epoch": 0.8890469416785206,
882
+ "grad_norm": 2.5541801040927226,
883
+ "learning_rate": 8.873897021141378e-06,
884
+ "loss": 0.0888,
885
+ "step": 1250
886
+ },
887
+ {
888
+ "epoch": 0.8961593172119487,
889
+ "grad_norm": 2.6513786896328413,
890
+ "learning_rate": 8.847602714618504e-06,
891
+ "loss": 0.0839,
892
+ "step": 1260
893
+ },
894
+ {
895
+ "epoch": 0.903271692745377,
896
+ "grad_norm": 2.3497391818693587,
897
+ "learning_rate": 8.821044875049796e-06,
898
+ "loss": 0.0878,
899
+ "step": 1270
900
+ },
901
+ {
902
+ "epoch": 0.9103840682788051,
903
+ "grad_norm": 2.067880100094928,
904
+ "learning_rate": 8.794225321455788e-06,
905
+ "loss": 0.0866,
906
+ "step": 1280
907
+ },
908
+ {
909
+ "epoch": 0.9174964438122333,
910
+ "grad_norm": 2.466600341108382,
911
+ "learning_rate": 8.767145890782542e-06,
912
+ "loss": 0.0849,
913
+ "step": 1290
914
+ },
915
+ {
916
+ "epoch": 0.9246088193456614,
917
+ "grad_norm": 2.694537159823399,
918
+ "learning_rate": 8.739808437775825e-06,
919
+ "loss": 0.0773,
920
+ "step": 1300
921
+ },
922
+ {
923
+ "epoch": 0.9317211948790897,
924
+ "grad_norm": 3.051119356918663,
925
+ "learning_rate": 8.71221483485407e-06,
926
+ "loss": 0.0887,
927
+ "step": 1310
928
+ },
929
+ {
930
+ "epoch": 0.9388335704125178,
931
+ "grad_norm": 2.091226963672429,
932
+ "learning_rate": 8.684366971980139e-06,
933
+ "loss": 0.0739,
934
+ "step": 1320
935
+ },
936
+ {
937
+ "epoch": 0.9459459459459459,
938
+ "grad_norm": 2.6573993659558885,
939
+ "learning_rate": 8.656266756531857e-06,
940
+ "loss": 0.0757,
941
+ "step": 1330
942
+ },
943
+ {
944
+ "epoch": 0.9530583214793741,
945
+ "grad_norm": 2.5135440840845593,
946
+ "learning_rate": 8.627916113171396e-06,
947
+ "loss": 0.0695,
948
+ "step": 1340
949
+ },
950
+ {
951
+ "epoch": 0.9601706970128022,
952
+ "grad_norm": 1.8647689285533582,
953
+ "learning_rate": 8.599316983713419e-06,
954
+ "loss": 0.0703,
955
+ "step": 1350
956
+ },
957
+ {
958
+ "epoch": 0.9672830725462305,
959
+ "grad_norm": 2.1656321527764444,
960
+ "learning_rate": 8.570471326992105e-06,
961
+ "loss": 0.062,
962
+ "step": 1360
963
+ },
964
+ {
965
+ "epoch": 0.9743954480796586,
966
+ "grad_norm": 2.705238359384965,
967
+ "learning_rate": 8.54138111872697e-06,
968
+ "loss": 0.0755,
969
+ "step": 1370
970
+ },
971
+ {
972
+ "epoch": 0.9815078236130867,
973
+ "grad_norm": 1.4926114349562027,
974
+ "learning_rate": 8.512048351387551e-06,
975
+ "loss": 0.0656,
976
+ "step": 1380
977
+ },
978
+ {
979
+ "epoch": 0.9886201991465149,
980
+ "grad_norm": 2.193183643997932,
981
+ "learning_rate": 8.482475034056927e-06,
982
+ "loss": 0.0659,
983
+ "step": 1390
984
+ },
985
+ {
986
+ "epoch": 0.9957325746799431,
987
+ "grad_norm": 2.0527279052017264,
988
+ "learning_rate": 8.452663192294121e-06,
989
+ "loss": 0.0576,
990
+ "step": 1400
991
+ },
992
+ {
993
+ "epoch": 1.0028449502133712,
994
+ "grad_norm": 2.043379604895136,
995
+ "learning_rate": 8.42261486799536e-06,
996
+ "loss": 0.0518,
997
+ "step": 1410
998
+ },
999
+ {
1000
+ "epoch": 1.0099573257467995,
1001
+ "grad_norm": 1.7935460456418109,
1002
+ "learning_rate": 8.392332119254214e-06,
1003
+ "loss": 0.0363,
1004
+ "step": 1420
1005
+ },
1006
+ {
1007
+ "epoch": 1.0170697012802277,
1008
+ "grad_norm": 1.9591421706180754,
1009
+ "learning_rate": 8.361817020220647e-06,
1010
+ "loss": 0.0345,
1011
+ "step": 1430
1012
+ },
1013
+ {
1014
+ "epoch": 1.0241820768136558,
1015
+ "grad_norm": 1.904127146547918,
1016
+ "learning_rate": 8.331071660958936e-06,
1017
+ "loss": 0.039,
1018
+ "step": 1440
1019
+ },
1020
+ {
1021
+ "epoch": 1.031294452347084,
1022
+ "grad_norm": 1.8927150070468237,
1023
+ "learning_rate": 8.300098147304523e-06,
1024
+ "loss": 0.0365,
1025
+ "step": 1450
1026
+ },
1027
+ {
1028
+ "epoch": 1.038406827880512,
1029
+ "grad_norm": 1.9578224146696355,
1030
+ "learning_rate": 8.268898600719785e-06,
1031
+ "loss": 0.0431,
1032
+ "step": 1460
1033
+ },
1034
+ {
1035
+ "epoch": 1.0455192034139402,
1036
+ "grad_norm": 2.119890142949488,
1037
+ "learning_rate": 8.237475158148724e-06,
1038
+ "loss": 0.0429,
1039
+ "step": 1470
1040
+ },
1041
+ {
1042
+ "epoch": 1.0526315789473684,
1043
+ "grad_norm": 1.9482483964200852,
1044
+ "learning_rate": 8.205829971870602e-06,
1045
+ "loss": 0.0397,
1046
+ "step": 1480
1047
+ },
1048
+ {
1049
+ "epoch": 1.0597439544807965,
1050
+ "grad_norm": 1.7329874393672655,
1051
+ "learning_rate": 8.173965209352524e-06,
1052
+ "loss": 0.0344,
1053
+ "step": 1490
1054
+ },
1055
+ {
1056
+ "epoch": 1.0668563300142249,
1057
+ "grad_norm": 1.8911139378477928,
1058
+ "learning_rate": 8.14188305310099e-06,
1059
+ "loss": 0.0464,
1060
+ "step": 1500
1061
+ },
1062
+ {
1063
+ "epoch": 1.073968705547653,
1064
+ "grad_norm": 2.450233012383526,
1065
+ "learning_rate": 8.109585700512395e-06,
1066
+ "loss": 0.0375,
1067
+ "step": 1510
1068
+ },
1069
+ {
1070
+ "epoch": 1.0810810810810811,
1071
+ "grad_norm": 2.0138094788301166,
1072
+ "learning_rate": 8.077075363722542e-06,
1073
+ "loss": 0.0389,
1074
+ "step": 1520
1075
+ },
1076
+ {
1077
+ "epoch": 1.0881934566145093,
1078
+ "grad_norm": 2.076572644222088,
1079
+ "learning_rate": 8.044354269455109e-06,
1080
+ "loss": 0.0436,
1081
+ "step": 1530
1082
+ },
1083
+ {
1084
+ "epoch": 1.0953058321479374,
1085
+ "grad_norm": 1.9101229450735917,
1086
+ "learning_rate": 8.011424658869142e-06,
1087
+ "loss": 0.0357,
1088
+ "step": 1540
1089
+ },
1090
+ {
1091
+ "epoch": 1.1024182076813656,
1092
+ "grad_norm": 1.130649417703215,
1093
+ "learning_rate": 7.978288787405556e-06,
1094
+ "loss": 0.0362,
1095
+ "step": 1550
1096
+ },
1097
+ {
1098
+ "epoch": 1.1095305832147937,
1099
+ "grad_norm": 1.1581533245467266,
1100
+ "learning_rate": 7.944948924632643e-06,
1101
+ "loss": 0.0345,
1102
+ "step": 1560
1103
+ },
1104
+ {
1105
+ "epoch": 1.1166429587482218,
1106
+ "grad_norm": 1.6643524677849526,
1107
+ "learning_rate": 7.911407354090634e-06,
1108
+ "loss": 0.0354,
1109
+ "step": 1570
1110
+ },
1111
+ {
1112
+ "epoch": 1.12375533428165,
1113
+ "grad_norm": 1.9726198917599644,
1114
+ "learning_rate": 7.877666373135287e-06,
1115
+ "loss": 0.0346,
1116
+ "step": 1580
1117
+ },
1118
+ {
1119
+ "epoch": 1.1308677098150781,
1120
+ "grad_norm": 1.6692436200631287,
1121
+ "learning_rate": 7.84372829278053e-06,
1122
+ "loss": 0.038,
1123
+ "step": 1590
1124
+ },
1125
+ {
1126
+ "epoch": 1.1379800853485065,
1127
+ "grad_norm": 1.7045565380565189,
1128
+ "learning_rate": 7.809595437540189e-06,
1129
+ "loss": 0.0327,
1130
+ "step": 1600
1131
+ },
1132
+ {
1133
+ "epoch": 1.1450924608819346,
1134
+ "grad_norm": 1.9976160352568044,
1135
+ "learning_rate": 7.775270145268755e-06,
1136
+ "loss": 0.0256,
1137
+ "step": 1610
1138
+ },
1139
+ {
1140
+ "epoch": 1.1522048364153628,
1141
+ "grad_norm": 1.3781171703418404,
1142
+ "learning_rate": 7.740754767001278e-06,
1143
+ "loss": 0.039,
1144
+ "step": 1620
1145
+ },
1146
+ {
1147
+ "epoch": 1.159317211948791,
1148
+ "grad_norm": 1.675366937408603,
1149
+ "learning_rate": 7.706051666792318e-06,
1150
+ "loss": 0.0353,
1151
+ "step": 1630
1152
+ },
1153
+ {
1154
+ "epoch": 1.166429587482219,
1155
+ "grad_norm": 1.5507760610752672,
1156
+ "learning_rate": 7.671163221554043e-06,
1157
+ "loss": 0.0353,
1158
+ "step": 1640
1159
+ },
1160
+ {
1161
+ "epoch": 1.1735419630156472,
1162
+ "grad_norm": 1.5578057994726024,
1163
+ "learning_rate": 7.636091820893417e-06,
1164
+ "loss": 0.0374,
1165
+ "step": 1650
1166
+ },
1167
+ {
1168
+ "epoch": 1.1806543385490753,
1169
+ "grad_norm": 1.9536673456849045,
1170
+ "learning_rate": 7.600839866948528e-06,
1171
+ "loss": 0.0363,
1172
+ "step": 1660
1173
+ },
1174
+ {
1175
+ "epoch": 1.1877667140825037,
1176
+ "grad_norm": 1.4180294508669007,
1177
+ "learning_rate": 7.565409774224066e-06,
1178
+ "loss": 0.0349,
1179
+ "step": 1670
1180
+ },
1181
+ {
1182
+ "epoch": 1.1948790896159318,
1183
+ "grad_norm": 1.6616296432221909,
1184
+ "learning_rate": 7.529803969425941e-06,
1185
+ "loss": 0.0307,
1186
+ "step": 1680
1187
+ },
1188
+ {
1189
+ "epoch": 1.20199146514936,
1190
+ "grad_norm": 1.7138246686303804,
1191
+ "learning_rate": 7.494024891295075e-06,
1192
+ "loss": 0.0322,
1193
+ "step": 1690
1194
+ },
1195
+ {
1196
+ "epoch": 1.209103840682788,
1197
+ "grad_norm": 1.3613855884690513,
1198
+ "learning_rate": 7.458074990440363e-06,
1199
+ "loss": 0.0293,
1200
+ "step": 1700
1201
+ },
1202
+ {
1203
+ "epoch": 1.2162162162162162,
1204
+ "grad_norm": 2.4114521805394205,
1205
+ "learning_rate": 7.421956729170823e-06,
1206
+ "loss": 0.0344,
1207
+ "step": 1710
1208
+ },
1209
+ {
1210
+ "epoch": 1.2233285917496444,
1211
+ "grad_norm": 1.9233612034450194,
1212
+ "learning_rate": 7.385672581326954e-06,
1213
+ "loss": 0.0351,
1214
+ "step": 1720
1215
+ },
1216
+ {
1217
+ "epoch": 1.2304409672830725,
1218
+ "grad_norm": 1.7307194070590812,
1219
+ "learning_rate": 7.34922503211128e-06,
1220
+ "loss": 0.0353,
1221
+ "step": 1730
1222
+ },
1223
+ {
1224
+ "epoch": 1.2375533428165006,
1225
+ "grad_norm": 1.468735660134803,
1226
+ "learning_rate": 7.312616577918149e-06,
1227
+ "loss": 0.03,
1228
+ "step": 1740
1229
+ },
1230
+ {
1231
+ "epoch": 1.2446657183499288,
1232
+ "grad_norm": 0.9815553395553774,
1233
+ "learning_rate": 7.2758497261627345e-06,
1234
+ "loss": 0.0267,
1235
+ "step": 1750
1236
+ },
1237
+ {
1238
+ "epoch": 1.251778093883357,
1239
+ "grad_norm": 1.4851270984075178,
1240
+ "learning_rate": 7.238926995109306e-06,
1241
+ "loss": 0.0288,
1242
+ "step": 1760
1243
+ },
1244
+ {
1245
+ "epoch": 1.2588904694167853,
1246
+ "grad_norm": 2.2537032746619183,
1247
+ "learning_rate": 7.201850913698736e-06,
1248
+ "loss": 0.0364,
1249
+ "step": 1770
1250
+ },
1251
+ {
1252
+ "epoch": 1.2660028449502134,
1253
+ "grad_norm": 1.454211009387941,
1254
+ "learning_rate": 7.164624021375294e-06,
1255
+ "loss": 0.0252,
1256
+ "step": 1780
1257
+ },
1258
+ {
1259
+ "epoch": 1.2731152204836416,
1260
+ "grad_norm": 1.4034123768391151,
1261
+ "learning_rate": 7.12724886791271e-06,
1262
+ "loss": 0.0266,
1263
+ "step": 1790
1264
+ },
1265
+ {
1266
+ "epoch": 1.2802275960170697,
1267
+ "grad_norm": 1.546526107411268,
1268
+ "learning_rate": 7.08972801323953e-06,
1269
+ "loss": 0.03,
1270
+ "step": 1800
1271
+ },
1272
+ {
1273
+ "epoch": 1.2873399715504978,
1274
+ "grad_norm": 1.6929689381873503,
1275
+ "learning_rate": 7.052064027263785e-06,
1276
+ "loss": 0.0235,
1277
+ "step": 1810
1278
+ },
1279
+ {
1280
+ "epoch": 1.294452347083926,
1281
+ "grad_norm": 1.5130921744879449,
1282
+ "learning_rate": 7.014259489696968e-06,
1283
+ "loss": 0.0243,
1284
+ "step": 1820
1285
+ },
1286
+ {
1287
+ "epoch": 1.3015647226173541,
1288
+ "grad_norm": 1.9572718096346318,
1289
+ "learning_rate": 6.976316989877343e-06,
1290
+ "loss": 0.0249,
1291
+ "step": 1830
1292
+ },
1293
+ {
1294
+ "epoch": 1.3086770981507825,
1295
+ "grad_norm": 1.2611303057850376,
1296
+ "learning_rate": 6.938239126592592e-06,
1297
+ "loss": 0.0263,
1298
+ "step": 1840
1299
+ },
1300
+ {
1301
+ "epoch": 1.3157894736842106,
1302
+ "grad_norm": 1.2902816153314383,
1303
+ "learning_rate": 6.90002850790182e-06,
1304
+ "loss": 0.0298,
1305
+ "step": 1850
1306
+ },
1307
+ {
1308
+ "epoch": 1.3229018492176388,
1309
+ "grad_norm": 0.9719782814773048,
1310
+ "learning_rate": 6.861687750956922e-06,
1311
+ "loss": 0.027,
1312
+ "step": 1860
1313
+ },
1314
+ {
1315
+ "epoch": 1.330014224751067,
1316
+ "grad_norm": 1.1718631838309244,
1317
+ "learning_rate": 6.823219481823318e-06,
1318
+ "loss": 0.0245,
1319
+ "step": 1870
1320
+ },
1321
+ {
1322
+ "epoch": 1.337126600284495,
1323
+ "grad_norm": 1.3461970346065844,
1324
+ "learning_rate": 6.784626335300102e-06,
1325
+ "loss": 0.0198,
1326
+ "step": 1880
1327
+ },
1328
+ {
1329
+ "epoch": 1.3442389758179232,
1330
+ "grad_norm": 1.1445639186428003,
1331
+ "learning_rate": 6.745910954739563e-06,
1332
+ "loss": 0.0274,
1333
+ "step": 1890
1334
+ },
1335
+ {
1336
+ "epoch": 1.3513513513513513,
1337
+ "grad_norm": 1.9649035858601103,
1338
+ "learning_rate": 6.707075991866143e-06,
1339
+ "loss": 0.0268,
1340
+ "step": 1900
1341
+ },
1342
+ {
1343
+ "epoch": 1.3584637268847795,
1344
+ "grad_norm": 1.3779682004442027,
1345
+ "learning_rate": 6.668124106594813e-06,
1346
+ "loss": 0.0274,
1347
+ "step": 1910
1348
+ },
1349
+ {
1350
+ "epoch": 1.3655761024182076,
1351
+ "grad_norm": 0.9339287727084011,
1352
+ "learning_rate": 6.629057966848879e-06,
1353
+ "loss": 0.0244,
1354
+ "step": 1920
1355
+ },
1356
+ {
1357
+ "epoch": 1.3726884779516357,
1358
+ "grad_norm": 1.3418194746364869,
1359
+ "learning_rate": 6.589880248377258e-06,
1360
+ "loss": 0.023,
1361
+ "step": 1930
1362
+ },
1363
+ {
1364
+ "epoch": 1.379800853485064,
1365
+ "grad_norm": 1.6101698103903805,
1366
+ "learning_rate": 6.550593634571205e-06,
1367
+ "loss": 0.018,
1368
+ "step": 1940
1369
+ },
1370
+ {
1371
+ "epoch": 1.3869132290184922,
1372
+ "grad_norm": 1.7415141112043047,
1373
+ "learning_rate": 6.511200816280523e-06,
1374
+ "loss": 0.021,
1375
+ "step": 1950
1376
+ },
1377
+ {
1378
+ "epoch": 1.3940256045519204,
1379
+ "grad_norm": 1.2100486434644262,
1380
+ "learning_rate": 6.471704491629251e-06,
1381
+ "loss": 0.0285,
1382
+ "step": 1960
1383
+ },
1384
+ {
1385
+ "epoch": 1.4011379800853485,
1386
+ "grad_norm": 1.301261422264456,
1387
+ "learning_rate": 6.432107365830872e-06,
1388
+ "loss": 0.0198,
1389
+ "step": 1970
1390
+ },
1391
+ {
1392
+ "epoch": 1.4082503556187767,
1393
+ "grad_norm": 1.3543714484816034,
1394
+ "learning_rate": 6.392412151003019e-06,
1395
+ "loss": 0.0244,
1396
+ "step": 1980
1397
+ },
1398
+ {
1399
+ "epoch": 1.4153627311522048,
1400
+ "grad_norm": 1.4893305665999936,
1401
+ "learning_rate": 6.3526215659817156e-06,
1402
+ "loss": 0.0226,
1403
+ "step": 1990
1404
+ },
1405
+ {
1406
+ "epoch": 1.422475106685633,
1407
+ "grad_norm": 1.1217736569772296,
1408
+ "learning_rate": 6.312738336135159e-06,
1409
+ "loss": 0.019,
1410
+ "step": 2000
1411
+ },
1412
+ {
1413
+ "epoch": 1.4295874822190613,
1414
+ "grad_norm": 1.530506526795571,
1415
+ "learning_rate": 6.272765193177044e-06,
1416
+ "loss": 0.0196,
1417
+ "step": 2010
1418
+ },
1419
+ {
1420
+ "epoch": 1.4366998577524894,
1421
+ "grad_norm": 1.1830746085813704,
1422
+ "learning_rate": 6.23270487497947e-06,
1423
+ "loss": 0.0189,
1424
+ "step": 2020
1425
+ },
1426
+ {
1427
+ "epoch": 1.4438122332859176,
1428
+ "grad_norm": 1.3714016439826322,
1429
+ "learning_rate": 6.192560125385412e-06,
1430
+ "loss": 0.025,
1431
+ "step": 2030
1432
+ },
1433
+ {
1434
+ "epoch": 1.4509246088193457,
1435
+ "grad_norm": 1.1129988250796872,
1436
+ "learning_rate": 6.152333694020781e-06,
1437
+ "loss": 0.0184,
1438
+ "step": 2040
1439
+ },
1440
+ {
1441
+ "epoch": 1.4580369843527738,
1442
+ "grad_norm": 2.0430785612059346,
1443
+ "learning_rate": 6.112028336106108e-06,
1444
+ "loss": 0.023,
1445
+ "step": 2050
1446
+ },
1447
+ {
1448
+ "epoch": 1.465149359886202,
1449
+ "grad_norm": 1.4200748013522733,
1450
+ "learning_rate": 6.071646812267817e-06,
1451
+ "loss": 0.0167,
1452
+ "step": 2060
1453
+ },
1454
+ {
1455
+ "epoch": 1.4722617354196301,
1456
+ "grad_norm": 1.8027434372189237,
1457
+ "learning_rate": 6.031191888349155e-06,
1458
+ "loss": 0.0202,
1459
+ "step": 2070
1460
+ },
1461
+ {
1462
+ "epoch": 1.4793741109530583,
1463
+ "grad_norm": 1.1171787456661884,
1464
+ "learning_rate": 5.990666335220738e-06,
1465
+ "loss": 0.0178,
1466
+ "step": 2080
1467
+ },
1468
+ {
1469
+ "epoch": 1.4864864864864864,
1470
+ "grad_norm": 1.6452874612147976,
1471
+ "learning_rate": 5.950072928590781e-06,
1472
+ "loss": 0.018,
1473
+ "step": 2090
1474
+ },
1475
+ {
1476
+ "epoch": 1.4935988620199145,
1477
+ "grad_norm": 0.9884439749765455,
1478
+ "learning_rate": 5.909414448814971e-06,
1479
+ "loss": 0.0209,
1480
+ "step": 2100
1481
+ },
1482
+ {
1483
+ "epoch": 1.5007112375533427,
1484
+ "grad_norm": 1.554996157376441,
1485
+ "learning_rate": 5.8686936807060335e-06,
1486
+ "loss": 0.0192,
1487
+ "step": 2110
1488
+ },
1489
+ {
1490
+ "epoch": 1.5078236130867708,
1491
+ "grad_norm": 1.0929475144672365,
1492
+ "learning_rate": 5.827913413343003e-06,
1493
+ "loss": 0.018,
1494
+ "step": 2120
1495
+ },
1496
+ {
1497
+ "epoch": 1.5149359886201992,
1498
+ "grad_norm": 1.0492081159201816,
1499
+ "learning_rate": 5.787076439880177e-06,
1500
+ "loss": 0.0179,
1501
+ "step": 2130
1502
+ },
1503
+ {
1504
+ "epoch": 1.5220483641536273,
1505
+ "grad_norm": 1.2333928332291602,
1506
+ "learning_rate": 5.746185557355814e-06,
1507
+ "loss": 0.0211,
1508
+ "step": 2140
1509
+ },
1510
+ {
1511
+ "epoch": 1.5291607396870555,
1512
+ "grad_norm": 0.8940904857757537,
1513
+ "learning_rate": 5.70524356650056e-06,
1514
+ "loss": 0.0168,
1515
+ "step": 2150
1516
+ },
1517
+ {
1518
+ "epoch": 1.5362731152204836,
1519
+ "grad_norm": 0.9594678027850269,
1520
+ "learning_rate": 5.664253271545603e-06,
1521
+ "loss": 0.0172,
1522
+ "step": 2160
1523
+ },
1524
+ {
1525
+ "epoch": 1.543385490753912,
1526
+ "grad_norm": 1.133529225026687,
1527
+ "learning_rate": 5.623217480030622e-06,
1528
+ "loss": 0.0178,
1529
+ "step": 2170
1530
+ },
1531
+ {
1532
+ "epoch": 1.55049786628734,
1533
+ "grad_norm": 1.0245366404113008,
1534
+ "learning_rate": 5.58213900261148e-06,
1535
+ "loss": 0.0135,
1536
+ "step": 2180
1537
+ },
1538
+ {
1539
+ "epoch": 1.5576102418207682,
1540
+ "grad_norm": 0.7068889699880522,
1541
+ "learning_rate": 5.541020652867713e-06,
1542
+ "loss": 0.0153,
1543
+ "step": 2190
1544
+ },
1545
+ {
1546
+ "epoch": 1.5647226173541964,
1547
+ "grad_norm": 1.2084727884034199,
1548
+ "learning_rate": 5.49986524710983e-06,
1549
+ "loss": 0.0143,
1550
+ "step": 2200
1551
+ },
1552
+ {
1553
+ "epoch": 1.5718349928876245,
1554
+ "grad_norm": 1.5054621892964164,
1555
+ "learning_rate": 5.4586756041864065e-06,
1556
+ "loss": 0.016,
1557
+ "step": 2210
1558
+ },
1559
+ {
1560
+ "epoch": 1.5789473684210527,
1561
+ "grad_norm": 1.4176580158063212,
1562
+ "learning_rate": 5.417454545291017e-06,
1563
+ "loss": 0.0168,
1564
+ "step": 2220
1565
+ },
1566
+ {
1567
+ "epoch": 1.5860597439544808,
1568
+ "grad_norm": 1.1824924291702557,
1569
+ "learning_rate": 5.376204893769e-06,
1570
+ "loss": 0.0198,
1571
+ "step": 2230
1572
+ },
1573
+ {
1574
+ "epoch": 1.593172119487909,
1575
+ "grad_norm": 1.7631808589665254,
1576
+ "learning_rate": 5.334929474924093e-06,
1577
+ "loss": 0.0155,
1578
+ "step": 2240
1579
+ },
1580
+ {
1581
+ "epoch": 1.600284495021337,
1582
+ "grad_norm": 1.215149372258629,
1583
+ "learning_rate": 5.293631115824897e-06,
1584
+ "loss": 0.0138,
1585
+ "step": 2250
1586
+ },
1587
+ {
1588
+ "epoch": 1.6073968705547652,
1589
+ "grad_norm": 1.718329335563461,
1590
+ "learning_rate": 5.252312645111266e-06,
1591
+ "loss": 0.0173,
1592
+ "step": 2260
1593
+ },
1594
+ {
1595
+ "epoch": 1.6145092460881934,
1596
+ "grad_norm": 1.0751615799620988,
1597
+ "learning_rate": 5.2109768928005454e-06,
1598
+ "loss": 0.0142,
1599
+ "step": 2270
1600
+ },
1601
+ {
1602
+ "epoch": 1.6216216216216215,
1603
+ "grad_norm": 0.8027120709435296,
1604
+ "learning_rate": 5.169626690093751e-06,
1605
+ "loss": 0.014,
1606
+ "step": 2280
1607
+ },
1608
+ {
1609
+ "epoch": 1.6287339971550496,
1610
+ "grad_norm": 1.6699231722730825,
1611
+ "learning_rate": 5.128264869181646e-06,
1612
+ "loss": 0.0127,
1613
+ "step": 2290
1614
+ },
1615
+ {
1616
+ "epoch": 1.635846372688478,
1617
+ "grad_norm": 1.2559995566307685,
1618
+ "learning_rate": 5.086894263050755e-06,
1619
+ "loss": 0.011,
1620
+ "step": 2300
1621
+ },
1622
+ {
1623
+ "epoch": 1.6429587482219061,
1624
+ "grad_norm": 1.349960059022035,
1625
+ "learning_rate": 5.045517705289328e-06,
1626
+ "loss": 0.0111,
1627
+ "step": 2310
1628
+ },
1629
+ {
1630
+ "epoch": 1.6500711237553343,
1631
+ "grad_norm": 0.8142603267011976,
1632
+ "learning_rate": 5.004138029893257e-06,
1633
+ "loss": 0.0138,
1634
+ "step": 2320
1635
+ },
1636
+ {
1637
+ "epoch": 1.6571834992887624,
1638
+ "grad_norm": 1.0621437820203163,
1639
+ "learning_rate": 4.9627580710719734e-06,
1640
+ "loss": 0.0128,
1641
+ "step": 2330
1642
+ },
1643
+ {
1644
+ "epoch": 1.6642958748221908,
1645
+ "grad_norm": 1.7262184368035551,
1646
+ "learning_rate": 4.921380663054318e-06,
1647
+ "loss": 0.0128,
1648
+ "step": 2340
1649
+ },
1650
+ {
1651
+ "epoch": 1.671408250355619,
1652
+ "grad_norm": 1.2695847947859624,
1653
+ "learning_rate": 4.880008639894421e-06,
1654
+ "loss": 0.014,
1655
+ "step": 2350
1656
+ },
1657
+ {
1658
+ "epoch": 1.678520625889047,
1659
+ "grad_norm": 0.9261536386806662,
1660
+ "learning_rate": 4.838644835277585e-06,
1661
+ "loss": 0.0144,
1662
+ "step": 2360
1663
+ },
1664
+ {
1665
+ "epoch": 1.6856330014224752,
1666
+ "grad_norm": 0.6867762051400554,
1667
+ "learning_rate": 4.79729208232621e-06,
1668
+ "loss": 0.0109,
1669
+ "step": 2370
1670
+ },
1671
+ {
1672
+ "epoch": 1.6927453769559033,
1673
+ "grad_norm": 0.6232870542134327,
1674
+ "learning_rate": 4.75595321340573e-06,
1675
+ "loss": 0.0122,
1676
+ "step": 2380
1677
+ },
1678
+ {
1679
+ "epoch": 1.6998577524893315,
1680
+ "grad_norm": 0.970176828182309,
1681
+ "learning_rate": 4.714631059930622e-06,
1682
+ "loss": 0.012,
1683
+ "step": 2390
1684
+ },
1685
+ {
1686
+ "epoch": 1.7069701280227596,
1687
+ "grad_norm": 1.6173382913062293,
1688
+ "learning_rate": 4.6733284521704816e-06,
1689
+ "loss": 0.0124,
1690
+ "step": 2400
1691
+ },
1692
+ {
1693
+ "epoch": 1.7140825035561877,
1694
+ "grad_norm": 0.9844171855603,
1695
+ "learning_rate": 4.632048219056159e-06,
1696
+ "loss": 0.012,
1697
+ "step": 2410
1698
+ },
1699
+ {
1700
+ "epoch": 1.7211948790896159,
1701
+ "grad_norm": 1.3183824382551952,
1702
+ "learning_rate": 4.590793187986003e-06,
1703
+ "loss": 0.0149,
1704
+ "step": 2420
1705
+ },
1706
+ {
1707
+ "epoch": 1.728307254623044,
1708
+ "grad_norm": 0.5730734000902559,
1709
+ "learning_rate": 4.549566184632206e-06,
1710
+ "loss": 0.0117,
1711
+ "step": 2430
1712
+ },
1713
+ {
1714
+ "epoch": 1.7354196301564722,
1715
+ "grad_norm": 0.9239894283732394,
1716
+ "learning_rate": 4.508370032747261e-06,
1717
+ "loss": 0.0092,
1718
+ "step": 2440
1719
+ },
1720
+ {
1721
+ "epoch": 1.7425320056899003,
1722
+ "grad_norm": 0.9732516534559529,
1723
+ "learning_rate": 4.467207553970564e-06,
1724
+ "loss": 0.012,
1725
+ "step": 2450
1726
+ },
1727
+ {
1728
+ "epoch": 1.7496443812233284,
1729
+ "grad_norm": 0.9139268416210883,
1730
+ "learning_rate": 4.426081567635137e-06,
1731
+ "loss": 0.0092,
1732
+ "step": 2460
1733
+ },
1734
+ {
1735
+ "epoch": 1.7567567567567568,
1736
+ "grad_norm": 1.2921223854630304,
1737
+ "learning_rate": 4.3849948905745385e-06,
1738
+ "loss": 0.0137,
1739
+ "step": 2470
1740
+ },
1741
+ {
1742
+ "epoch": 1.763869132290185,
1743
+ "grad_norm": 0.8703692417885042,
1744
+ "learning_rate": 4.343950336929927e-06,
1745
+ "loss": 0.0095,
1746
+ "step": 2480
1747
+ },
1748
+ {
1749
+ "epoch": 1.770981507823613,
1750
+ "grad_norm": 0.9536442700427114,
1751
+ "learning_rate": 4.302950717957304e-06,
1752
+ "loss": 0.0098,
1753
+ "step": 2490
1754
+ },
1755
+ {
1756
+ "epoch": 1.7780938833570412,
1757
+ "grad_norm": 0.852536162993322,
1758
+ "learning_rate": 4.261998841834972e-06,
1759
+ "loss": 0.0101,
1760
+ "step": 2500
1761
+ },
1762
+ {
1763
+ "epoch": 1.7852062588904696,
1764
+ "grad_norm": 1.248725823462744,
1765
+ "learning_rate": 4.221097513471199e-06,
1766
+ "loss": 0.0094,
1767
+ "step": 2510
1768
+ },
1769
+ {
1770
+ "epoch": 1.7923186344238977,
1771
+ "grad_norm": 0.487586863686056,
1772
+ "learning_rate": 4.18024953431209e-06,
1773
+ "loss": 0.009,
1774
+ "step": 2520
1775
+ },
1776
+ {
1777
+ "epoch": 1.7994310099573259,
1778
+ "grad_norm": 0.6857485925261184,
1779
+ "learning_rate": 4.13945770214971e-06,
1780
+ "loss": 0.0098,
1781
+ "step": 2530
1782
+ },
1783
+ {
1784
+ "epoch": 1.806543385490754,
1785
+ "grad_norm": 0.5224101041795471,
1786
+ "learning_rate": 4.098724810930472e-06,
1787
+ "loss": 0.0077,
1788
+ "step": 2540
1789
+ },
1790
+ {
1791
+ "epoch": 1.8136557610241821,
1792
+ "grad_norm": 0.3255236838052598,
1793
+ "learning_rate": 4.058053650563747e-06,
1794
+ "loss": 0.0069,
1795
+ "step": 2550
1796
+ },
1797
+ {
1798
+ "epoch": 1.8207681365576103,
1799
+ "grad_norm": 0.5535169044707119,
1800
+ "learning_rate": 4.017447006730796e-06,
1801
+ "loss": 0.0084,
1802
+ "step": 2560
1803
+ },
1804
+ {
1805
+ "epoch": 1.8278805120910384,
1806
+ "grad_norm": 0.6587680546008802,
1807
+ "learning_rate": 3.976907660693954e-06,
1808
+ "loss": 0.0068,
1809
+ "step": 2570
1810
+ },
1811
+ {
1812
+ "epoch": 1.8349928876244666,
1813
+ "grad_norm": 0.7451030339766666,
1814
+ "learning_rate": 3.936438389106154e-06,
1815
+ "loss": 0.0091,
1816
+ "step": 2580
1817
+ },
1818
+ {
1819
+ "epoch": 1.8421052631578947,
1820
+ "grad_norm": 0.7854707802079127,
1821
+ "learning_rate": 3.896041963820724e-06,
1822
+ "loss": 0.0105,
1823
+ "step": 2590
1824
+ },
1825
+ {
1826
+ "epoch": 1.8492176386913228,
1827
+ "grad_norm": 0.6990927586140553,
1828
+ "learning_rate": 3.855721151701548e-06,
1829
+ "loss": 0.0099,
1830
+ "step": 2600
1831
+ },
1832
+ {
1833
+ "epoch": 1.856330014224751,
1834
+ "grad_norm": 1.318630670215527,
1835
+ "learning_rate": 3.815478714433559e-06,
1836
+ "loss": 0.0095,
1837
+ "step": 2610
1838
+ },
1839
+ {
1840
+ "epoch": 1.863442389758179,
1841
+ "grad_norm": 0.8518153474787149,
1842
+ "learning_rate": 3.775317408333571e-06,
1843
+ "loss": 0.0105,
1844
+ "step": 2620
1845
+ },
1846
+ {
1847
+ "epoch": 1.8705547652916072,
1848
+ "grad_norm": 1.0023735620026466,
1849
+ "learning_rate": 3.7352399841614996e-06,
1850
+ "loss": 0.0082,
1851
+ "step": 2630
1852
+ },
1853
+ {
1854
+ "epoch": 1.8776671408250356,
1855
+ "grad_norm": 0.9809887806472293,
1856
+ "learning_rate": 3.695249186931954e-06,
1857
+ "loss": 0.0087,
1858
+ "step": 2640
1859
+ },
1860
+ {
1861
+ "epoch": 1.8847795163584637,
1862
+ "grad_norm": 0.9540456428445807,
1863
+ "learning_rate": 3.655347755726224e-06,
1864
+ "loss": 0.0076,
1865
+ "step": 2650
1866
+ },
1867
+ {
1868
+ "epoch": 1.8918918918918919,
1869
+ "grad_norm": 0.7066159412282622,
1870
+ "learning_rate": 3.6155384235046674e-06,
1871
+ "loss": 0.0086,
1872
+ "step": 2660
1873
+ },
1874
+ {
1875
+ "epoch": 1.89900426742532,
1876
+ "grad_norm": 0.5137592216850851,
1877
+ "learning_rate": 3.5758239169195276e-06,
1878
+ "loss": 0.005,
1879
+ "step": 2670
1880
+ },
1881
+ {
1882
+ "epoch": 1.9061166429587484,
1883
+ "grad_norm": 0.3439517878091387,
1884
+ "learning_rate": 3.5362069561281764e-06,
1885
+ "loss": 0.0072,
1886
+ "step": 2680
1887
+ },
1888
+ {
1889
+ "epoch": 1.9132290184921765,
1890
+ "grad_norm": 0.3970319267325305,
1891
+ "learning_rate": 3.4966902546068016e-06,
1892
+ "loss": 0.0072,
1893
+ "step": 2690
1894
+ },
1895
+ {
1896
+ "epoch": 1.9203413940256047,
1897
+ "grad_norm": 0.9810798909167313,
1898
+ "learning_rate": 3.4572765189645516e-06,
1899
+ "loss": 0.0073,
1900
+ "step": 2700
1901
+ },
1902
+ {
1903
+ "epoch": 1.9274537695590328,
1904
+ "grad_norm": 1.4872117479815739,
1905
+ "learning_rate": 3.4179684487581555e-06,
1906
+ "loss": 0.0067,
1907
+ "step": 2710
1908
+ },
1909
+ {
1910
+ "epoch": 1.934566145092461,
1911
+ "grad_norm": 0.17941271447530188,
1912
+ "learning_rate": 3.3787687363070256e-06,
1913
+ "loss": 0.0075,
1914
+ "step": 2720
1915
+ },
1916
+ {
1917
+ "epoch": 1.941678520625889,
1918
+ "grad_norm": 0.21377268278340267,
1919
+ "learning_rate": 3.3396800665088435e-06,
1920
+ "loss": 0.0069,
1921
+ "step": 2730
1922
+ },
1923
+ {
1924
+ "epoch": 1.9487908961593172,
1925
+ "grad_norm": 0.8027020001474104,
1926
+ "learning_rate": 3.300705116655672e-06,
1927
+ "loss": 0.0058,
1928
+ "step": 2740
1929
+ },
1930
+ {
1931
+ "epoch": 1.9559032716927454,
1932
+ "grad_norm": 0.607769605088779,
1933
+ "learning_rate": 3.26184655625058e-06,
1934
+ "loss": 0.0055,
1935
+ "step": 2750
1936
+ },
1937
+ {
1938
+ "epoch": 1.9630156472261735,
1939
+ "grad_norm": 0.29396831979764293,
1940
+ "learning_rate": 3.2231070468247954e-06,
1941
+ "loss": 0.0062,
1942
+ "step": 2760
1943
+ },
1944
+ {
1945
+ "epoch": 1.9701280227596016,
1946
+ "grad_norm": 0.49083863249583537,
1947
+ "learning_rate": 3.1844892417554102e-06,
1948
+ "loss": 0.0063,
1949
+ "step": 2770
1950
+ },
1951
+ {
1952
+ "epoch": 1.9772403982930298,
1953
+ "grad_norm": 0.710753958854101,
1954
+ "learning_rate": 3.1459957860836528e-06,
1955
+ "loss": 0.0065,
1956
+ "step": 2780
1957
+ },
1958
+ {
1959
+ "epoch": 1.984352773826458,
1960
+ "grad_norm": 0.27012727932102704,
1961
+ "learning_rate": 3.1076293163337074e-06,
1962
+ "loss": 0.0068,
1963
+ "step": 2790
1964
+ },
1965
+ {
1966
+ "epoch": 1.991465149359886,
1967
+ "grad_norm": 0.34603765606499187,
1968
+ "learning_rate": 3.069392460332141e-06,
1969
+ "loss": 0.0057,
1970
+ "step": 2800
1971
+ },
1972
+ {
1973
+ "epoch": 1.9985775248933144,
1974
+ "grad_norm": 0.3721250969176249,
1975
+ "learning_rate": 3.031287837027911e-06,
1976
+ "loss": 0.0066,
1977
+ "step": 2810
1978
+ },
1979
+ {
1980
+ "epoch": 2.0056899004267423,
1981
+ "grad_norm": 0.781768421432185,
1982
+ "learning_rate": 2.9933180563129936e-06,
1983
+ "loss": 0.0041,
1984
+ "step": 2820
1985
+ },
1986
+ {
1987
+ "epoch": 2.012802275960171,
1988
+ "grad_norm": 0.24350008390092337,
1989
+ "learning_rate": 2.955485718843616e-06,
1990
+ "loss": 0.0056,
1991
+ "step": 2830
1992
+ },
1993
+ {
1994
+ "epoch": 2.019914651493599,
1995
+ "grad_norm": 0.4576741832894929,
1996
+ "learning_rate": 2.917793415862129e-06,
1997
+ "loss": 0.0048,
1998
+ "step": 2840
1999
+ },
2000
+ {
2001
+ "epoch": 2.027027027027027,
2002
+ "grad_norm": 0.9890835980780475,
2003
+ "learning_rate": 2.880243729019546e-06,
2004
+ "loss": 0.0038,
2005
+ "step": 2850
2006
+ },
2007
+ {
2008
+ "epoch": 2.0341394025604553,
2009
+ "grad_norm": 0.3917033136267895,
2010
+ "learning_rate": 2.842839230198685e-06,
2011
+ "loss": 0.0052,
2012
+ "step": 2860
2013
+ },
2014
+ {
2015
+ "epoch": 2.0412517780938835,
2016
+ "grad_norm": 0.12450209954114903,
2017
+ "learning_rate": 2.805582481338044e-06,
2018
+ "loss": 0.0047,
2019
+ "step": 2870
2020
+ },
2021
+ {
2022
+ "epoch": 2.0483641536273116,
2023
+ "grad_norm": 0.5486661654701261,
2024
+ "learning_rate": 2.7684760342563045e-06,
2025
+ "loss": 0.0047,
2026
+ "step": 2880
2027
+ },
2028
+ {
2029
+ "epoch": 2.0554765291607398,
2030
+ "grad_norm": 0.22758726780410876,
2031
+ "learning_rate": 2.731522430477571e-06,
2032
+ "loss": 0.0056,
2033
+ "step": 2890
2034
+ },
2035
+ {
2036
+ "epoch": 2.062588904694168,
2037
+ "grad_norm": 0.2218164583744802,
2038
+ "learning_rate": 2.694724201057273e-06,
2039
+ "loss": 0.0048,
2040
+ "step": 2900
2041
+ },
2042
+ {
2043
+ "epoch": 2.069701280227596,
2044
+ "grad_norm": 0.45353402328041514,
2045
+ "learning_rate": 2.6580838664088214e-06,
2046
+ "loss": 0.0042,
2047
+ "step": 2910
2048
+ },
2049
+ {
2050
+ "epoch": 2.076813655761024,
2051
+ "grad_norm": 0.29165554258590237,
2052
+ "learning_rate": 2.6216039361309753e-06,
2053
+ "loss": 0.0044,
2054
+ "step": 2920
2055
+ },
2056
+ {
2057
+ "epoch": 2.0839260312944523,
2058
+ "grad_norm": 0.42787997336579114,
2059
+ "learning_rate": 2.5852869088359495e-06,
2060
+ "loss": 0.0041,
2061
+ "step": 2930
2062
+ },
2063
+ {
2064
+ "epoch": 2.0910384068278804,
2065
+ "grad_norm": 0.44323215466285076,
2066
+ "learning_rate": 2.549135271978275e-06,
2067
+ "loss": 0.0032,
2068
+ "step": 2940
2069
+ },
2070
+ {
2071
+ "epoch": 2.0981507823613086,
2072
+ "grad_norm": 0.1143123602309504,
2073
+ "learning_rate": 2.5131515016844345e-06,
2074
+ "loss": 0.0046,
2075
+ "step": 2950
2076
+ },
2077
+ {
2078
+ "epoch": 2.1052631578947367,
2079
+ "grad_norm": 0.16583828479799412,
2080
+ "learning_rate": 2.4773380625832603e-06,
2081
+ "loss": 0.0047,
2082
+ "step": 2960
2083
+ },
2084
+ {
2085
+ "epoch": 2.112375533428165,
2086
+ "grad_norm": 0.15755302830922696,
2087
+ "learning_rate": 2.4416974076371304e-06,
2088
+ "loss": 0.0039,
2089
+ "step": 2970
2090
+ },
2091
+ {
2092
+ "epoch": 2.119487908961593,
2093
+ "grad_norm": 0.62834650400931,
2094
+ "learning_rate": 2.406231977973942e-06,
2095
+ "loss": 0.0037,
2096
+ "step": 2980
2097
+ },
2098
+ {
2099
+ "epoch": 2.126600284495021,
2100
+ "grad_norm": 0.3425562134173693,
2101
+ "learning_rate": 2.3709442027199387e-06,
2102
+ "loss": 0.0049,
2103
+ "step": 2990
2104
+ },
2105
+ {
2106
+ "epoch": 2.1337126600284497,
2107
+ "grad_norm": 0.1176241490475843,
2108
+ "learning_rate": 2.3358364988333066e-06,
2109
+ "loss": 0.0045,
2110
+ "step": 3000
2111
+ },
2112
+ {
2113
+ "epoch": 2.140825035561878,
2114
+ "grad_norm": 0.21718467446163836,
2115
+ "learning_rate": 2.3009112709386454e-06,
2116
+ "loss": 0.0052,
2117
+ "step": 3010
2118
+ },
2119
+ {
2120
+ "epoch": 2.147937411095306,
2121
+ "grad_norm": 0.1447042548468856,
2122
+ "learning_rate": 2.2661709111622666e-06,
2123
+ "loss": 0.0047,
2124
+ "step": 3020
2125
+ },
2126
+ {
2127
+ "epoch": 2.155049786628734,
2128
+ "grad_norm": 0.2850367854449551,
2129
+ "learning_rate": 2.2316177989683458e-06,
2130
+ "loss": 0.004,
2131
+ "step": 3030
2132
+ },
2133
+ {
2134
+ "epoch": 2.1621621621621623,
2135
+ "grad_norm": 0.33564220562935804,
2136
+ "learning_rate": 2.197254300995953e-06,
2137
+ "loss": 0.0052,
2138
+ "step": 3040
2139
+ },
2140
+ {
2141
+ "epoch": 2.1692745376955904,
2142
+ "grad_norm": 0.1545067926251289,
2143
+ "learning_rate": 2.163082770896943e-06,
2144
+ "loss": 0.0043,
2145
+ "step": 3050
2146
+ },
2147
+ {
2148
+ "epoch": 2.1763869132290186,
2149
+ "grad_norm": 0.08868335935281069,
2150
+ "learning_rate": 2.1291055491747643e-06,
2151
+ "loss": 0.0034,
2152
+ "step": 3060
2153
+ },
2154
+ {
2155
+ "epoch": 2.1834992887624467,
2156
+ "grad_norm": 0.0678499455537346,
2157
+ "learning_rate": 2.095324963024137e-06,
2158
+ "loss": 0.0039,
2159
+ "step": 3070
2160
+ },
2161
+ {
2162
+ "epoch": 2.190611664295875,
2163
+ "grad_norm": 0.1962461433328382,
2164
+ "learning_rate": 2.061743326171668e-06,
2165
+ "loss": 0.0038,
2166
+ "step": 3080
2167
+ },
2168
+ {
2169
+ "epoch": 2.197724039829303,
2170
+ "grad_norm": 0.07801886707618137,
2171
+ "learning_rate": 2.02836293871736e-06,
2172
+ "loss": 0.0035,
2173
+ "step": 3090
2174
+ },
2175
+ {
2176
+ "epoch": 2.204836415362731,
2177
+ "grad_norm": 0.3629078506453925,
2178
+ "learning_rate": 1.9951860869771e-06,
2179
+ "loss": 0.0038,
2180
+ "step": 3100
2181
+ },
2182
+ {
2183
+ "epoch": 2.2119487908961593,
2184
+ "grad_norm": 0.8806588814039079,
2185
+ "learning_rate": 1.962215043326029e-06,
2186
+ "loss": 0.004,
2187
+ "step": 3110
2188
+ },
2189
+ {
2190
+ "epoch": 2.2190611664295874,
2191
+ "grad_norm": 0.33169199243250613,
2192
+ "learning_rate": 1.9294520660429284e-06,
2193
+ "loss": 0.0036,
2194
+ "step": 3120
2195
+ },
2196
+ {
2197
+ "epoch": 2.2261735419630155,
2198
+ "grad_norm": 0.12310821458251077,
2199
+ "learning_rate": 1.8968993991555301e-06,
2200
+ "loss": 0.0045,
2201
+ "step": 3130
2202
+ },
2203
+ {
2204
+ "epoch": 2.2332859174964437,
2205
+ "grad_norm": 0.1564234234161847,
2206
+ "learning_rate": 1.8645592722868223e-06,
2207
+ "loss": 0.0041,
2208
+ "step": 3140
2209
+ },
2210
+ {
2211
+ "epoch": 2.240398293029872,
2212
+ "grad_norm": 0.1908716606221835,
2213
+ "learning_rate": 1.8324339005023273e-06,
2214
+ "loss": 0.0042,
2215
+ "step": 3150
2216
+ },
2217
+ {
2218
+ "epoch": 2.2475106685633,
2219
+ "grad_norm": 0.17491525199519603,
2220
+ "learning_rate": 1.8005254841584035e-06,
2221
+ "loss": 0.0032,
2222
+ "step": 3160
2223
+ },
2224
+ {
2225
+ "epoch": 2.2546230440967285,
2226
+ "grad_norm": 0.15681019357467124,
2227
+ "learning_rate": 1.768836208751516e-06,
2228
+ "loss": 0.0039,
2229
+ "step": 3170
2230
+ },
2231
+ {
2232
+ "epoch": 2.2617354196301562,
2233
+ "grad_norm": 0.16172138112249296,
2234
+ "learning_rate": 1.7373682447685624e-06,
2235
+ "loss": 0.004,
2236
+ "step": 3180
2237
+ },
2238
+ {
2239
+ "epoch": 2.268847795163585,
2240
+ "grad_norm": 0.10575834882863448,
2241
+ "learning_rate": 1.706123747538196e-06,
2242
+ "loss": 0.0035,
2243
+ "step": 3190
2244
+ },
2245
+ {
2246
+ "epoch": 2.275960170697013,
2247
+ "grad_norm": 0.18222310954574267,
2248
+ "learning_rate": 1.6751048570832184e-06,
2249
+ "loss": 0.0041,
2250
+ "step": 3200
2251
+ },
2252
+ {
2253
+ "epoch": 2.283072546230441,
2254
+ "grad_norm": 0.14875677905536833,
2255
+ "learning_rate": 1.6443136979739855e-06,
2256
+ "loss": 0.003,
2257
+ "step": 3210
2258
+ },
2259
+ {
2260
+ "epoch": 2.2901849217638692,
2261
+ "grad_norm": 0.10898246145730768,
2262
+ "learning_rate": 1.6137523791829007e-06,
2263
+ "loss": 0.0034,
2264
+ "step": 3220
2265
+ },
2266
+ {
2267
+ "epoch": 2.2972972972972974,
2268
+ "grad_norm": 0.1309461753215428,
2269
+ "learning_rate": 1.5834229939399637e-06,
2270
+ "loss": 0.0034,
2271
+ "step": 3230
2272
+ },
2273
+ {
2274
+ "epoch": 2.3044096728307255,
2275
+ "grad_norm": 0.07200423508178247,
2276
+ "learning_rate": 1.5533276195893987e-06,
2277
+ "loss": 0.0037,
2278
+ "step": 3240
2279
+ },
2280
+ {
2281
+ "epoch": 2.3115220483641536,
2282
+ "grad_norm": 0.28943328560772674,
2283
+ "learning_rate": 1.5234683174473669e-06,
2284
+ "loss": 0.0039,
2285
+ "step": 3250
2286
+ },
2287
+ {
2288
+ "epoch": 2.318634423897582,
2289
+ "grad_norm": 0.5192612699526135,
2290
+ "learning_rate": 1.493847132660789e-06,
2291
+ "loss": 0.0034,
2292
+ "step": 3260
2293
+ },
2294
+ {
2295
+ "epoch": 2.32574679943101,
2296
+ "grad_norm": 0.1606295965015448,
2297
+ "learning_rate": 1.4644660940672628e-06,
2298
+ "loss": 0.0044,
2299
+ "step": 3270
2300
+ },
2301
+ {
2302
+ "epoch": 2.332859174964438,
2303
+ "grad_norm": 0.37034704670980706,
2304
+ "learning_rate": 1.435327214056103e-06,
2305
+ "loss": 0.0036,
2306
+ "step": 3280
2307
+ },
2308
+ {
2309
+ "epoch": 2.339971550497866,
2310
+ "grad_norm": 0.1985714241377405,
2311
+ "learning_rate": 1.406432488430508e-06,
2312
+ "loss": 0.0041,
2313
+ "step": 3290
2314
+ },
2315
+ {
2316
+ "epoch": 2.3470839260312943,
2317
+ "grad_norm": 0.13803180507649276,
2318
+ "learning_rate": 1.3777838962708602e-06,
2319
+ "loss": 0.0035,
2320
+ "step": 3300
2321
+ },
2322
+ {
2323
+ "epoch": 2.3541963015647225,
2324
+ "grad_norm": 0.16321860803207505,
2325
+ "learning_rate": 1.3493833997991745e-06,
2326
+ "loss": 0.0033,
2327
+ "step": 3310
2328
+ },
2329
+ {
2330
+ "epoch": 2.3613086770981506,
2331
+ "grad_norm": 0.2001811539323451,
2332
+ "learning_rate": 1.3212329442446985e-06,
2333
+ "loss": 0.0042,
2334
+ "step": 3320
2335
+ },
2336
+ {
2337
+ "epoch": 2.3684210526315788,
2338
+ "grad_norm": 0.1453173744872287,
2339
+ "learning_rate": 1.2933344577106822e-06,
2340
+ "loss": 0.0032,
2341
+ "step": 3330
2342
+ },
2343
+ {
2344
+ "epoch": 2.3755334281650073,
2345
+ "grad_norm": 0.10401910511567347,
2346
+ "learning_rate": 1.2656898510423122e-06,
2347
+ "loss": 0.0031,
2348
+ "step": 3340
2349
+ },
2350
+ {
2351
+ "epoch": 2.382645803698435,
2352
+ "grad_norm": 0.10582948879092595,
2353
+ "learning_rate": 1.2383010176958372e-06,
2354
+ "loss": 0.0033,
2355
+ "step": 3350
2356
+ },
2357
+ {
2358
+ "epoch": 2.3897581792318636,
2359
+ "grad_norm": 0.16511981732406306,
2360
+ "learning_rate": 1.2111698336088717e-06,
2361
+ "loss": 0.0039,
2362
+ "step": 3360
2363
+ },
2364
+ {
2365
+ "epoch": 2.3968705547652918,
2366
+ "grad_norm": 0.14041169290258051,
2367
+ "learning_rate": 1.1842981570719237e-06,
2368
+ "loss": 0.0034,
2369
+ "step": 3370
2370
+ },
2371
+ {
2372
+ "epoch": 2.40398293029872,
2373
+ "grad_norm": 0.216807318559693,
2374
+ "learning_rate": 1.157687828601094e-06,
2375
+ "loss": 0.0039,
2376
+ "step": 3380
2377
+ },
2378
+ {
2379
+ "epoch": 2.411095305832148,
2380
+ "grad_norm": 0.1487410996270859,
2381
+ "learning_rate": 1.1313406708120327e-06,
2382
+ "loss": 0.0033,
2383
+ "step": 3390
2384
+ },
2385
+ {
2386
+ "epoch": 2.418207681365576,
2387
+ "grad_norm": 0.17410715559913836,
2388
+ "learning_rate": 1.1052584882950896e-06,
2389
+ "loss": 0.0032,
2390
+ "step": 3400
2391
+ },
2392
+ {
2393
+ "epoch": 2.4253200568990043,
2394
+ "grad_norm": 0.14679067077660998,
2395
+ "learning_rate": 1.0794430674917262e-06,
2396
+ "loss": 0.0029,
2397
+ "step": 3410
2398
+ },
2399
+ {
2400
+ "epoch": 2.4324324324324325,
2401
+ "grad_norm": 0.11730320262217042,
2402
+ "learning_rate": 1.0538961765721429e-06,
2403
+ "loss": 0.0034,
2404
+ "step": 3420
2405
+ },
2406
+ {
2407
+ "epoch": 2.4395448079658606,
2408
+ "grad_norm": 0.15601345944604691,
2409
+ "learning_rate": 1.0286195653141822e-06,
2410
+ "loss": 0.0033,
2411
+ "step": 3430
2412
+ },
2413
+ {
2414
+ "epoch": 2.4466571834992887,
2415
+ "grad_norm": 0.15596374680032918,
2416
+ "learning_rate": 1.0036149649834786e-06,
2417
+ "loss": 0.0033,
2418
+ "step": 3440
2419
+ },
2420
+ {
2421
+ "epoch": 2.453769559032717,
2422
+ "grad_norm": 0.15341222073346109,
2423
+ "learning_rate": 9.788840882148803e-07,
2424
+ "loss": 0.0032,
2425
+ "step": 3450
2426
+ },
2427
+ {
2428
+ "epoch": 2.460881934566145,
2429
+ "grad_norm": 0.18113221503751906,
2430
+ "learning_rate": 9.544286288951393e-07,
2431
+ "loss": 0.0028,
2432
+ "step": 3460
2433
+ },
2434
+ {
2435
+ "epoch": 2.467994310099573,
2436
+ "grad_norm": 0.23824252331061962,
2437
+ "learning_rate": 9.302502620469073e-07,
2438
+ "loss": 0.003,
2439
+ "step": 3470
2440
+ },
2441
+ {
2442
+ "epoch": 2.4751066856330013,
2443
+ "grad_norm": 0.1804454838531882,
2444
+ "learning_rate": 9.063506437139901e-07,
2445
+ "loss": 0.0033,
2446
+ "step": 3480
2447
+ },
2448
+ {
2449
+ "epoch": 2.4822190611664294,
2450
+ "grad_norm": 0.12129461355182411,
2451
+ "learning_rate": 8.827314108479357e-07,
2452
+ "loss": 0.0035,
2453
+ "step": 3490
2454
+ },
2455
+ {
2456
+ "epoch": 2.4893314366998576,
2457
+ "grad_norm": 0.2496105490338266,
2458
+ "learning_rate": 8.593941811959078e-07,
2459
+ "loss": 0.0037,
2460
+ "step": 3500
2461
+ },
2462
+ {
2463
+ "epoch": 2.496443812233286,
2464
+ "grad_norm": 0.12260976552880777,
2465
+ "learning_rate": 8.363405531898833e-07,
2466
+ "loss": 0.0035,
2467
+ "step": 3510
2468
+ },
2469
+ {
2470
+ "epoch": 2.503556187766714,
2471
+ "grad_norm": 0.17068909040005176,
2472
+ "learning_rate": 8.135721058371681e-07,
2473
+ "loss": 0.0038,
2474
+ "step": 3520
2475
+ },
2476
+ {
2477
+ "epoch": 2.5106685633001424,
2478
+ "grad_norm": 0.14486041747836928,
2479
+ "learning_rate": 7.910903986122537e-07,
2480
+ "loss": 0.0023,
2481
+ "step": 3530
2482
+ },
2483
+ {
2484
+ "epoch": 2.5177809388335706,
2485
+ "grad_norm": 0.16537212820522457,
2486
+ "learning_rate": 7.688969713499983e-07,
2487
+ "loss": 0.0033,
2488
+ "step": 3540
2489
+ },
2490
+ {
2491
+ "epoch": 2.5248933143669987,
2492
+ "grad_norm": 0.06547618532234573,
2493
+ "learning_rate": 7.469933441401606e-07,
2494
+ "loss": 0.0036,
2495
+ "step": 3550
2496
+ },
2497
+ {
2498
+ "epoch": 2.532005689900427,
2499
+ "grad_norm": 0.09486129847604534,
2500
+ "learning_rate": 7.253810172232867e-07,
2501
+ "loss": 0.0029,
2502
+ "step": 3560
2503
+ },
2504
+ {
2505
+ "epoch": 2.539118065433855,
2506
+ "grad_norm": 0.15420596551214288,
2507
+ "learning_rate": 7.040614708879489e-07,
2508
+ "loss": 0.0031,
2509
+ "step": 3570
2510
+ },
2511
+ {
2512
+ "epoch": 2.546230440967283,
2513
+ "grad_norm": 0.18795827544823362,
2514
+ "learning_rate": 6.830361653693673e-07,
2515
+ "loss": 0.0031,
2516
+ "step": 3580
2517
+ },
2518
+ {
2519
+ "epoch": 2.5533428165007113,
2520
+ "grad_norm": 0.20144541991501458,
2521
+ "learning_rate": 6.623065407493801e-07,
2522
+ "loss": 0.0031,
2523
+ "step": 3590
2524
+ },
2525
+ {
2526
+ "epoch": 2.5604551920341394,
2527
+ "grad_norm": 0.11898776472079374,
2528
+ "learning_rate": 6.418740168578208e-07,
2529
+ "loss": 0.0029,
2530
+ "step": 3600
2531
+ },
2532
+ {
2533
+ "epoch": 2.5675675675675675,
2534
+ "grad_norm": 0.11704775629045612,
2535
+ "learning_rate": 6.217399931752627e-07,
2536
+ "loss": 0.0031,
2537
+ "step": 3610
2538
+ },
2539
+ {
2540
+ "epoch": 2.5746799431009957,
2541
+ "grad_norm": 0.13757018665386925,
2542
+ "learning_rate": 6.019058487371687e-07,
2543
+ "loss": 0.0028,
2544
+ "step": 3620
2545
+ },
2546
+ {
2547
+ "epoch": 2.581792318634424,
2548
+ "grad_norm": 0.07705433560973203,
2549
+ "learning_rate": 5.82372942039432e-07,
2550
+ "loss": 0.0037,
2551
+ "step": 3630
2552
+ },
2553
+ {
2554
+ "epoch": 2.588904694167852,
2555
+ "grad_norm": 0.12004181043862794,
2556
+ "learning_rate": 5.631426109453364e-07,
2557
+ "loss": 0.003,
2558
+ "step": 3640
2559
+ },
2560
+ {
2561
+ "epoch": 2.59601706970128,
2562
+ "grad_norm": 0.11547199526456815,
2563
+ "learning_rate": 5.44216172593916e-07,
2564
+ "loss": 0.0032,
2565
+ "step": 3650
2566
+ },
2567
+ {
2568
+ "epoch": 2.6031294452347082,
2569
+ "grad_norm": 0.20275686253937805,
2570
+ "learning_rate": 5.255949233097451e-07,
2571
+ "loss": 0.0035,
2572
+ "step": 3660
2573
+ },
2574
+ {
2575
+ "epoch": 2.6102418207681364,
2576
+ "grad_norm": 0.1327960409529542,
2577
+ "learning_rate": 5.072801385141429e-07,
2578
+ "loss": 0.0032,
2579
+ "step": 3670
2580
+ },
2581
+ {
2582
+ "epoch": 2.617354196301565,
2583
+ "grad_norm": 0.13522734646826431,
2584
+ "learning_rate": 4.89273072637827e-07,
2585
+ "loss": 0.0027,
2586
+ "step": 3680
2587
+ },
2588
+ {
2589
+ "epoch": 2.6244665718349927,
2590
+ "grad_norm": 0.0921535098896707,
2591
+ "learning_rate": 4.7157495903498105e-07,
2592
+ "loss": 0.0029,
2593
+ "step": 3690
2594
+ },
2595
+ {
2596
+ "epoch": 2.6315789473684212,
2597
+ "grad_norm": 0.1305724860300583,
2598
+ "learning_rate": 4.541870098987911e-07,
2599
+ "loss": 0.0035,
2600
+ "step": 3700
2601
+ },
2602
+ {
2603
+ "epoch": 2.6386913229018494,
2604
+ "grad_norm": 0.1366897855739292,
2605
+ "learning_rate": 4.371104161784073e-07,
2606
+ "loss": 0.0039,
2607
+ "step": 3710
2608
+ },
2609
+ {
2610
+ "epoch": 2.6458036984352775,
2611
+ "grad_norm": 0.16675061725996185,
2612
+ "learning_rate": 4.2034634749738623e-07,
2613
+ "loss": 0.003,
2614
+ "step": 3720
2615
+ },
2616
+ {
2617
+ "epoch": 2.6529160739687057,
2618
+ "grad_norm": 0.12062320450080749,
2619
+ "learning_rate": 4.038959520735658e-07,
2620
+ "loss": 0.0032,
2621
+ "step": 3730
2622
+ },
2623
+ {
2624
+ "epoch": 2.660028449502134,
2625
+ "grad_norm": 0.07277873243358957,
2626
+ "learning_rate": 3.8776035664043033e-07,
2627
+ "loss": 0.0033,
2628
+ "step": 3740
2629
+ },
2630
+ {
2631
+ "epoch": 2.667140825035562,
2632
+ "grad_norm": 0.09995970754512991,
2633
+ "learning_rate": 3.719406663699349e-07,
2634
+ "loss": 0.0036,
2635
+ "step": 3750
2636
+ },
2637
+ {
2638
+ "epoch": 2.67425320056899,
2639
+ "grad_norm": 0.14356536332083528,
2640
+ "learning_rate": 3.564379647968064e-07,
2641
+ "loss": 0.0034,
2642
+ "step": 3760
2643
+ },
2644
+ {
2645
+ "epoch": 2.681365576102418,
2646
+ "grad_norm": 0.1289519043233803,
2647
+ "learning_rate": 3.4125331374433414e-07,
2648
+ "loss": 0.0029,
2649
+ "step": 3770
2650
+ },
2651
+ {
2652
+ "epoch": 2.6884779516358464,
2653
+ "grad_norm": 0.10645779562131363,
2654
+ "learning_rate": 3.2638775325163517e-07,
2655
+ "loss": 0.0027,
2656
+ "step": 3780
2657
+ },
2658
+ {
2659
+ "epoch": 2.6955903271692745,
2660
+ "grad_norm": 0.10980156190201901,
2661
+ "learning_rate": 3.1184230150243025e-07,
2662
+ "loss": 0.0026,
2663
+ "step": 3790
2664
+ },
2665
+ {
2666
+ "epoch": 2.7027027027027026,
2667
+ "grad_norm": 0.1212601092847071,
2668
+ "learning_rate": 2.9761795475529375e-07,
2669
+ "loss": 0.0027,
2670
+ "step": 3800
2671
+ },
2672
+ {
2673
+ "epoch": 2.7098150782361308,
2674
+ "grad_norm": 0.10465054324216685,
2675
+ "learning_rate": 2.8371568727542486e-07,
2676
+ "loss": 0.0032,
2677
+ "step": 3810
2678
+ },
2679
+ {
2680
+ "epoch": 2.716927453769559,
2681
+ "grad_norm": 0.14087107927522052,
2682
+ "learning_rate": 2.7013645126791446e-07,
2683
+ "loss": 0.0027,
2684
+ "step": 3820
2685
+ },
2686
+ {
2687
+ "epoch": 2.724039829302987,
2688
+ "grad_norm": 0.11777162015019617,
2689
+ "learning_rate": 2.5688117681252677e-07,
2690
+ "loss": 0.0031,
2691
+ "step": 3830
2692
+ },
2693
+ {
2694
+ "epoch": 2.731152204836415,
2695
+ "grad_norm": 0.12580839073471906,
2696
+ "learning_rate": 2.439507717999945e-07,
2697
+ "loss": 0.0027,
2698
+ "step": 3840
2699
+ },
2700
+ {
2701
+ "epoch": 2.7382645803698438,
2702
+ "grad_norm": 0.11019351778666993,
2703
+ "learning_rate": 2.3134612186983817e-07,
2704
+ "loss": 0.0032,
2705
+ "step": 3850
2706
+ },
2707
+ {
2708
+ "epoch": 2.7453769559032715,
2709
+ "grad_norm": 0.2540811705778796,
2710
+ "learning_rate": 2.1906809034970057e-07,
2711
+ "loss": 0.0032,
2712
+ "step": 3860
2713
+ },
2714
+ {
2715
+ "epoch": 2.7524893314367,
2716
+ "grad_norm": 0.14533749828341638,
2717
+ "learning_rate": 2.0711751819622038e-07,
2718
+ "loss": 0.0028,
2719
+ "step": 3870
2720
+ },
2721
+ {
2722
+ "epoch": 2.759601706970128,
2723
+ "grad_norm": 0.17723003777910762,
2724
+ "learning_rate": 1.954952239374286e-07,
2725
+ "loss": 0.0033,
2726
+ "step": 3880
2727
+ },
2728
+ {
2729
+ "epoch": 2.7667140825035563,
2730
+ "grad_norm": 0.1714781247080342,
2731
+ "learning_rate": 1.8420200361669137e-07,
2732
+ "loss": 0.0028,
2733
+ "step": 3890
2734
+ },
2735
+ {
2736
+ "epoch": 2.7738264580369845,
2737
+ "grad_norm": 0.1442879683659834,
2738
+ "learning_rate": 1.732386307381767e-07,
2739
+ "loss": 0.0028,
2740
+ "step": 3900
2741
+ },
2742
+ {
2743
+ "epoch": 2.7809388335704126,
2744
+ "grad_norm": 0.11658671113478708,
2745
+ "learning_rate": 1.6260585621388604e-07,
2746
+ "loss": 0.0032,
2747
+ "step": 3910
2748
+ },
2749
+ {
2750
+ "epoch": 2.7880512091038407,
2751
+ "grad_norm": 0.13555304661960596,
2752
+ "learning_rate": 1.523044083122138e-07,
2753
+ "loss": 0.0033,
2754
+ "step": 3920
2755
+ },
2756
+ {
2757
+ "epoch": 2.795163584637269,
2758
+ "grad_norm": 0.16068613052421124,
2759
+ "learning_rate": 1.4233499260807194e-07,
2760
+ "loss": 0.0034,
2761
+ "step": 3930
2762
+ },
2763
+ {
2764
+ "epoch": 2.802275960170697,
2765
+ "grad_norm": 0.1397672323891182,
2766
+ "learning_rate": 1.326982919345582e-07,
2767
+ "loss": 0.003,
2768
+ "step": 3940
2769
+ },
2770
+ {
2771
+ "epoch": 2.809388335704125,
2772
+ "grad_norm": 0.1228326098193467,
2773
+ "learning_rate": 1.2339496633619218e-07,
2774
+ "loss": 0.0026,
2775
+ "step": 3950
2776
+ },
2777
+ {
2778
+ "epoch": 2.8165007112375533,
2779
+ "grad_norm": 0.09294084238773208,
2780
+ "learning_rate": 1.1442565302370146e-07,
2781
+ "loss": 0.0026,
2782
+ "step": 3960
2783
+ },
2784
+ {
2785
+ "epoch": 2.8236130867709814,
2786
+ "grad_norm": 0.10538827214385106,
2787
+ "learning_rate": 1.0579096633038411e-07,
2788
+ "loss": 0.0033,
2789
+ "step": 3970
2790
+ },
2791
+ {
2792
+ "epoch": 2.8307254623044096,
2793
+ "grad_norm": 0.09895208971100541,
2794
+ "learning_rate": 9.749149767002197e-08,
2795
+ "loss": 0.0029,
2796
+ "step": 3980
2797
+ },
2798
+ {
2799
+ "epoch": 2.8378378378378377,
2800
+ "grad_norm": 0.17612347880517987,
2801
+ "learning_rate": 8.952781549638412e-08,
2802
+ "loss": 0.0038,
2803
+ "step": 3990
2804
+ },
2805
+ {
2806
+ "epoch": 2.844950213371266,
2807
+ "grad_norm": 0.13285843764249902,
2808
+ "learning_rate": 8.190046526428241e-08,
2809
+ "loss": 0.0028,
2810
+ "step": 4000
2811
+ },
2812
+ {
2813
+ "epoch": 2.852062588904694,
2814
+ "grad_norm": 0.15853886614347157,
2815
+ "learning_rate": 7.460996939221643e-08,
2816
+ "loss": 0.0032,
2817
+ "step": 4010
2818
+ },
2819
+ {
2820
+ "epoch": 2.8591749644381226,
2821
+ "grad_norm": 0.10115826454451997,
2822
+ "learning_rate": 6.765682722659151e-08,
2823
+ "loss": 0.0034,
2824
+ "step": 4020
2825
+ },
2826
+ {
2827
+ "epoch": 2.8662873399715503,
2828
+ "grad_norm": 0.16050424912282388,
2829
+ "learning_rate": 6.104151500751609e-08,
2830
+ "loss": 0.0026,
2831
+ "step": 4030
2832
+ },
2833
+ {
2834
+ "epoch": 2.873399715504979,
2835
+ "grad_norm": 0.10822054946183253,
2836
+ "learning_rate": 5.476448583618288e-08,
2837
+ "loss": 0.0035,
2838
+ "step": 4040
2839
+ },
2840
+ {
2841
+ "epoch": 2.8805120910384066,
2842
+ "grad_norm": 0.1113521110254991,
2843
+ "learning_rate": 4.8826169643832464e-08,
2844
+ "loss": 0.0026,
2845
+ "step": 4050
2846
+ },
2847
+ {
2848
+ "epoch": 2.887624466571835,
2849
+ "grad_norm": 0.14081228392187445,
2850
+ "learning_rate": 4.322697316231361e-08,
2851
+ "loss": 0.0032,
2852
+ "step": 4060
2853
+ },
2854
+ {
2855
+ "epoch": 2.8947368421052633,
2856
+ "grad_norm": 0.11756191197474342,
2857
+ "learning_rate": 3.796727989621385e-08,
2858
+ "loss": 0.0024,
2859
+ "step": 4070
2860
+ },
2861
+ {
2862
+ "epoch": 2.9018492176386914,
2863
+ "grad_norm": 0.14346626654053973,
2864
+ "learning_rate": 3.304745009660326e-08,
2865
+ "loss": 0.003,
2866
+ "step": 4080
2867
+ },
2868
+ {
2869
+ "epoch": 2.9089615931721196,
2870
+ "grad_norm": 0.13833583160259022,
2871
+ "learning_rate": 2.8467820736350903e-08,
2872
+ "loss": 0.0028,
2873
+ "step": 4090
2874
+ },
2875
+ {
2876
+ "epoch": 2.9160739687055477,
2877
+ "grad_norm": 0.08441703695039304,
2878
+ "learning_rate": 2.422870548705103e-08,
2879
+ "loss": 0.003,
2880
+ "step": 4100
2881
+ },
2882
+ {
2883
+ "epoch": 2.923186344238976,
2884
+ "grad_norm": 0.15199272572784162,
2885
+ "learning_rate": 2.0330394697534726e-08,
2886
+ "loss": 0.0032,
2887
+ "step": 4110
2888
+ },
2889
+ {
2890
+ "epoch": 2.930298719772404,
2891
+ "grad_norm": 0.09905970954206261,
2892
+ "learning_rate": 1.677315537398583e-08,
2893
+ "loss": 0.0033,
2894
+ "step": 4120
2895
+ },
2896
+ {
2897
+ "epoch": 2.937411095305832,
2898
+ "grad_norm": 0.12746964816800027,
2899
+ "learning_rate": 1.355723116165164e-08,
2900
+ "loss": 0.003,
2901
+ "step": 4130
2902
+ },
2903
+ {
2904
+ "epoch": 2.9445234708392602,
2905
+ "grad_norm": 0.1730883953102828,
2906
+ "learning_rate": 1.0682842328154086e-08,
2907
+ "loss": 0.003,
2908
+ "step": 4140
2909
+ },
2910
+ {
2911
+ "epoch": 2.9516358463726884,
2912
+ "grad_norm": 0.14592570068315344,
2913
+ "learning_rate": 8.150185748405092e-09,
2914
+ "loss": 0.0034,
2915
+ "step": 4150
2916
+ },
2917
+ {
2918
+ "epoch": 2.9587482219061165,
2919
+ "grad_norm": 0.16218729377273186,
2920
+ "learning_rate": 5.959434891121274e-09,
2921
+ "loss": 0.0031,
2922
+ "step": 4160
2923
+ },
2924
+ {
2925
+ "epoch": 2.9658605974395447,
2926
+ "grad_norm": 0.1534720207270455,
2927
+ "learning_rate": 4.110739806940656e-09,
2928
+ "loss": 0.0028,
2929
+ "step": 4170
2930
+ },
2931
+ {
2932
+ "epoch": 2.972972972972973,
2933
+ "grad_norm": 0.1535652411238345,
2934
+ "learning_rate": 2.604227118148117e-09,
2935
+ "loss": 0.0025,
2936
+ "step": 4180
2937
+ },
2938
+ {
2939
+ "epoch": 2.9800853485064014,
2940
+ "grad_norm": 0.21854345544372025,
2941
+ "learning_rate": 1.4400000100017741e-09,
2942
+ "loss": 0.0028,
2943
+ "step": 4190
2944
+ },
2945
+ {
2946
+ "epoch": 2.987197724039829,
2947
+ "grad_norm": 0.11360018294244285,
2948
+ "learning_rate": 6.181382236641887e-10,
2949
+ "loss": 0.0027,
2950
+ "step": 4200
2951
+ },
2952
+ {
2953
+ "epoch": 2.9943100995732577,
2954
+ "grad_norm": 0.13109703302719727,
2955
+ "learning_rate": 1.3869805074284704e-10,
2956
+ "loss": 0.003,
2957
+ "step": 4210
2958
+ },
2959
+ {
2960
+ "epoch": 3.0,
2961
+ "step": 4218,
2962
+ "total_flos": 247294279680000.0,
2963
+ "train_loss": 0.26427117863254007,
2964
+ "train_runtime": 27747.0354,
2965
+ "train_samples_per_second": 9.727,
2966
+ "train_steps_per_second": 0.152
2967
+ }
2968
+ ],
2969
+ "logging_steps": 10,
2970
+ "max_steps": 4218,
2971
+ "num_input_tokens_seen": 0,
2972
+ "num_train_epochs": 3,
2973
+ "save_steps": 500,
2974
+ "stateful_callbacks": {
2975
+ "TrainerControl": {
2976
+ "args": {
2977
+ "should_epoch_stop": false,
2978
+ "should_evaluate": false,
2979
+ "should_log": false,
2980
+ "should_save": false,
2981
+ "should_training_stop": false
2982
+ },
2983
+ "attributes": {}
2984
+ }
2985
+ },
2986
+ "total_flos": 247294279680000.0,
2987
+ "train_batch_size": 8,
2988
+ "trial_name": null,
2989
+ "trial_params": null
2990
+ }
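The log history closes with the aggregate summary above: 4218 optimizer steps over 3 epochs, a mean train_loss of about 0.264, and roughly 27,747 s (about 7.7 h) of training at ~9.7 samples/s. Below is a minimal sketch for re-plotting that history locally; it assumes the trainer_state.json from this commit has been downloaded next to the script and that matplotlib is available — neither the script nor the output filename is part of this commit.

```python
import json

import matplotlib.pyplot as plt

# Assumption: the trainer_state.json from this commit has been downloaded
# into the current working directory.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final aggregate record does not.
history = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.savefig("training_loss_replot.png")
```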
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:563bd929e6fd35927a880d21f2f35e526927af1ddb0723f0bc25cafcbba4fa73
3
+ size 7889
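training_args.bin is tracked through Git LFS, so only the pointer (sha256 and size) appears in the diff above. As a rough sketch — assuming the actual file has been pulled locally and that torch and transformers are installed — the pickled TrainingArguments it contains can be inspected like this:

```python
import torch

# Assumption: training_args.bin has been fetched from Git LFS into the working directory.
# The file is a pickled TrainingArguments object, so weights_only=False is required,
# and it should only be loaded from a repository you trust.
training_args = torch.load("training_args.bin", weights_only=False)

print(training_args.learning_rate)
print(training_args.lr_scheduler_type)
print(training_args.num_train_epochs)
```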
training_loss.png ADDED
vocab.json ADDED
The diff for this file is too large to render. See raw diff
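Since the vocab.json diff is too large for the viewer, it can be inspected locally instead; it is typically a plain JSON mapping from token string to token id. A small sketch, assuming the file has been downloaded into the current directory:

```python
import json

# Assumption: vocab.json from this commit is available locally.
with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)  # token string -> token id

print(len(vocab))  # vocabulary size
# First few entries ordered by token id.
print(sorted(vocab.items(), key=lambda kv: kv[1])[:5])
```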