double7 committed on
Commit 96a926c · verified · 1 Parent(s): 75e184e

Upload model

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: Qwen2.5-7B
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: v2
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # v2
+
+ This model is a fine-tuned version of [Qwen2.5-7B](https://huggingface.co/Qwen2.5-7B) on the tower_zhen_cot_mt_gemini_distill.sft dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 1e-05
+ - train_batch_size: 4
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 8
+ - gradient_accumulation_steps: 2
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 64
+ - optimizer: Use OptimizerNames.ADAMW_TORCH_FUSED with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 1.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.56.2
+ - Pytorch 2.8.0+cu128
+ - Datasets 4.0.0
+ - Tokenizers 0.22.1
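
The auto-generated card stops short of a usage example. As a minimal sketch (not part of the commit), chat-style inference with this checkpoint could look like the following; `./v2` is a placeholder for a local clone of this repository, and the prompt formatting comes from the bundled chat_template.jinja.

```python
# Minimal inference sketch; "./v2" is a hypothetical local path to this checkpoint.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "./v2"
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(
    model_path, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "Translate into Chinese: The weather is lovely today."}]
# The bundled ChatML template already appends the assistant header after a user turn.
input_ids = tokenizer.apply_chat_template(messages, return_tensors="pt").to(model.device)
output_ids = model.generate(input_ids, max_new_tokens=512)
print(tokenizer.decode(output_ids[0][input_ids.shape[-1]:], skip_special_tokens=True))
```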
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "</tool_call>": 151658,
+   "<tool_call>": 151657,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 1.0,
+   "total_flos": 44242544295936.0,
+   "train_loss": 0.5097209928399425,
+   "train_runtime": 1256.4644,
+   "train_samples_per_second": 14.995,
+   "train_steps_per_second": 0.235
+ }
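
As a quick consistency check on these numbers: the trainer log below reports 295 optimizer steps, and 295 steps × 64 effective batch size ≈ 18.9k samples, which agrees with train_runtime × train_samples_per_second ≈ 1256.5 s × 15.0 ≈ 18.8k for the single epoch (likewise 0.235 steps/s × 1256.5 s ≈ 295 steps).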
chat_template.jinja ADDED
@@ -0,0 +1,7 @@
+ {% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ '<|im_start|>system
+ ' + system_message + '<|im_end|>
+ ' }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|im_start|>user
+ ' + content + '<|im_end|>
+ <|im_start|>assistant
+ ' }}{% elif message['role'] == 'assistant' %}{{ content + '<|im_end|>
+ ' }}{% endif %}{% endfor %}
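
This is the standard ChatML layout (turns wrapped in <|im_start|>…<|im_end|>), matching `template: chatml` in the training config below; note that it appends the assistant header after every user turn unconditionally, with no add_generation_prompt switch. A small sketch of what it renders, assuming the repository's tokenizer is loaded as `tok`:

```python
# Sketch: render the ChatML prompt produced by chat_template.jinja.
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]
print(tok.apply_chat_template(messages, tokenize=False))
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Hello!<|im_end|>
# <|im_start|>assistant
```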
config.json ADDED
@@ -0,0 +1,59 @@
+ {
+   "architectures": [
+     "Qwen2ForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "dtype": "bfloat16",
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 3584,
+   "initializer_range": 0.02,
+   "intermediate_size": 18944,
+   "layer_types": [
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention",
+     "full_attention"
+   ],
+   "max_position_embeddings": 131072,
+   "max_window_layers": 28,
+   "model_type": "qwen2",
+   "num_attention_heads": 28,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 4,
+   "pad_token_id": 151643,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "transformers_version": "4.56.2",
+   "use_cache": false,
+   "use_mrope": false,
+   "use_sliding_window": false,
+   "vocab_size": 152064
+ }
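
A few properties follow directly from this config: head_dim = 3584 / 28 = 128; with num_key_value_heads = 4, every seven query heads share one KV head (grouped-query attention); and because tie_word_embeddings is false, the 152064-row embedding matrix and lm_head are stored as separate tensors, as the weight map below confirms.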
generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "max_new_tokens": 2048,
+   "pad_token_id": 151643,
+   "transformers_version": "4.56.2"
+ }
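
In effect, generation stops at either <|im_end|> (151645) or <|endoftext|> (151643) and is capped at 2048 new tokens unless the caller overrides max_new_tokens, as the inference sketch above does.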
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ee012c61e6d4e6bd757b58bfeaa2da4a468236105083542c920661622de2401
+ size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f548702e8598bb65364aed734f56aeef5302db942d053b3057a288df374dc8b7
+ size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63b9faf921c6022534c4a8e1849f5d27098d5375c85f3cb851191489e3b8dde7
+ size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1e6b3f5712bf9e6156ebf6a5018e2a7e62f36c78ac8c9587f9d20482c0272c6d
+ size 1089994880
model.safetensors.index.json ADDED
@@ -0,0 +1,347 @@
+ {
+   "metadata": {
+     "total_parameters": 333312,
+     "total_size": 15231233024
+   },
+   "weight_map": {
+     "lm_head.weight": "model-00004-of-00004.safetensors",
+     "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+     "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+     "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+     "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+     "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+     "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+     "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+     "model.norm.weight": "model-00003-of-00004.safetensors"
+   }
+ }
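
The weight map is what transformers consults when resolving each tensor to a shard. A short sketch for inspecting the layout offline, assuming the index file sits in the current directory:

```python
# Sketch: summarize the shard layout from model.safetensors.index.json.
import json
from collections import Counter

with open("model.safetensors.index.json") as f:
    index = json.load(f)

for shard, n_tensors in sorted(Counter(index["weight_map"].values()).items()):
    print(f"{shard}: {n_tensors} tensors")

# total_size counts raw tensor bytes; at 2 bytes per bf16 value that is
# roughly 7.6B parameters, consistent with a Qwen2.5-7B checkpoint.
print(index["metadata"]["total_size"] / 2 / 1e9, "billion parameters (approx.)")
```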
qwen_full_sft.yaml ADDED
@@ -0,0 +1,50 @@
+ ### model
+ model_name_or_path: Qwen2.5-7B
+ flash_attn: fa2
+
+
+ ### method
+ stage: sft
+ do_train: true
+ finetuning_type: full
+ deepspeed: examples/deepspeed/ds_z3_config.json
+
+ ### dataset
+ dataset: tower_zhen_cot_mt_gemini_distill.sft
+ template: chatml
+ cutoff_len: 12000
+ # cutoff_prompt_head: true
+ # max_samples: 16000
+ overwrite_cache: true
+ preprocessing_num_workers: 16
+ dataloader_num_workers: 4
+ # max_steps: 12540
+
+ # default_system: "You are Qwen, a virtual human developed by the Qwen Team, Alibaba Group, capable of perceiving auditory and visual inputs, as well as generating text and speech."
+
+ ### output
+ output_dir: saves/qwen-7b/full/sft/v2
+ logging_steps: 1
+ save_steps: 2500
+ plot_loss: true
+ overwrite_output_dir: true
+ report_to: wandb # choices: [none, wandb, tensorboard, swanlab, mlflow]
+
+ ### train
+ per_device_train_batch_size: 4
+ gradient_accumulation_steps: 2
+ learning_rate: 1.0e-5
+ num_train_epochs: 1.0
+ lr_scheduler_type: cosine
+ warmup_ratio: 0.1
+ # warmup_steps: 300
+ bf16: true
+ ddp_timeout: 180000000
+ resume_from_checkpoint: null
+ # model_accepts_loss_kwargs: true
+
+ ## eval
+ # eval_dataset: ted_yt_training_transcription.cot5.test.audio
+ # per_device_eval_batch_size: 1
+ # eval_strategy: steps
+ # eval_steps: 400
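
This is the LLaMA-Factory configuration that produced the run; the launch command itself is not recorded in the commit, but under recent LLaMA-Factory releases a config like this is typically started with `llamafactory-cli train qwen_full_sft.yaml`. Note that per_device_train_batch_size 4 × gradient_accumulation_steps 2 × 8 GPUs yields the effective batch size of 64 reported in the README.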
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
+ {
+   "add_bos_token": false,
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151645": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151646": {
+       "content": "<|object_ref_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|object_ref_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151648": {
+       "content": "<|box_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151649": {
+       "content": "<|box_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151650": {
+       "content": "<|quad_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|quad_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|vision_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|vision_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|vision_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|image_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|video_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151658": {
+       "content": "</tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151659": {
+       "content": "<|fim_prefix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151660": {
+       "content": "<|fim_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151661": {
+       "content": "<|fim_suffix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151662": {
+       "content": "<|fim_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151663": {
+       "content": "<|repo_name|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151664": {
+       "content": "<|file_sep|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "bos_token": null,
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "errors": "replace",
+   "extra_special_tokens": {},
+   "model_max_length": 131072,
+   "pad_token": "<|endoftext|>",
+   "padding_side": "right",
+   "split_special_tokens": false,
+   "tokenizer_class": "Qwen2Tokenizer",
+   "unk_token": null
+ }
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 1.0,
+   "total_flos": 44242544295936.0,
+   "train_loss": 0.5097209928399425,
+   "train_runtime": 1256.4644,
+   "train_samples_per_second": 14.995,
+   "train_steps_per_second": 0.235
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,296 @@
+ {"current_steps": 1, "total_steps": 295, "loss": 0.9899, "lr": 0.0, "epoch": 0.003395585738539898, "percentage": 0.34, "elapsed_time": "0:00:06", "remaining_time": "0:30:39"}
+ {"current_steps": 2, "total_steps": 295, "loss": 0.9683, "lr": 3.3333333333333335e-07, "epoch": 0.006791171477079796, "percentage": 0.68, "elapsed_time": "0:00:10", "remaining_time": "0:25:09"}
+ {"current_steps": 3, "total_steps": 295, "loss": 0.9722, "lr": 6.666666666666667e-07, "epoch": 0.010186757215619695, "percentage": 1.02, "elapsed_time": "0:00:13", "remaining_time": "0:22:04"}
+ {"current_steps": 4, "total_steps": 295, "loss": 0.9667, "lr": 1.0000000000000002e-06, "epoch": 0.013582342954159592, "percentage": 1.36, "elapsed_time": "0:00:17", "remaining_time": "0:21:00"}
+ {"current_steps": 5, "total_steps": 295, "loss": 0.9814, "lr": 1.3333333333333334e-06, "epoch": 0.01697792869269949, "percentage": 1.69, "elapsed_time": "0:00:21", "remaining_time": "0:20:23"}
+ {"current_steps": 6, "total_steps": 295, "loss": 0.9502, "lr": 1.6666666666666667e-06, "epoch": 0.02037351443123939, "percentage": 2.03, "elapsed_time": "0:00:25", "remaining_time": "0:20:09"}
+ {"current_steps": 7, "total_steps": 295, "loss": 0.9411, "lr": 2.0000000000000003e-06, "epoch": 0.023769100169779286, "percentage": 2.37, "elapsed_time": "0:00:30", "remaining_time": "0:20:41"}
+ {"current_steps": 8, "total_steps": 295, "loss": 0.9075, "lr": 2.3333333333333336e-06, "epoch": 0.027164685908319185, "percentage": 2.71, "elapsed_time": "0:00:34", "remaining_time": "0:20:51"}
+ {"current_steps": 9, "total_steps": 295, "loss": 0.8979, "lr": 2.666666666666667e-06, "epoch": 0.030560271646859084, "percentage": 3.05, "elapsed_time": "0:00:38", "remaining_time": "0:20:31"}
+ {"current_steps": 10, "total_steps": 295, "loss": 0.8589, "lr": 3e-06, "epoch": 0.03395585738539898, "percentage": 3.39, "elapsed_time": "0:00:42", "remaining_time": "0:20:23"}
+ {"current_steps": 11, "total_steps": 295, "loss": 0.8797, "lr": 3.3333333333333333e-06, "epoch": 0.03735144312393888, "percentage": 3.73, "elapsed_time": "0:00:46", "remaining_time": "0:20:06"}
+ {"current_steps": 12, "total_steps": 295, "loss": 0.785, "lr": 3.6666666666666666e-06, "epoch": 0.04074702886247878, "percentage": 4.07, "elapsed_time": "0:00:50", "remaining_time": "0:19:53"}
+ {"current_steps": 13, "total_steps": 295, "loss": 0.7932, "lr": 4.000000000000001e-06, "epoch": 0.044142614601018676, "percentage": 4.41, "elapsed_time": "0:00:54", "remaining_time": "0:19:36"}
+ {"current_steps": 14, "total_steps": 295, "loss": 0.7878, "lr": 4.333333333333334e-06, "epoch": 0.04753820033955857, "percentage": 4.75, "elapsed_time": "0:00:58", "remaining_time": "0:19:27"}
+ {"current_steps": 15, "total_steps": 295, "loss": 0.7854, "lr": 4.666666666666667e-06, "epoch": 0.050933786078098474, "percentage": 5.08, "elapsed_time": "0:01:01", "remaining_time": "0:19:11"}
+ {"current_steps": 16, "total_steps": 295, "loss": 0.7194, "lr": 5e-06, "epoch": 0.05432937181663837, "percentage": 5.42, "elapsed_time": "0:01:07", "remaining_time": "0:19:34"}
+ {"current_steps": 17, "total_steps": 295, "loss": 0.7059, "lr": 5.333333333333334e-06, "epoch": 0.057724957555178265, "percentage": 5.76, "elapsed_time": "0:01:10", "remaining_time": "0:19:19"}
+ {"current_steps": 18, "total_steps": 295, "loss": 0.7236, "lr": 5.666666666666667e-06, "epoch": 0.06112054329371817, "percentage": 6.1, "elapsed_time": "0:01:14", "remaining_time": "0:19:07"}
+ {"current_steps": 19, "total_steps": 295, "loss": 0.7133, "lr": 6e-06, "epoch": 0.06451612903225806, "percentage": 6.44, "elapsed_time": "0:01:20", "remaining_time": "0:19:31"}
+ {"current_steps": 20, "total_steps": 295, "loss": 0.6807, "lr": 6.333333333333333e-06, "epoch": 0.06791171477079797, "percentage": 6.78, "elapsed_time": "0:01:23", "remaining_time": "0:19:14"}
+ {"current_steps": 21, "total_steps": 295, "loss": 0.6717, "lr": 6.666666666666667e-06, "epoch": 0.07130730050933787, "percentage": 7.12, "elapsed_time": "0:01:28", "remaining_time": "0:19:08"}
+ {"current_steps": 22, "total_steps": 295, "loss": 0.6836, "lr": 7e-06, "epoch": 0.07470288624787776, "percentage": 7.46, "elapsed_time": "0:01:31", "remaining_time": "0:18:58"}
+ {"current_steps": 23, "total_steps": 295, "loss": 0.656, "lr": 7.333333333333333e-06, "epoch": 0.07809847198641766, "percentage": 7.8, "elapsed_time": "0:01:35", "remaining_time": "0:18:47"}
+ {"current_steps": 24, "total_steps": 295, "loss": 0.652, "lr": 7.666666666666667e-06, "epoch": 0.08149405772495756, "percentage": 8.14, "elapsed_time": "0:01:39", "remaining_time": "0:18:39"}
+ {"current_steps": 25, "total_steps": 295, "loss": 0.6237, "lr": 8.000000000000001e-06, "epoch": 0.08488964346349745, "percentage": 8.47, "elapsed_time": "0:01:42", "remaining_time": "0:18:32"}
+ {"current_steps": 26, "total_steps": 295, "loss": 0.5999, "lr": 8.333333333333334e-06, "epoch": 0.08828522920203735, "percentage": 8.81, "elapsed_time": "0:01:48", "remaining_time": "0:18:47"}
+ {"current_steps": 27, "total_steps": 295, "loss": 0.6229, "lr": 8.666666666666668e-06, "epoch": 0.09168081494057725, "percentage": 9.15, "elapsed_time": "0:01:52", "remaining_time": "0:18:37"}
+ {"current_steps": 28, "total_steps": 295, "loss": 0.5909, "lr": 9e-06, "epoch": 0.09507640067911714, "percentage": 9.49, "elapsed_time": "0:01:56", "remaining_time": "0:18:26"}
+ {"current_steps": 29, "total_steps": 295, "loss": 0.6236, "lr": 9.333333333333334e-06, "epoch": 0.09847198641765705, "percentage": 9.83, "elapsed_time": "0:01:59", "remaining_time": "0:18:17"}
+ {"current_steps": 30, "total_steps": 295, "loss": 0.5978, "lr": 9.666666666666667e-06, "epoch": 0.10186757215619695, "percentage": 10.17, "elapsed_time": "0:02:03", "remaining_time": "0:18:10"}
+ {"current_steps": 31, "total_steps": 295, "loss": 0.6206, "lr": 1e-05, "epoch": 0.10526315789473684, "percentage": 10.51, "elapsed_time": "0:02:06", "remaining_time": "0:18:00"}
+ {"current_steps": 32, "total_steps": 295, "loss": 0.6103, "lr": 9.999648647603774e-06, "epoch": 0.10865874363327674, "percentage": 10.85, "elapsed_time": "0:02:10", "remaining_time": "0:17:51"}
+ {"current_steps": 33, "total_steps": 295, "loss": 0.5823, "lr": 9.998594639794502e-06, "epoch": 0.11205432937181664, "percentage": 11.19, "elapsed_time": "0:02:14", "remaining_time": "0:17:45"}
+ {"current_steps": 34, "total_steps": 295, "loss": 0.5724, "lr": 9.996838124703448e-06, "epoch": 0.11544991511035653, "percentage": 11.53, "elapsed_time": "0:02:19", "remaining_time": "0:17:47"}
+ {"current_steps": 35, "total_steps": 295, "loss": 0.6006, "lr": 9.994379349192927e-06, "epoch": 0.11884550084889643, "percentage": 11.86, "elapsed_time": "0:02:22", "remaining_time": "0:17:39"}
+ {"current_steps": 36, "total_steps": 295, "loss": 0.5834, "lr": 9.991218658821609e-06, "epoch": 0.12224108658743633, "percentage": 12.2, "elapsed_time": "0:02:26", "remaining_time": "0:17:31"}
+ {"current_steps": 37, "total_steps": 295, "loss": 0.5781, "lr": 9.987356497795944e-06, "epoch": 0.12563667232597622, "percentage": 12.54, "elapsed_time": "0:02:31", "remaining_time": "0:17:36"}
+ {"current_steps": 38, "total_steps": 295, "loss": 0.5613, "lr": 9.982793408907747e-06, "epoch": 0.12903225806451613, "percentage": 12.88, "elapsed_time": "0:02:35", "remaining_time": "0:17:29"}
+ {"current_steps": 39, "total_steps": 295, "loss": 0.5518, "lr": 9.977530033457906e-06, "epoch": 0.13242784380305603, "percentage": 13.22, "elapsed_time": "0:02:39", "remaining_time": "0:17:24"}
+ {"current_steps": 40, "total_steps": 295, "loss": 0.5875, "lr": 9.971567111166246e-06, "epoch": 0.13582342954159593, "percentage": 13.56, "elapsed_time": "0:02:42", "remaining_time": "0:17:16"}
+ {"current_steps": 41, "total_steps": 295, "loss": 0.5666, "lr": 9.964905480067585e-06, "epoch": 0.13921901528013583, "percentage": 13.9, "elapsed_time": "0:02:46", "remaining_time": "0:17:11"}
+ {"current_steps": 42, "total_steps": 295, "loss": 0.5829, "lr": 9.957546076393944e-06, "epoch": 0.14261460101867574, "percentage": 14.24, "elapsed_time": "0:02:51", "remaining_time": "0:17:15"}
+ {"current_steps": 43, "total_steps": 295, "loss": 0.592, "lr": 9.949489934442966e-06, "epoch": 0.1460101867572156, "percentage": 14.58, "elapsed_time": "0:02:55", "remaining_time": "0:17:08"}
+ {"current_steps": 44, "total_steps": 295, "loss": 0.554, "lr": 9.940738186432565e-06, "epoch": 0.1494057724957555, "percentage": 14.92, "elapsed_time": "0:02:58", "remaining_time": "0:17:00"}
+ {"current_steps": 45, "total_steps": 295, "loss": 0.5563, "lr": 9.931292062341793e-06, "epoch": 0.15280135823429541, "percentage": 15.25, "elapsed_time": "0:03:02", "remaining_time": "0:16:54"}
+ {"current_steps": 46, "total_steps": 295, "loss": 0.5349, "lr": 9.921152889737985e-06, "epoch": 0.15619694397283532, "percentage": 15.59, "elapsed_time": "0:03:07", "remaining_time": "0:16:52"}
+ {"current_steps": 47, "total_steps": 295, "loss": 0.5885, "lr": 9.910322093590177e-06, "epoch": 0.15959252971137522, "percentage": 15.93, "elapsed_time": "0:03:12", "remaining_time": "0:16:54"}
+ {"current_steps": 48, "total_steps": 295, "loss": 0.555, "lr": 9.898801196068839e-06, "epoch": 0.16298811544991512, "percentage": 16.27, "elapsed_time": "0:03:19", "remaining_time": "0:17:04"}
+ {"current_steps": 49, "total_steps": 295, "loss": 0.5412, "lr": 9.886591816331953e-06, "epoch": 0.166383701188455, "percentage": 16.61, "elapsed_time": "0:03:23", "remaining_time": "0:17:01"}
+ {"current_steps": 50, "total_steps": 295, "loss": 0.5266, "lr": 9.87369567029745e-06, "epoch": 0.1697792869269949, "percentage": 16.95, "elapsed_time": "0:03:28", "remaining_time": "0:17:00"}
+ {"current_steps": 51, "total_steps": 295, "loss": 0.5364, "lr": 9.860114570402055e-06, "epoch": 0.1731748726655348, "percentage": 17.29, "elapsed_time": "0:03:31", "remaining_time": "0:16:53"}
+ {"current_steps": 52, "total_steps": 295, "loss": 0.544, "lr": 9.845850425346563e-06, "epoch": 0.1765704584040747, "percentage": 17.63, "elapsed_time": "0:03:37", "remaining_time": "0:16:54"}
+ {"current_steps": 53, "total_steps": 295, "loss": 0.526, "lr": 9.830905239827592e-06, "epoch": 0.1799660441426146, "percentage": 17.97, "elapsed_time": "0:03:42", "remaining_time": "0:16:54"}
+ {"current_steps": 54, "total_steps": 295, "loss": 0.5384, "lr": 9.815281114255841e-06, "epoch": 0.1833616298811545, "percentage": 18.31, "elapsed_time": "0:03:48", "remaining_time": "0:17:00"}
+ {"current_steps": 55, "total_steps": 295, "loss": 0.5216, "lr": 9.798980244460892e-06, "epoch": 0.1867572156196944, "percentage": 18.64, "elapsed_time": "0:03:52", "remaining_time": "0:16:53"}
+ {"current_steps": 56, "total_steps": 295, "loss": 0.5602, "lr": 9.782004921382612e-06, "epoch": 0.19015280135823429, "percentage": 18.98, "elapsed_time": "0:03:56", "remaining_time": "0:16:48"}
+ {"current_steps": 57, "total_steps": 295, "loss": 0.5211, "lr": 9.764357530749178e-06, "epoch": 0.1935483870967742, "percentage": 19.32, "elapsed_time": "0:03:59", "remaining_time": "0:16:41"}
+ {"current_steps": 58, "total_steps": 295, "loss": 0.5408, "lr": 9.74604055274178e-06, "epoch": 0.1969439728353141, "percentage": 19.66, "elapsed_time": "0:04:03", "remaining_time": "0:16:35"}
+ {"current_steps": 59, "total_steps": 295, "loss": 0.5297, "lr": 9.727056561646067e-06, "epoch": 0.200339558573854, "percentage": 20.0, "elapsed_time": "0:04:06", "remaining_time": "0:16:27"}
+ {"current_steps": 60, "total_steps": 295, "loss": 0.5241, "lr": 9.707408225490343e-06, "epoch": 0.2037351443123939, "percentage": 20.34, "elapsed_time": "0:04:10", "remaining_time": "0:16:22"}
+ {"current_steps": 61, "total_steps": 295, "loss": 0.5373, "lr": 9.687098305670606e-06, "epoch": 0.2071307300509338, "percentage": 20.68, "elapsed_time": "0:04:15", "remaining_time": "0:16:18"}
+ {"current_steps": 62, "total_steps": 295, "loss": 0.5397, "lr": 9.66612965656245e-06, "epoch": 0.21052631578947367, "percentage": 21.02, "elapsed_time": "0:04:18", "remaining_time": "0:16:12"}
+ {"current_steps": 63, "total_steps": 295, "loss": 0.5444, "lr": 9.644505225119922e-06, "epoch": 0.21392190152801357, "percentage": 21.36, "elapsed_time": "0:04:22", "remaining_time": "0:16:05"}
+ {"current_steps": 64, "total_steps": 295, "loss": 0.4984, "lr": 9.622228050461345e-06, "epoch": 0.21731748726655348, "percentage": 21.69, "elapsed_time": "0:04:27", "remaining_time": "0:16:03"}
+ {"current_steps": 65, "total_steps": 295, "loss": 0.5214, "lr": 9.599301263442194e-06, "epoch": 0.22071307300509338, "percentage": 22.03, "elapsed_time": "0:04:30", "remaining_time": "0:15:58"}
+ {"current_steps": 66, "total_steps": 295, "loss": 0.5265, "lr": 9.575728086215093e-06, "epoch": 0.22410865874363328, "percentage": 22.37, "elapsed_time": "0:04:34", "remaining_time": "0:15:52"}
+ {"current_steps": 67, "total_steps": 295, "loss": 0.5155, "lr": 9.551511831776966e-06, "epoch": 0.22750424448217318, "percentage": 22.71, "elapsed_time": "0:04:38", "remaining_time": "0:15:46"}
+ {"current_steps": 68, "total_steps": 295, "loss": 0.5277, "lr": 9.526655903503423e-06, "epoch": 0.23089983022071306, "percentage": 23.05, "elapsed_time": "0:04:42", "remaining_time": "0:15:41"}
+ {"current_steps": 69, "total_steps": 295, "loss": 0.5307, "lr": 9.501163794670445e-06, "epoch": 0.23429541595925296, "percentage": 23.39, "elapsed_time": "0:04:47", "remaining_time": "0:15:40"}
+ {"current_steps": 70, "total_steps": 295, "loss": 0.51, "lr": 9.475039087963443e-06, "epoch": 0.23769100169779286, "percentage": 23.73, "elapsed_time": "0:04:50", "remaining_time": "0:15:34"}
+ {"current_steps": 71, "total_steps": 295, "loss": 0.517, "lr": 9.448285454973739e-06, "epoch": 0.24108658743633277, "percentage": 24.07, "elapsed_time": "0:04:54", "remaining_time": "0:15:28"}
+ {"current_steps": 72, "total_steps": 295, "loss": 0.5223, "lr": 9.420906655682553e-06, "epoch": 0.24448217317487267, "percentage": 24.41, "elapsed_time": "0:04:57", "remaining_time": "0:15:22"}
+ {"current_steps": 73, "total_steps": 295, "loss": 0.5257, "lr": 9.392906537932582e-06, "epoch": 0.24787775891341257, "percentage": 24.75, "elapsed_time": "0:05:01", "remaining_time": "0:15:16"}
+ {"current_steps": 74, "total_steps": 295, "loss": 0.5187, "lr": 9.364289036887214e-06, "epoch": 0.25127334465195245, "percentage": 25.08, "elapsed_time": "0:05:05", "remaining_time": "0:15:11"}
+ {"current_steps": 75, "total_steps": 295, "loss": 0.5127, "lr": 9.335058174477472e-06, "epoch": 0.2546689303904924, "percentage": 25.42, "elapsed_time": "0:05:08", "remaining_time": "0:15:05"}
+ {"current_steps": 76, "total_steps": 295, "loss": 0.4966, "lr": 9.305218058836778e-06, "epoch": 0.25806451612903225, "percentage": 25.76, "elapsed_time": "0:05:14", "remaining_time": "0:15:05"}
+ {"current_steps": 77, "total_steps": 295, "loss": 0.5265, "lr": 9.274772883723587e-06, "epoch": 0.2614601018675722, "percentage": 26.1, "elapsed_time": "0:05:17", "remaining_time": "0:14:59"}
+ {"current_steps": 78, "total_steps": 295, "loss": 0.5003, "lr": 9.24372692793199e-06, "epoch": 0.26485568760611206, "percentage": 26.44, "elapsed_time": "0:05:22", "remaining_time": "0:14:56"}
+ {"current_steps": 79, "total_steps": 295, "loss": 0.4971, "lr": 9.21208455469037e-06, "epoch": 0.26825127334465193, "percentage": 26.78, "elapsed_time": "0:05:25", "remaining_time": "0:14:50"}
+ {"current_steps": 80, "total_steps": 295, "loss": 0.491, "lr": 9.179850211048193e-06, "epoch": 0.27164685908319186, "percentage": 27.12, "elapsed_time": "0:05:29", "remaining_time": "0:14:46"}
+ {"current_steps": 81, "total_steps": 295, "loss": 0.5033, "lr": 9.14702842725101e-06, "epoch": 0.27504244482173174, "percentage": 27.46, "elapsed_time": "0:05:33", "remaining_time": "0:14:41"}
82
+ {"current_steps": 82, "total_steps": 295, "loss": 0.5157, "lr": 9.113623816103775e-06, "epoch": 0.27843803056027167, "percentage": 27.8, "elapsed_time": "0:05:37", "remaining_time": "0:14:36"}
83
+ {"current_steps": 83, "total_steps": 295, "loss": 0.5103, "lr": 9.079641072322555e-06, "epoch": 0.28183361629881154, "percentage": 28.14, "elapsed_time": "0:05:41", "remaining_time": "0:14:31"}
84
+ {"current_steps": 84, "total_steps": 295, "loss": 0.5118, "lr": 9.045084971874738e-06, "epoch": 0.28522920203735147, "percentage": 28.47, "elapsed_time": "0:05:44", "remaining_time": "0:14:26"}
85
+ {"current_steps": 85, "total_steps": 295, "loss": 0.4753, "lr": 9.009960371307798e-06, "epoch": 0.28862478777589134, "percentage": 28.81, "elapsed_time": "0:05:49", "remaining_time": "0:14:22"}
86
+ {"current_steps": 86, "total_steps": 295, "loss": 0.5065, "lr": 8.974272207066767e-06, "epoch": 0.2920203735144312, "percentage": 29.15, "elapsed_time": "0:05:52", "remaining_time": "0:14:17"}
87
+ {"current_steps": 87, "total_steps": 295, "loss": 0.496, "lr": 8.938025494800454e-06, "epoch": 0.29541595925297115, "percentage": 29.49, "elapsed_time": "0:05:56", "remaining_time": "0:14:12"}
88
+ {"current_steps": 88, "total_steps": 295, "loss": 0.4768, "lr": 8.901225328656543e-06, "epoch": 0.298811544991511, "percentage": 29.83, "elapsed_time": "0:06:02", "remaining_time": "0:14:12"}
89
+ {"current_steps": 89, "total_steps": 295, "loss": 0.4982, "lr": 8.863876880565656e-06, "epoch": 0.30220713073005095, "percentage": 30.17, "elapsed_time": "0:06:06", "remaining_time": "0:14:07"}
90
+ {"current_steps": 90, "total_steps": 295, "loss": 0.49, "lr": 8.825985399514488e-06, "epoch": 0.30560271646859083, "percentage": 30.51, "elapsed_time": "0:06:10", "remaining_time": "0:14:04"}
91
+ {"current_steps": 91, "total_steps": 295, "loss": 0.508, "lr": 8.787556210808101e-06, "epoch": 0.3089983022071307, "percentage": 30.85, "elapsed_time": "0:06:14", "remaining_time": "0:13:58"}
92
+ {"current_steps": 92, "total_steps": 295, "loss": 0.493, "lr": 8.748594715321512e-06, "epoch": 0.31239388794567063, "percentage": 31.19, "elapsed_time": "0:06:20", "remaining_time": "0:13:58"}
93
+ {"current_steps": 93, "total_steps": 295, "loss": 0.5063, "lr": 8.70910638874064e-06, "epoch": 0.3157894736842105, "percentage": 31.53, "elapsed_time": "0:06:23", "remaining_time": "0:13:52"}
94
+ {"current_steps": 94, "total_steps": 295, "loss": 0.4953, "lr": 8.669096780792754e-06, "epoch": 0.31918505942275044, "percentage": 31.86, "elapsed_time": "0:06:26", "remaining_time": "0:13:47"}
95
+ {"current_steps": 95, "total_steps": 295, "loss": 0.5062, "lr": 8.628571514466502e-06, "epoch": 0.3225806451612903, "percentage": 32.2, "elapsed_time": "0:06:30", "remaining_time": "0:13:41"}
96
+ {"current_steps": 96, "total_steps": 295, "loss": 0.5254, "lr": 8.587536285221656e-06, "epoch": 0.32597623089983024, "percentage": 32.54, "elapsed_time": "0:06:34", "remaining_time": "0:13:38"}
97
+ {"current_steps": 97, "total_steps": 295, "loss": 0.5107, "lr": 8.545996860188668e-06, "epoch": 0.3293718166383701, "percentage": 32.88, "elapsed_time": "0:06:38", "remaining_time": "0:13:33"}
98
+ {"current_steps": 98, "total_steps": 295, "loss": 0.4767, "lr": 8.503959077358143e-06, "epoch": 0.33276740237691, "percentage": 33.22, "elapsed_time": "0:06:43", "remaining_time": "0:13:30"}
99
+ {"current_steps": 99, "total_steps": 295, "loss": 0.4991, "lr": 8.46142884476038e-06, "epoch": 0.3361629881154499, "percentage": 33.56, "elapsed_time": "0:06:46", "remaining_time": "0:13:25"}
100
+ {"current_steps": 100, "total_steps": 295, "loss": 0.5202, "lr": 8.418412139635026e-06, "epoch": 0.3395585738539898, "percentage": 33.9, "elapsed_time": "0:06:50", "remaining_time": "0:13:20"}
101
+ {"current_steps": 101, "total_steps": 295, "loss": 0.5019, "lr": 8.374915007591053e-06, "epoch": 0.34295415959252973, "percentage": 34.24, "elapsed_time": "0:06:53", "remaining_time": "0:13:14"}
102
+ {"current_steps": 102, "total_steps": 295, "loss": 0.4981, "lr": 8.330943561757092e-06, "epoch": 0.3463497453310696, "percentage": 34.58, "elapsed_time": "0:06:57", "remaining_time": "0:13:09"}
103
+ {"current_steps": 103, "total_steps": 295, "loss": 0.4828, "lr": 8.286503981922284e-06, "epoch": 0.34974533106960953, "percentage": 34.92, "elapsed_time": "0:07:02", "remaining_time": "0:13:08"}
104
+ {"current_steps": 104, "total_steps": 295, "loss": 0.4677, "lr": 8.241602513667775e-06, "epoch": 0.3531409168081494, "percentage": 35.25, "elapsed_time": "0:07:06", "remaining_time": "0:13:04"}
105
+ {"current_steps": 105, "total_steps": 295, "loss": 0.4991, "lr": 8.19624546748895e-06, "epoch": 0.3565365025466893, "percentage": 35.59, "elapsed_time": "0:07:10", "remaining_time": "0:12:59"}
106
+ {"current_steps": 106, "total_steps": 295, "loss": 0.4874, "lr": 8.150439217908557e-06, "epoch": 0.3599320882852292, "percentage": 35.93, "elapsed_time": "0:07:14", "remaining_time": "0:12:54"}
107
+ {"current_steps": 107, "total_steps": 295, "loss": 0.4613, "lr": 8.104190202580811e-06, "epoch": 0.3633276740237691, "percentage": 36.27, "elapsed_time": "0:07:21", "remaining_time": "0:12:54"}
108
+ {"current_steps": 108, "total_steps": 295, "loss": 0.4981, "lr": 8.057504921386661e-06, "epoch": 0.366723259762309, "percentage": 36.61, "elapsed_time": "0:07:27", "remaining_time": "0:12:54"}
109
+ {"current_steps": 109, "total_steps": 295, "loss": 0.4812, "lr": 8.010389935520269e-06, "epoch": 0.3701188455008489, "percentage": 36.95, "elapsed_time": "0:07:31", "remaining_time": "0:12:49"}
110
+ {"current_steps": 110, "total_steps": 295, "loss": 0.4901, "lr": 7.962851866566912e-06, "epoch": 0.3735144312393888, "percentage": 37.29, "elapsed_time": "0:07:34", "remaining_time": "0:12:44"}
111
+ {"current_steps": 111, "total_steps": 295, "loss": 0.4841, "lr": 7.914897395572362e-06, "epoch": 0.3769100169779287, "percentage": 37.63, "elapsed_time": "0:07:38", "remaining_time": "0:12:39"}
112
+ {"current_steps": 112, "total_steps": 295, "loss": 0.4813, "lr": 7.866533262103937e-06, "epoch": 0.38030560271646857, "percentage": 37.97, "elapsed_time": "0:07:42", "remaining_time": "0:12:35"}
113
+ {"current_steps": 113, "total_steps": 295, "loss": 0.4731, "lr": 7.817766263303312e-06, "epoch": 0.3837011884550085, "percentage": 38.31, "elapsed_time": "0:07:45", "remaining_time": "0:12:29"}
114
+ {"current_steps": 114, "total_steps": 295, "loss": 0.4988, "lr": 7.768603252931243e-06, "epoch": 0.3870967741935484, "percentage": 38.64, "elapsed_time": "0:07:49", "remaining_time": "0:12:25"}
115
+ {"current_steps": 115, "total_steps": 295, "loss": 0.4825, "lr": 7.719051140404327e-06, "epoch": 0.3904923599320883, "percentage": 38.98, "elapsed_time": "0:07:53", "remaining_time": "0:12:20"}
116
+ {"current_steps": 116, "total_steps": 295, "loss": 0.4742, "lr": 7.669116889823955e-06, "epoch": 0.3938879456706282, "percentage": 39.32, "elapsed_time": "0:07:57", "remaining_time": "0:12:16"}
117
+ {"current_steps": 117, "total_steps": 295, "loss": 0.4743, "lr": 7.6188075189975644e-06, "epoch": 0.39728353140916806, "percentage": 39.66, "elapsed_time": "0:08:00", "remaining_time": "0:12:11"}
118
+ {"current_steps": 118, "total_steps": 295, "loss": 0.4739, "lr": 7.568130098452352e-06, "epoch": 0.400679117147708, "percentage": 40.0, "elapsed_time": "0:08:04", "remaining_time": "0:12:07"}
119
+ {"current_steps": 119, "total_steps": 295, "loss": 0.4814, "lr": 7.517091750441576e-06, "epoch": 0.40407470288624786, "percentage": 40.34, "elapsed_time": "0:08:08", "remaining_time": "0:12:02"}
120
+ {"current_steps": 120, "total_steps": 295, "loss": 0.4805, "lr": 7.465699647943586e-06, "epoch": 0.4074702886247878, "percentage": 40.68, "elapsed_time": "0:08:11", "remaining_time": "0:11:57"}
121
+ {"current_steps": 121, "total_steps": 295, "loss": 0.4953, "lr": 7.413961013653725e-06, "epoch": 0.41086587436332767, "percentage": 41.02, "elapsed_time": "0:08:15", "remaining_time": "0:11:52"}
122
+ {"current_steps": 122, "total_steps": 295, "loss": 0.4588, "lr": 7.361883118969248e-06, "epoch": 0.4142614601018676, "percentage": 41.36, "elapsed_time": "0:08:19", "remaining_time": "0:11:47"}
123
+ {"current_steps": 123, "total_steps": 295, "loss": 0.469, "lr": 7.309473282967387e-06, "epoch": 0.41765704584040747, "percentage": 41.69, "elapsed_time": "0:08:24", "remaining_time": "0:11:45"}
124
+ {"current_steps": 124, "total_steps": 295, "loss": 0.4812, "lr": 7.256738871376733e-06, "epoch": 0.42105263157894735, "percentage": 42.03, "elapsed_time": "0:08:27", "remaining_time": "0:11:40"}
125
+ {"current_steps": 125, "total_steps": 295, "loss": 0.456, "lr": 7.203687295542032e-06, "epoch": 0.4244482173174873, "percentage": 42.37, "elapsed_time": "0:08:31", "remaining_time": "0:11:35"}
126
+ {"current_steps": 126, "total_steps": 295, "loss": 0.4597, "lr": 7.1503260113826035e-06, "epoch": 0.42784380305602715, "percentage": 42.71, "elapsed_time": "0:08:34", "remaining_time": "0:11:30"}
127
+ {"current_steps": 127, "total_steps": 295, "loss": 0.4884, "lr": 7.09666251834447e-06, "epoch": 0.4312393887945671, "percentage": 43.05, "elapsed_time": "0:08:38", "remaining_time": "0:11:25"}
128
+ {"current_steps": 128, "total_steps": 295, "loss": 0.4581, "lr": 7.042704358346375e-06, "epoch": 0.43463497453310695, "percentage": 43.39, "elapsed_time": "0:08:42", "remaining_time": "0:11:21"}
129
+ {"current_steps": 129, "total_steps": 295, "loss": 0.4672, "lr": 6.988459114719849e-06, "epoch": 0.4380305602716469, "percentage": 43.73, "elapsed_time": "0:08:46", "remaining_time": "0:11:17"}
130
+ {"current_steps": 130, "total_steps": 295, "loss": 0.4524, "lr": 6.933934411143419e-06, "epoch": 0.44142614601018676, "percentage": 44.07, "elapsed_time": "0:08:50", "remaining_time": "0:11:13"}
131
+ {"current_steps": 131, "total_steps": 295, "loss": 0.4738, "lr": 6.879137910571191e-06, "epoch": 0.44482173174872663, "percentage": 44.41, "elapsed_time": "0:08:54", "remaining_time": "0:11:08"}
132
+ {"current_steps": 132, "total_steps": 295, "loss": 0.4679, "lr": 6.824077314155877e-06, "epoch": 0.44821731748726656, "percentage": 44.75, "elapsed_time": "0:08:59", "remaining_time": "0:11:05"}
133
+ {"current_steps": 133, "total_steps": 295, "loss": 0.4909, "lr": 6.768760360166471e-06, "epoch": 0.45161290322580644, "percentage": 45.08, "elapsed_time": "0:09:02", "remaining_time": "0:11:00"}
134
+ {"current_steps": 134, "total_steps": 295, "loss": 0.4734, "lr": 6.713194822900707e-06, "epoch": 0.45500848896434637, "percentage": 45.42, "elapsed_time": "0:09:06", "remaining_time": "0:10:56"}
135
+ {"current_steps": 135, "total_steps": 295, "loss": 0.4979, "lr": 6.657388511592453e-06, "epoch": 0.45840407470288624, "percentage": 45.76, "elapsed_time": "0:09:11", "remaining_time": "0:10:53"}
136
+ {"current_steps": 136, "total_steps": 295, "loss": 0.4826, "lr": 6.601349269314188e-06, "epoch": 0.4617996604414261, "percentage": 46.1, "elapsed_time": "0:09:14", "remaining_time": "0:10:48"}
137
+ {"current_steps": 137, "total_steps": 295, "loss": 0.4685, "lr": 6.545084971874738e-06, "epoch": 0.46519524617996605, "percentage": 46.44, "elapsed_time": "0:09:18", "remaining_time": "0:10:44"}
138
+ {"current_steps": 138, "total_steps": 295, "loss": 0.483, "lr": 6.488603526712391e-06, "epoch": 0.4685908319185059, "percentage": 46.78, "elapsed_time": "0:09:22", "remaining_time": "0:10:39"}
139
+ {"current_steps": 139, "total_steps": 295, "loss": 0.5028, "lr": 6.431912871783587e-06, "epoch": 0.47198641765704585, "percentage": 47.12, "elapsed_time": "0:09:25", "remaining_time": "0:10:34"}
140
+ {"current_steps": 140, "total_steps": 295, "loss": 0.4669, "lr": 6.3750209744473105e-06, "epoch": 0.47538200339558573, "percentage": 47.46, "elapsed_time": "0:09:29", "remaining_time": "0:10:30"}
141
+ {"current_steps": 141, "total_steps": 295, "loss": 0.4735, "lr": 6.3179358303453386e-06, "epoch": 0.47877758913412566, "percentage": 47.8, "elapsed_time": "0:09:34", "remaining_time": "0:10:27"}
142
+ {"current_steps": 142, "total_steps": 295, "loss": 0.5051, "lr": 6.260665462278544e-06, "epoch": 0.48217317487266553, "percentage": 48.14, "elapsed_time": "0:09:37", "remaining_time": "0:10:22"}
143
+ {"current_steps": 143, "total_steps": 295, "loss": 0.4762, "lr": 6.203217919079343e-06, "epoch": 0.4855687606112054, "percentage": 48.47, "elapsed_time": "0:09:41", "remaining_time": "0:10:18"}
144
+ {"current_steps": 144, "total_steps": 295, "loss": 0.4869, "lr": 6.145601274480521e-06, "epoch": 0.48896434634974534, "percentage": 48.81, "elapsed_time": "0:09:44", "remaining_time": "0:10:13"}
145
+ {"current_steps": 145, "total_steps": 295, "loss": 0.4611, "lr": 6.08782362598054e-06, "epoch": 0.4923599320882852, "percentage": 49.15, "elapsed_time": "0:09:49", "remaining_time": "0:10:09"}
146
+ {"current_steps": 146, "total_steps": 295, "loss": 0.4763, "lr": 6.029893093705492e-06, "epoch": 0.49575551782682514, "percentage": 49.49, "elapsed_time": "0:09:52", "remaining_time": "0:10:05"}
147
+ {"current_steps": 147, "total_steps": 295, "loss": 0.4654, "lr": 5.971817819267914e-06, "epoch": 0.499151103565365, "percentage": 49.83, "elapsed_time": "0:09:56", "remaining_time": "0:10:00"}
148
+ {"current_steps": 148, "total_steps": 295, "loss": 0.5044, "lr": 5.9136059646225375e-06, "epoch": 0.5025466893039049, "percentage": 50.17, "elapsed_time": "0:10:00", "remaining_time": "0:09:56"}
149
+ {"current_steps": 149, "total_steps": 295, "loss": 0.4435, "lr": 5.855265710919211e-06, "epoch": 0.5059422750424448, "percentage": 50.51, "elapsed_time": "0:10:05", "remaining_time": "0:09:53"}
150
+ {"current_steps": 150, "total_steps": 295, "loss": 0.4834, "lr": 5.796805257353109e-06, "epoch": 0.5093378607809848, "percentage": 50.85, "elapsed_time": "0:10:13", "remaining_time": "0:09:52"}
151
+ {"current_steps": 151, "total_steps": 295, "loss": 0.461, "lr": 5.738232820012407e-06, "epoch": 0.5127334465195246, "percentage": 51.19, "elapsed_time": "0:10:16", "remaining_time": "0:09:48"}
152
+ {"current_steps": 152, "total_steps": 295, "loss": 0.4611, "lr": 5.679556630723592e-06, "epoch": 0.5161290322580645, "percentage": 51.53, "elapsed_time": "0:10:20", "remaining_time": "0:09:43"}
153
+ {"current_steps": 153, "total_steps": 295, "loss": 0.4712, "lr": 5.620784935894548e-06, "epoch": 0.5195246179966044, "percentage": 51.86, "elapsed_time": "0:10:23", "remaining_time": "0:09:38"}
154
+ {"current_steps": 154, "total_steps": 295, "loss": 0.4906, "lr": 5.561925995355595e-06, "epoch": 0.5229202037351444, "percentage": 52.2, "elapsed_time": "0:10:29", "remaining_time": "0:09:36"}
155
+ {"current_steps": 155, "total_steps": 295, "loss": 0.4836, "lr": 5.5029880811986546e-06, "epoch": 0.5263157894736842, "percentage": 52.54, "elapsed_time": "0:10:33", "remaining_time": "0:09:32"}
156
+ {"current_steps": 156, "total_steps": 295, "loss": 0.4568, "lr": 5.443979476614674e-06, "epoch": 0.5297113752122241, "percentage": 52.88, "elapsed_time": "0:10:37", "remaining_time": "0:09:27"}
157
+ {"current_steps": 157, "total_steps": 295, "loss": 0.4669, "lr": 5.384908474729501e-06, "epoch": 0.533106960950764, "percentage": 53.22, "elapsed_time": "0:10:40", "remaining_time": "0:09:23"}
158
+ {"current_steps": 158, "total_steps": 295, "loss": 0.4631, "lr": 5.325783377438357e-06, "epoch": 0.5365025466893039, "percentage": 53.56, "elapsed_time": "0:10:45", "remaining_time": "0:09:19"}
159
+ {"current_steps": 159, "total_steps": 295, "loss": 0.4824, "lr": 5.266612494239088e-06, "epoch": 0.5398981324278438, "percentage": 53.9, "elapsed_time": "0:10:50", "remaining_time": "0:09:16"}
160
+ {"current_steps": 160, "total_steps": 295, "loss": 0.4619, "lr": 5.207404141064334e-06, "epoch": 0.5432937181663837, "percentage": 54.24, "elapsed_time": "0:10:56", "remaining_time": "0:09:13"}
161
+ {"current_steps": 161, "total_steps": 295, "loss": 0.518, "lr": 5.148166639112799e-06, "epoch": 0.5466893039049237, "percentage": 54.58, "elapsed_time": "0:11:00", "remaining_time": "0:09:10"}
162
+ {"current_steps": 162, "total_steps": 295, "loss": 0.4567, "lr": 5.088908313679788e-06, "epoch": 0.5500848896434635, "percentage": 54.92, "elapsed_time": "0:11:04", "remaining_time": "0:09:05"}
163
+ {"current_steps": 163, "total_steps": 295, "loss": 0.4649, "lr": 5.029637492987153e-06, "epoch": 0.5534804753820034, "percentage": 55.25, "elapsed_time": "0:11:08", "remaining_time": "0:09:01"}
164
+ {"current_steps": 164, "total_steps": 295, "loss": 0.4682, "lr": 4.970362507012848e-06, "epoch": 0.5568760611205433, "percentage": 55.59, "elapsed_time": "0:11:12", "remaining_time": "0:08:57"}
165
+ {"current_steps": 165, "total_steps": 295, "loss": 0.4594, "lr": 4.911091686320213e-06, "epoch": 0.5602716468590832, "percentage": 55.93, "elapsed_time": "0:11:16", "remaining_time": "0:08:52"}
166
+ {"current_steps": 166, "total_steps": 295, "loss": 0.4266, "lr": 4.8518333608872015e-06, "epoch": 0.5636672325976231, "percentage": 56.27, "elapsed_time": "0:11:19", "remaining_time": "0:08:48"}
167
+ {"current_steps": 167, "total_steps": 295, "loss": 0.4745, "lr": 4.792595858935668e-06, "epoch": 0.567062818336163, "percentage": 56.61, "elapsed_time": "0:11:23", "remaining_time": "0:08:43"}
168
+ {"current_steps": 168, "total_steps": 295, "loss": 0.4863, "lr": 4.733387505760913e-06, "epoch": 0.5704584040747029, "percentage": 56.95, "elapsed_time": "0:11:27", "remaining_time": "0:08:39"}
169
+ {"current_steps": 169, "total_steps": 295, "loss": 0.4459, "lr": 4.674216622561645e-06, "epoch": 0.5738539898132428, "percentage": 57.29, "elapsed_time": "0:11:31", "remaining_time": "0:08:35"}
170
+ {"current_steps": 170, "total_steps": 295, "loss": 0.4571, "lr": 4.6150915252705005e-06, "epoch": 0.5772495755517827, "percentage": 57.63, "elapsed_time": "0:11:35", "remaining_time": "0:08:31"}
171
+ {"current_steps": 171, "total_steps": 295, "loss": 0.4701, "lr": 4.556020523385326e-06, "epoch": 0.5806451612903226, "percentage": 57.97, "elapsed_time": "0:11:38", "remaining_time": "0:08:26"}
172
+ {"current_steps": 172, "total_steps": 295, "loss": 0.477, "lr": 4.497011918801347e-06, "epoch": 0.5840407470288624, "percentage": 58.31, "elapsed_time": "0:11:43", "remaining_time": "0:08:23"}
173
+ {"current_steps": 173, "total_steps": 295, "loss": 0.4963, "lr": 4.438074004644407e-06, "epoch": 0.5874363327674024, "percentage": 58.64, "elapsed_time": "0:11:47", "remaining_time": "0:08:18"}
174
+ {"current_steps": 174, "total_steps": 295, "loss": 0.4502, "lr": 4.379215064105454e-06, "epoch": 0.5908319185059423, "percentage": 58.98, "elapsed_time": "0:11:50", "remaining_time": "0:08:14"}
175
+ {"current_steps": 175, "total_steps": 295, "loss": 0.4667, "lr": 4.32044336927641e-06, "epoch": 0.5942275042444821, "percentage": 59.32, "elapsed_time": "0:11:55", "remaining_time": "0:08:10"}
176
+ {"current_steps": 176, "total_steps": 295, "loss": 0.469, "lr": 4.261767179987595e-06, "epoch": 0.597623089983022, "percentage": 59.66, "elapsed_time": "0:11:59", "remaining_time": "0:08:06"}
177
+ {"current_steps": 177, "total_steps": 295, "loss": 0.4549, "lr": 4.203194742646893e-06, "epoch": 0.601018675721562, "percentage": 60.0, "elapsed_time": "0:12:03", "remaining_time": "0:08:02"}
178
+ {"current_steps": 178, "total_steps": 295, "loss": 0.4589, "lr": 4.1447342890807905e-06, "epoch": 0.6044142614601019, "percentage": 60.34, "elapsed_time": "0:12:07", "remaining_time": "0:07:57"}
179
+ {"current_steps": 179, "total_steps": 295, "loss": 0.4525, "lr": 4.086394035377463e-06, "epoch": 0.6078098471986417, "percentage": 60.68, "elapsed_time": "0:12:10", "remaining_time": "0:07:53"}
180
+ {"current_steps": 180, "total_steps": 295, "loss": 0.4652, "lr": 4.028182180732088e-06, "epoch": 0.6112054329371817, "percentage": 61.02, "elapsed_time": "0:12:17", "remaining_time": "0:07:51"}
181
+ {"current_steps": 181, "total_steps": 295, "loss": 0.4887, "lr": 3.970106906294509e-06, "epoch": 0.6146010186757216, "percentage": 61.36, "elapsed_time": "0:12:22", "remaining_time": "0:07:47"}
182
+ {"current_steps": 182, "total_steps": 295, "loss": 0.4928, "lr": 3.912176374019462e-06, "epoch": 0.6179966044142614, "percentage": 61.69, "elapsed_time": "0:12:26", "remaining_time": "0:07:43"}
183
+ {"current_steps": 183, "total_steps": 295, "loss": 0.4613, "lr": 3.85439872551948e-06, "epoch": 0.6213921901528013, "percentage": 62.03, "elapsed_time": "0:12:29", "remaining_time": "0:07:38"}
184
+ {"current_steps": 184, "total_steps": 295, "loss": 0.4489, "lr": 3.796782080920659e-06, "epoch": 0.6247877758913413, "percentage": 62.37, "elapsed_time": "0:12:33", "remaining_time": "0:07:34"}
185
+ {"current_steps": 185, "total_steps": 295, "loss": 0.4433, "lr": 3.7393345377214584e-06, "epoch": 0.6281833616298812, "percentage": 62.71, "elapsed_time": "0:12:36", "remaining_time": "0:07:29"}
186
+ {"current_steps": 186, "total_steps": 295, "loss": 0.4665, "lr": 3.682064169654663e-06, "epoch": 0.631578947368421, "percentage": 63.05, "elapsed_time": "0:12:40", "remaining_time": "0:07:25"}
187
+ {"current_steps": 187, "total_steps": 295, "loss": 0.4363, "lr": 3.6249790255526916e-06, "epoch": 0.634974533106961, "percentage": 63.39, "elapsed_time": "0:12:44", "remaining_time": "0:07:21"}
188
+ {"current_steps": 188, "total_steps": 295, "loss": 0.4646, "lr": 3.568087128216414e-06, "epoch": 0.6383701188455009, "percentage": 63.73, "elapsed_time": "0:12:47", "remaining_time": "0:07:16"}
189
+ {"current_steps": 189, "total_steps": 295, "loss": 0.4571, "lr": 3.511396473287611e-06, "epoch": 0.6417657045840407, "percentage": 64.07, "elapsed_time": "0:12:51", "remaining_time": "0:07:12"}
190
+ {"current_steps": 190, "total_steps": 295, "loss": 0.4484, "lr": 3.4549150281252635e-06, "epoch": 0.6451612903225806, "percentage": 64.41, "elapsed_time": "0:12:54", "remaining_time": "0:07:08"}
191
+ {"current_steps": 191, "total_steps": 295, "loss": 0.457, "lr": 3.398650730685813e-06, "epoch": 0.6485568760611206, "percentage": 64.75, "elapsed_time": "0:12:58", "remaining_time": "0:07:03"}
192
+ {"current_steps": 192, "total_steps": 295, "loss": 0.4467, "lr": 3.3426114884075488e-06, "epoch": 0.6519524617996605, "percentage": 65.08, "elapsed_time": "0:13:03", "remaining_time": "0:07:00"}
193
+ {"current_steps": 193, "total_steps": 295, "loss": 0.4572, "lr": 3.2868051770992935e-06, "epoch": 0.6553480475382003, "percentage": 65.42, "elapsed_time": "0:13:08", "remaining_time": "0:06:56"}
194
+ {"current_steps": 194, "total_steps": 295, "loss": 0.4687, "lr": 3.2312396398335312e-06, "epoch": 0.6587436332767402, "percentage": 65.76, "elapsed_time": "0:13:14", "remaining_time": "0:06:53"}
195
+ {"current_steps": 195, "total_steps": 295, "loss": 0.4336, "lr": 3.175922685844125e-06, "epoch": 0.6621392190152802, "percentage": 66.1, "elapsed_time": "0:13:20", "remaining_time": "0:06:50"}
196
+ {"current_steps": 196, "total_steps": 295, "loss": 0.4486, "lr": 3.1208620894288105e-06, "epoch": 0.66553480475382, "percentage": 66.44, "elapsed_time": "0:13:23", "remaining_time": "0:06:45"}
197
+ {"current_steps": 197, "total_steps": 295, "loss": 0.4588, "lr": 3.0660655888565827e-06, "epoch": 0.6689303904923599, "percentage": 66.78, "elapsed_time": "0:13:28", "remaining_time": "0:06:42"}
198
+ {"current_steps": 198, "total_steps": 295, "loss": 0.4423, "lr": 3.0115408852801535e-06, "epoch": 0.6723259762308998, "percentage": 67.12, "elapsed_time": "0:13:31", "remaining_time": "0:06:37"}
199
+ {"current_steps": 199, "total_steps": 295, "loss": 0.444, "lr": 2.9572956416536267e-06, "epoch": 0.6757215619694398, "percentage": 67.46, "elapsed_time": "0:13:35", "remaining_time": "0:06:33"}
200
+ {"current_steps": 200, "total_steps": 295, "loss": 0.4556, "lr": 2.9033374816555338e-06, "epoch": 0.6791171477079796, "percentage": 67.8, "elapsed_time": "0:13:40", "remaining_time": "0:06:29"}
201
+ {"current_steps": 201, "total_steps": 295, "loss": 0.4727, "lr": 2.8496739886173994e-06, "epoch": 0.6825127334465195, "percentage": 68.14, "elapsed_time": "0:13:44", "remaining_time": "0:06:25"}
202
+ {"current_steps": 202, "total_steps": 295, "loss": 0.432, "lr": 2.7963127044579697e-06, "epoch": 0.6859083191850595, "percentage": 68.47, "elapsed_time": "0:13:47", "remaining_time": "0:06:20"}
203
+ {"current_steps": 203, "total_steps": 295, "loss": 0.4596, "lr": 2.743261128623269e-06, "epoch": 0.6893039049235993, "percentage": 68.81, "elapsed_time": "0:13:51", "remaining_time": "0:06:16"}
204
+ {"current_steps": 204, "total_steps": 295, "loss": 0.4679, "lr": 2.6905267170326143e-06, "epoch": 0.6926994906621392, "percentage": 69.15, "elapsed_time": "0:13:55", "remaining_time": "0:06:12"}
205
+ {"current_steps": 205, "total_steps": 295, "loss": 0.4631, "lr": 2.6381168810307536e-06, "epoch": 0.6960950764006791, "percentage": 69.49, "elapsed_time": "0:14:00", "remaining_time": "0:06:09"}
206
+ {"current_steps": 206, "total_steps": 295, "loss": 0.4475, "lr": 2.5860389863462765e-06, "epoch": 0.6994906621392191, "percentage": 69.83, "elapsed_time": "0:14:04", "remaining_time": "0:06:04"}
207
+ {"current_steps": 207, "total_steps": 295, "loss": 0.4529, "lr": 2.534300352056416e-06, "epoch": 0.7028862478777589, "percentage": 70.17, "elapsed_time": "0:14:10", "remaining_time": "0:06:01"}
208
+ {"current_steps": 208, "total_steps": 295, "loss": 0.4431, "lr": 2.4829082495584244e-06, "epoch": 0.7062818336162988, "percentage": 70.51, "elapsed_time": "0:14:13", "remaining_time": "0:05:57"}
209
+ {"current_steps": 209, "total_steps": 295, "loss": 0.4295, "lr": 2.4318699015476495e-06, "epoch": 0.7096774193548387, "percentage": 70.85, "elapsed_time": "0:14:19", "remaining_time": "0:05:53"}
210
+ {"current_steps": 210, "total_steps": 295, "loss": 0.4462, "lr": 2.3811924810024385e-06, "epoch": 0.7130730050933786, "percentage": 71.19, "elapsed_time": "0:14:23", "remaining_time": "0:05:49"}
211
+ {"current_steps": 211, "total_steps": 295, "loss": 0.4764, "lr": 2.330883110176049e-06, "epoch": 0.7164685908319185, "percentage": 71.53, "elapsed_time": "0:14:26", "remaining_time": "0:05:45"}
212
+ {"current_steps": 212, "total_steps": 295, "loss": 0.4245, "lr": 2.2809488595956746e-06, "epoch": 0.7198641765704584, "percentage": 71.86, "elapsed_time": "0:14:30", "remaining_time": "0:05:40"}
213
+ {"current_steps": 213, "total_steps": 295, "loss": 0.4618, "lr": 2.2313967470687593e-06, "epoch": 0.7232597623089984, "percentage": 72.2, "elapsed_time": "0:14:35", "remaining_time": "0:05:37"}
214
+ {"current_steps": 214, "total_steps": 295, "loss": 0.4614, "lr": 2.18223373669669e-06, "epoch": 0.7266553480475382, "percentage": 72.54, "elapsed_time": "0:14:39", "remaining_time": "0:05:32"}
215
+ {"current_steps": 215, "total_steps": 295, "loss": 0.462, "lr": 2.1334667378960642e-06, "epoch": 0.7300509337860781, "percentage": 72.88, "elapsed_time": "0:14:43", "remaining_time": "0:05:28"}
216
+ {"current_steps": 216, "total_steps": 295, "loss": 0.4634, "lr": 2.0851026044276405e-06, "epoch": 0.733446519524618, "percentage": 73.22, "elapsed_time": "0:14:49", "remaining_time": "0:05:25"}
217
+ {"current_steps": 217, "total_steps": 295, "loss": 0.463, "lr": 2.0371481334330913e-06, "epoch": 0.7368421052631579, "percentage": 73.56, "elapsed_time": "0:14:52", "remaining_time": "0:05:20"}
218
+ {"current_steps": 218, "total_steps": 295, "loss": 0.4788, "lr": 1.9896100644797316e-06, "epoch": 0.7402376910016978, "percentage": 73.9, "elapsed_time": "0:14:56", "remaining_time": "0:05:16"}
219
+ {"current_steps": 219, "total_steps": 295, "loss": 0.4634, "lr": 1.9424950786133414e-06, "epoch": 0.7436332767402377, "percentage": 74.24, "elapsed_time": "0:14:59", "remaining_time": "0:05:12"}
220
+ {"current_steps": 220, "total_steps": 295, "loss": 0.484, "lr": 1.8958097974191909e-06, "epoch": 0.7470288624787776, "percentage": 74.58, "elapsed_time": "0:15:03", "remaining_time": "0:05:08"}
221
+ {"current_steps": 221, "total_steps": 295, "loss": 0.4442, "lr": 1.8495607820914451e-06, "epoch": 0.7504244482173175, "percentage": 74.92, "elapsed_time": "0:15:07", "remaining_time": "0:05:03"}
222
+ {"current_steps": 222, "total_steps": 295, "loss": 0.4241, "lr": 1.8037545325110506e-06, "epoch": 0.7538200339558574, "percentage": 75.25, "elapsed_time": "0:15:12", "remaining_time": "0:05:00"}
223
+ {"current_steps": 223, "total_steps": 295, "loss": 0.4837, "lr": 1.7583974863322272e-06, "epoch": 0.7572156196943973, "percentage": 75.59, "elapsed_time": "0:15:16", "remaining_time": "0:04:55"}
224
+ {"current_steps": 224, "total_steps": 295, "loss": 0.4728, "lr": 1.7134960180777171e-06, "epoch": 0.7606112054329371, "percentage": 75.93, "elapsed_time": "0:15:19", "remaining_time": "0:04:51"}
225
+ {"current_steps": 225, "total_steps": 295, "loss": 0.4301, "lr": 1.6690564382429104e-06, "epoch": 0.7640067911714771, "percentage": 76.27, "elapsed_time": "0:15:25", "remaining_time": "0:04:47"}
226
+ {"current_steps": 226, "total_steps": 295, "loss": 0.4527, "lr": 1.6250849924089485e-06, "epoch": 0.767402376910017, "percentage": 76.61, "elapsed_time": "0:15:29", "remaining_time": "0:04:43"}
227
+ {"current_steps": 227, "total_steps": 295, "loss": 0.4543, "lr": 1.581587860364977e-06, "epoch": 0.7707979626485568, "percentage": 76.95, "elapsed_time": "0:15:32", "remaining_time": "0:04:39"}
228
+ {"current_steps": 228, "total_steps": 295, "loss": 0.4466, "lr": 1.5385711552396227e-06, "epoch": 0.7741935483870968, "percentage": 77.29, "elapsed_time": "0:15:36", "remaining_time": "0:04:35"}
229
+ {"current_steps": 229, "total_steps": 295, "loss": 0.4386, "lr": 1.4960409226418576e-06, "epoch": 0.7775891341256367, "percentage": 77.63, "elapsed_time": "0:15:40", "remaining_time": "0:04:31"}
230
+ {"current_steps": 230, "total_steps": 295, "loss": 0.4533, "lr": 1.4540031398113335e-06, "epoch": 0.7809847198641766, "percentage": 77.97, "elapsed_time": "0:15:44", "remaining_time": "0:04:26"}
231
+ {"current_steps": 231, "total_steps": 295, "loss": 0.4447, "lr": 1.4124637147783431e-06, "epoch": 0.7843803056027164, "percentage": 78.31, "elapsed_time": "0:15:49", "remaining_time": "0:04:22"}
232
+ {"current_steps": 232, "total_steps": 295, "loss": 0.4826, "lr": 1.371428485533498e-06, "epoch": 0.7877758913412564, "percentage": 78.64, "elapsed_time": "0:15:52", "remaining_time": "0:04:18"}
233
+ {"current_steps": 233, "total_steps": 295, "loss": 0.4486, "lr": 1.3309032192072463e-06, "epoch": 0.7911714770797963, "percentage": 78.98, "elapsed_time": "0:15:56", "remaining_time": "0:04:14"}
234
+ {"current_steps": 234, "total_steps": 295, "loss": 0.4396, "lr": 1.29089361125936e-06, "epoch": 0.7945670628183361, "percentage": 79.32, "elapsed_time": "0:16:01", "remaining_time": "0:04:10"}
235
+ {"current_steps": 235, "total_steps": 295, "loss": 0.4666, "lr": 1.251405284678488e-06, "epoch": 0.797962648556876, "percentage": 79.66, "elapsed_time": "0:16:05", "remaining_time": "0:04:06"}
236
+ {"current_steps": 236, "total_steps": 295, "loss": 0.4338, "lr": 1.2124437891918995e-06, "epoch": 0.801358234295416, "percentage": 80.0, "elapsed_time": "0:16:09", "remaining_time": "0:04:02"}
237
+ {"current_steps": 237, "total_steps": 295, "loss": 0.4609, "lr": 1.1740146004855141e-06, "epoch": 0.8047538200339559, "percentage": 80.34, "elapsed_time": "0:16:12", "remaining_time": "0:03:58"}
238
+ {"current_steps": 238, "total_steps": 295, "loss": 0.4505, "lr": 1.1361231194343436e-06, "epoch": 0.8081494057724957, "percentage": 80.68, "elapsed_time": "0:16:16", "remaining_time": "0:03:53"}
239
+ {"current_steps": 239, "total_steps": 295, "loss": 0.4626, "lr": 1.0987746713434578e-06, "epoch": 0.8115449915110357, "percentage": 81.02, "elapsed_time": "0:16:21", "remaining_time": "0:03:49"}
240
+ {"current_steps": 240, "total_steps": 295, "loss": 0.465, "lr": 1.0619745051995473e-06, "epoch": 0.8149405772495756, "percentage": 81.36, "elapsed_time": "0:16:25", "remaining_time": "0:03:45"}
241
+ {"current_steps": 241, "total_steps": 295, "loss": 0.4543, "lr": 1.0257277929332332e-06, "epoch": 0.8183361629881154, "percentage": 81.69, "elapsed_time": "0:16:29", "remaining_time": "0:03:41"}
242
+ {"current_steps": 242, "total_steps": 295, "loss": 0.4676, "lr": 9.900396286922025e-07, "epoch": 0.8217317487266553, "percentage": 82.03, "elapsed_time": "0:16:33", "remaining_time": "0:03:37"}
243
+ {"current_steps": 243, "total_steps": 295, "loss": 0.4366, "lr": 9.549150281252633e-07, "epoch": 0.8251273344651953, "percentage": 82.37, "elapsed_time": "0:16:37", "remaining_time": "0:03:33"}
244
+ {"current_steps": 244, "total_steps": 295, "loss": 0.4404, "lr": 9.203589276774438e-07, "epoch": 0.8285229202037352, "percentage": 82.71, "elapsed_time": "0:16:41", "remaining_time": "0:03:29"}
245
+ {"current_steps": 245, "total_steps": 295, "loss": 0.4447, "lr": 8.86376183896226e-07, "epoch": 0.831918505942275, "percentage": 83.05, "elapsed_time": "0:16:44", "remaining_time": "0:03:25"}
246
+ {"current_steps": 246, "total_steps": 295, "loss": 0.4488, "lr": 8.529715727489912e-07, "epoch": 0.8353140916808149, "percentage": 83.39, "elapsed_time": "0:16:48", "remaining_time": "0:03:20"}
247
+ {"current_steps": 247, "total_steps": 295, "loss": 0.4561, "lr": 8.201497889518073e-07, "epoch": 0.8387096774193549, "percentage": 83.73, "elapsed_time": "0:16:51", "remaining_time": "0:03:16"}
248
+ {"current_steps": 248, "total_steps": 295, "loss": 0.4556, "lr": 7.879154453096305e-07, "epoch": 0.8421052631578947, "percentage": 84.07, "elapsed_time": "0:16:55", "remaining_time": "0:03:12"}
249
+ {"current_steps": 249, "total_steps": 295, "loss": 0.4333, "lr": 7.562730720680111e-07, "epoch": 0.8455008488964346, "percentage": 84.41, "elapsed_time": "0:16:59", "remaining_time": "0:03:08"}
250
+ {"current_steps": 250, "total_steps": 295, "loss": 0.4512, "lr": 7.25227116276413e-07, "epoch": 0.8488964346349746, "percentage": 84.75, "elapsed_time": "0:17:02", "remaining_time": "0:03:04"}
251
+ {"current_steps": 251, "total_steps": 295, "loss": 0.4492, "lr": 6.947819411632223e-07, "epoch": 0.8522920203735145, "percentage": 85.08, "elapsed_time": "0:17:06", "remaining_time": "0:02:59"}
252
+ {"current_steps": 252, "total_steps": 295, "loss": 0.4633, "lr": 6.649418255225298e-07, "epoch": 0.8556876061120543, "percentage": 85.42, "elapsed_time": "0:17:10", "remaining_time": "0:02:55"}
253
+ {"current_steps": 253, "total_steps": 295, "loss": 0.4418, "lr": 6.357109631127889e-07, "epoch": 0.8590831918505942, "percentage": 85.76, "elapsed_time": "0:17:13", "remaining_time": "0:02:51"}
254
+ {"current_steps": 254, "total_steps": 295, "loss": 0.4572, "lr": 6.07093462067419e-07, "epoch": 0.8624787775891342, "percentage": 86.1, "elapsed_time": "0:17:17", "remaining_time": "0:02:47"}
255
+ {"current_steps": 255, "total_steps": 295, "loss": 0.4775, "lr": 5.79093344317449e-07, "epoch": 0.865874363327674, "percentage": 86.44, "elapsed_time": "0:17:20", "remaining_time": "0:02:43"}
256
+ {"current_steps": 256, "total_steps": 295, "loss": 0.4313, "lr": 5.517145450262639e-07, "epoch": 0.8692699490662139, "percentage": 86.78, "elapsed_time": "0:17:27", "remaining_time": "0:02:39"}
257
+ {"current_steps": 257, "total_steps": 295, "loss": 0.4397, "lr": 5.249609120365579e-07, "epoch": 0.8726655348047538, "percentage": 87.12, "elapsed_time": "0:17:31", "remaining_time": "0:02:35"}
258
+ {"current_steps": 258, "total_steps": 295, "loss": 0.4496, "lr": 4.988362053295564e-07, "epoch": 0.8760611205432938, "percentage": 87.46, "elapsed_time": "0:17:35", "remaining_time": "0:02:31"}
259
+ {"current_steps": 259, "total_steps": 295, "loss": 0.4303, "lr": 4.733440964965791e-07, "epoch": 0.8794567062818336, "percentage": 87.8, "elapsed_time": "0:17:40", "remaining_time": "0:02:27"}
260
+ {"current_steps": 260, "total_steps": 295, "loss": 0.4268, "lr": 4.484881682230341e-07, "epoch": 0.8828522920203735, "percentage": 88.14, "elapsed_time": "0:17:43", "remaining_time": "0:02:23"}
261
+ {"current_steps": 261, "total_steps": 295, "loss": 0.4514, "lr": 4.242719137849077e-07, "epoch": 0.8862478777589134, "percentage": 88.47, "elapsed_time": "0:17:47", "remaining_time": "0:02:19"}
262
+ {"current_steps": 262, "total_steps": 295, "loss": 0.4475, "lr": 4.00698736557808e-07, "epoch": 0.8896434634974533, "percentage": 88.81, "elapsed_time": "0:17:52", "remaining_time": "0:02:15"}
263
+ {"current_steps": 263, "total_steps": 295, "loss": 0.4479, "lr": 3.777719495386567e-07, "epoch": 0.8930390492359932, "percentage": 89.15, "elapsed_time": "0:17:56", "remaining_time": "0:02:10"}
264
+ {"current_steps": 264, "total_steps": 295, "loss": 0.4563, "lr": 3.5549477488007853e-07, "epoch": 0.8964346349745331, "percentage": 89.49, "elapsed_time": "0:18:00", "remaining_time": "0:02:06"}
265
+ {"current_steps": 265, "total_steps": 295, "loss": 0.4447, "lr": 3.3387034343755063e-07, "epoch": 0.8998302207130731, "percentage": 89.83, "elapsed_time": "0:18:06", "remaining_time": "0:02:03"}
266
+ {"current_steps": 266, "total_steps": 295, "loss": 0.4416, "lr": 3.1290169432939556e-07, "epoch": 0.9032258064516129, "percentage": 90.17, "elapsed_time": "0:18:10", "remaining_time": "0:01:58"}
267
+ {"current_steps": 267, "total_steps": 295, "loss": 0.4263, "lr": 2.925917745096568e-07, "epoch": 0.9066213921901528, "percentage": 90.51, "elapsed_time": "0:18:14", "remaining_time": "0:01:54"}
268
+ {"current_steps": 268, "total_steps": 295, "loss": 0.4635, "lr": 2.7294343835393366e-07, "epoch": 0.9100169779286927, "percentage": 90.85, "elapsed_time": "0:18:18", "remaining_time": "0:01:50"}
269
+ {"current_steps": 269, "total_steps": 295, "loss": 0.4387, "lr": 2.539594472582213e-07, "epoch": 0.9134125636672326, "percentage": 91.19, "elapsed_time": "0:18:21", "remaining_time": "0:01:46"}
270
+ {"current_steps": 270, "total_steps": 295, "loss": 0.4409, "lr": 2.3564246925082358e-07, "epoch": 0.9168081494057725, "percentage": 91.53, "elapsed_time": "0:18:25", "remaining_time": "0:01:42"}
271
+ {"current_steps": 271, "total_steps": 295, "loss": 0.422, "lr": 2.179950786173879e-07, "epoch": 0.9202037351443124, "percentage": 91.86, "elapsed_time": "0:18:29", "remaining_time": "0:01:38"}
272
+ {"current_steps": 272, "total_steps": 295, "loss": 0.4533, "lr": 2.01019755539108e-07, "epoch": 0.9235993208828522, "percentage": 92.2, "elapsed_time": "0:18:33", "remaining_time": "0:01:34"}
273
+ {"current_steps": 273, "total_steps": 295, "loss": 0.4504, "lr": 1.8471888574415953e-07, "epoch": 0.9269949066213922, "percentage": 92.54, "elapsed_time": "0:18:37", "remaining_time": "0:01:30"}
274
+ {"current_steps": 274, "total_steps": 295, "loss": 0.4616, "lr": 1.690947601724091e-07, "epoch": 0.9303904923599321, "percentage": 92.88, "elapsed_time": "0:18:41", "remaining_time": "0:01:25"}
275
+ {"current_steps": 275, "total_steps": 295, "loss": 0.4362, "lr": 1.5414957465343883e-07, "epoch": 0.933786078098472, "percentage": 93.22, "elapsed_time": "0:18:44", "remaining_time": "0:01:21"}
276
+ {"current_steps": 276, "total_steps": 295, "loss": 0.4476, "lr": 1.3988542959794627e-07, "epoch": 0.9371816638370118, "percentage": 93.56, "elapsed_time": "0:18:50", "remaining_time": "0:01:17"}
277
+ {"current_steps": 277, "total_steps": 295, "loss": 0.4317, "lr": 1.2630432970255014e-07, "epoch": 0.9405772495755518, "percentage": 93.9, "elapsed_time": "0:18:54", "remaining_time": "0:01:13"}
278
+ {"current_steps": 278, "total_steps": 295, "loss": 0.4374, "lr": 1.1340818366804728e-07, "epoch": 0.9439728353140917, "percentage": 94.24, "elapsed_time": "0:18:58", "remaining_time": "0:01:09"}
279
+ {"current_steps": 279, "total_steps": 295, "loss": 0.4463, "lr": 1.0119880393116177e-07, "epoch": 0.9473684210526315, "percentage": 94.58, "elapsed_time": "0:19:01", "remaining_time": "0:01:05"}
280
+ {"current_steps": 280, "total_steps": 295, "loss": 0.4534, "lr": 8.967790640982466e-08, "epoch": 0.9507640067911715, "percentage": 94.92, "elapsed_time": "0:19:05", "remaining_time": "0:01:01"}
281
+ {"current_steps": 281, "total_steps": 295, "loss": 0.4474, "lr": 7.884711026201586e-08, "epoch": 0.9541595925297114, "percentage": 95.25, "elapsed_time": "0:19:09", "remaining_time": "0:00:57"}
282
+ {"current_steps": 282, "total_steps": 295, "loss": 0.4735, "lr": 6.870793765820783e-08, "epoch": 0.9575551782682513, "percentage": 95.59, "elapsed_time": "0:19:13", "remaining_time": "0:00:53"}
283
+ {"current_steps": 283, "total_steps": 295, "loss": 0.4506, "lr": 5.92618135674361e-08, "epoch": 0.9609507640067911, "percentage": 95.93, "elapsed_time": "0:19:17", "remaining_time": "0:00:49"}
284
+ {"current_steps": 284, "total_steps": 295, "loss": 0.4677, "lr": 5.0510065557034526e-08, "epoch": 0.9643463497453311, "percentage": 96.27, "elapsed_time": "0:19:21", "remaining_time": "0:00:45"}
285
+ {"current_steps": 285, "total_steps": 295, "loss": 0.4582, "lr": 4.245392360605727e-08, "epoch": 0.967741935483871, "percentage": 96.61, "elapsed_time": "0:19:27", "remaining_time": "0:00:40"}
286
+ {"current_steps": 286, "total_steps": 295, "loss": 0.4505, "lr": 3.5094519932415417e-08, "epoch": 0.9711375212224108, "percentage": 96.95, "elapsed_time": "0:19:31", "remaining_time": "0:00:36"}
287
+ {"current_steps": 287, "total_steps": 295, "loss": 0.4538, "lr": 2.843288883375539e-08, "epoch": 0.9745331069609507, "percentage": 97.29, "elapsed_time": "0:19:35", "remaining_time": "0:00:32"}
288
+ {"current_steps": 288, "total_steps": 295, "loss": 0.4432, "lr": 2.2469966542096323e-08, "epoch": 0.9779286926994907, "percentage": 97.63, "elapsed_time": "0:19:40", "remaining_time": "0:00:28"}
289
+ {"current_steps": 289, "total_steps": 295, "loss": 0.4567, "lr": 1.7206591092253642e-08, "epoch": 0.9813242784380306, "percentage": 97.97, "elapsed_time": "0:19:43", "remaining_time": "0:00:24"}
290
+ {"current_steps": 290, "total_steps": 295, "loss": 0.4317, "lr": 1.264350220405719e-08, "epoch": 0.9847198641765704, "percentage": 98.31, "elapsed_time": "0:19:48", "remaining_time": "0:00:20"}
291
+ {"current_steps": 291, "total_steps": 295, "loss": 0.4555, "lr": 8.781341178393244e-09, "epoch": 0.9881154499151104, "percentage": 98.64, "elapsed_time": "0:19:52", "remaining_time": "0:00:16"}
292
+ {"current_steps": 292, "total_steps": 295, "loss": 0.4606, "lr": 5.620650807073857e-09, "epoch": 0.9915110356536503, "percentage": 98.98, "elapsed_time": "0:19:55", "remaining_time": "0:00:12"}
293
+ {"current_steps": 293, "total_steps": 295, "loss": 0.4408, "lr": 3.1618752965534295e-09, "epoch": 0.9949066213921901, "percentage": 99.32, "elapsed_time": "0:20:00", "remaining_time": "0:00:08"}
294
+ {"current_steps": 294, "total_steps": 295, "loss": 0.4157, "lr": 1.4053602054991954e-09, "epoch": 0.99830220713073, "percentage": 99.66, "elapsed_time": "0:20:04", "remaining_time": "0:00:04"}
295
+ {"current_steps": 295, "total_steps": 295, "loss": 0.4174, "lr": 3.513523962256349e-10, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "0:20:07", "remaining_time": "0:00:00"}
296
+ {"current_steps": 295, "total_steps": 295, "epoch": 1.0, "percentage": 100.0, "elapsed_time": "0:20:49", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2108 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 1.0,
6
+ "eval_steps": 500,
7
+ "global_step": 295,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.003395585738539898,
14
+ "grad_norm": 6.320343624704885,
15
+ "learning_rate": 0.0,
16
+ "loss": 0.9899,
17
+ "step": 1
18
+ },
19
+ {
20
+ "epoch": 0.006791171477079796,
21
+ "grad_norm": 16.545756923443665,
22
+ "learning_rate": 3.3333333333333335e-07,
23
+ "loss": 0.9683,
24
+ "step": 2
25
+ },
26
+ {
27
+ "epoch": 0.010186757215619695,
28
+ "grad_norm": 6.124626290117518,
29
+ "learning_rate": 6.666666666666667e-07,
30
+ "loss": 0.9722,
31
+ "step": 3
32
+ },
33
+ {
34
+ "epoch": 0.013582342954159592,
35
+ "grad_norm": 6.907471369837716,
36
+ "learning_rate": 1.0000000000000002e-06,
37
+ "loss": 0.9667,
38
+ "step": 4
39
+ },
40
+ {
41
+ "epoch": 0.01697792869269949,
42
+ "grad_norm": 6.275431523928528,
43
+ "learning_rate": 1.3333333333333334e-06,
44
+ "loss": 0.9814,
45
+ "step": 5
46
+ },
47
+ {
48
+ "epoch": 0.02037351443123939,
49
+ "grad_norm": 6.074623009583982,
50
+ "learning_rate": 1.6666666666666667e-06,
51
+ "loss": 0.9502,
52
+ "step": 6
53
+ },
54
+ {
55
+ "epoch": 0.023769100169779286,
56
+ "grad_norm": 89.99670872123082,
57
+ "learning_rate": 2.0000000000000003e-06,
58
+ "loss": 0.9411,
59
+ "step": 7
60
+ },
61
+ {
62
+ "epoch": 0.027164685908319185,
63
+ "grad_norm": 6.224698090768632,
64
+ "learning_rate": 2.3333333333333336e-06,
65
+ "loss": 0.9075,
66
+ "step": 8
67
+ },
68
+ {
69
+ "epoch": 0.030560271646859084,
70
+ "grad_norm": 5.913600661145363,
71
+ "learning_rate": 2.666666666666667e-06,
72
+ "loss": 0.8979,
73
+ "step": 9
74
+ },
75
+ {
76
+ "epoch": 0.03395585738539898,
77
+ "grad_norm": 7.267004601688997,
78
+ "learning_rate": 3e-06,
79
+ "loss": 0.8589,
80
+ "step": 10
81
+ },
82
+ {
83
+ "epoch": 0.03735144312393888,
84
+ "grad_norm": 4.920293108168373,
85
+ "learning_rate": 3.3333333333333333e-06,
86
+ "loss": 0.8797,
87
+ "step": 11
88
+ },
89
+ {
90
+ "epoch": 0.04074702886247878,
91
+ "grad_norm": 7.0251120254603325,
92
+ "learning_rate": 3.6666666666666666e-06,
93
+ "loss": 0.785,
94
+ "step": 12
95
+ },
96
+ {
97
+ "epoch": 0.044142614601018676,
98
+ "grad_norm": 4.620531235688851,
99
+ "learning_rate": 4.000000000000001e-06,
100
+ "loss": 0.7932,
101
+ "step": 13
102
+ },
103
+ {
104
+ "epoch": 0.04753820033955857,
105
+ "grad_norm": 8.017042756220283,
106
+ "learning_rate": 4.333333333333334e-06,
107
+ "loss": 0.7878,
108
+ "step": 14
109
+ },
110
+ {
111
+ "epoch": 0.050933786078098474,
112
+ "grad_norm": 7.168335735527789,
113
+ "learning_rate": 4.666666666666667e-06,
114
+ "loss": 0.7854,
115
+ "step": 15
116
+ },
117
+ {
118
+ "epoch": 0.05432937181663837,
119
+ "grad_norm": 2.6102783506270137,
120
+ "learning_rate": 5e-06,
121
+ "loss": 0.7194,
122
+ "step": 16
123
+ },
124
+ {
125
+ "epoch": 0.057724957555178265,
126
+ "grad_norm": 2.8281275726026074,
127
+ "learning_rate": 5.333333333333334e-06,
128
+ "loss": 0.7059,
129
+ "step": 17
130
+ },
131
+ {
132
+ "epoch": 0.06112054329371817,
133
+ "grad_norm": 2.1330350735122505,
134
+ "learning_rate": 5.666666666666667e-06,
135
+ "loss": 0.7236,
136
+ "step": 18
137
+ },
138
+ {
139
+ "epoch": 0.06451612903225806,
140
+ "grad_norm": 3.3828007198200694,
141
+ "learning_rate": 6e-06,
142
+ "loss": 0.7133,
143
+ "step": 19
144
+ },
145
+ {
146
+ "epoch": 0.06791171477079797,
147
+ "grad_norm": 1.733407506222324,
148
+ "learning_rate": 6.333333333333333e-06,
149
+ "loss": 0.6807,
150
+ "step": 20
151
+ },
152
+ {
153
+ "epoch": 0.07130730050933787,
154
+ "grad_norm": 1.5732888077219966,
155
+ "learning_rate": 6.666666666666667e-06,
156
+ "loss": 0.6717,
157
+ "step": 21
158
+ },
159
+ {
160
+ "epoch": 0.07470288624787776,
161
+ "grad_norm": 2.869294784618442,
162
+ "learning_rate": 7e-06,
163
+ "loss": 0.6836,
164
+ "step": 22
165
+ },
166
+ {
167
+ "epoch": 0.07809847198641766,
168
+ "grad_norm": 1.539403446338188,
169
+ "learning_rate": 7.333333333333333e-06,
170
+ "loss": 0.656,
171
+ "step": 23
172
+ },
173
+ {
174
+ "epoch": 0.08149405772495756,
175
+ "grad_norm": 1.390711875015812,
176
+ "learning_rate": 7.666666666666667e-06,
177
+ "loss": 0.652,
178
+ "step": 24
179
+ },
180
+ {
181
+ "epoch": 0.08488964346349745,
182
+ "grad_norm": 1.4133171062619616,
183
+ "learning_rate": 8.000000000000001e-06,
184
+ "loss": 0.6237,
185
+ "step": 25
186
+ },
187
+ {
188
+ "epoch": 0.08828522920203735,
189
+ "grad_norm": 1.2675100082185478,
190
+ "learning_rate": 8.333333333333334e-06,
191
+ "loss": 0.5999,
192
+ "step": 26
193
+ },
194
+ {
195
+ "epoch": 0.09168081494057725,
196
+ "grad_norm": 1.4010718655700412,
197
+ "learning_rate": 8.666666666666668e-06,
198
+ "loss": 0.6229,
199
+ "step": 27
200
+ },
201
+ {
202
+ "epoch": 0.09507640067911714,
203
+ "grad_norm": 1.4302934299507057,
204
+ "learning_rate": 9e-06,
205
+ "loss": 0.5909,
206
+ "step": 28
207
+ },
208
+ {
209
+ "epoch": 0.09847198641765705,
210
+ "grad_norm": 1.3417528783040296,
211
+ "learning_rate": 9.333333333333334e-06,
212
+ "loss": 0.6236,
213
+ "step": 29
214
+ },
215
+ {
216
+ "epoch": 0.10186757215619695,
217
+ "grad_norm": 1.2550326498175997,
218
+ "learning_rate": 9.666666666666667e-06,
219
+ "loss": 0.5978,
220
+ "step": 30
221
+ },
222
+ {
223
+ "epoch": 0.10526315789473684,
224
+ "grad_norm": 1.2949203936642646,
225
+ "learning_rate": 1e-05,
226
+ "loss": 0.6206,
227
+ "step": 31
228
+ },
229
+ {
230
+ "epoch": 0.10865874363327674,
231
+ "grad_norm": 1.3404316606873072,
232
+ "learning_rate": 9.999648647603774e-06,
233
+ "loss": 0.6103,
234
+ "step": 32
235
+ },
236
+ {
237
+ "epoch": 0.11205432937181664,
238
+ "grad_norm": 1.2464543142979883,
239
+ "learning_rate": 9.998594639794502e-06,
240
+ "loss": 0.5823,
241
+ "step": 33
242
+ },
243
+ {
244
+ "epoch": 0.11544991511035653,
245
+ "grad_norm": 1.28881560865773,
246
+ "learning_rate": 9.996838124703448e-06,
247
+ "loss": 0.5724,
248
+ "step": 34
249
+ },
250
+ {
251
+ "epoch": 0.11884550084889643,
252
+ "grad_norm": 1.236715524722473,
253
+ "learning_rate": 9.994379349192927e-06,
254
+ "loss": 0.6006,
255
+ "step": 35
256
+ },
257
+ {
258
+ "epoch": 0.12224108658743633,
259
+ "grad_norm": 1.1748122882399419,
260
+ "learning_rate": 9.991218658821609e-06,
261
+ "loss": 0.5834,
262
+ "step": 36
263
+ },
264
+ {
265
+ "epoch": 0.12563667232597622,
266
+ "grad_norm": 1.3335055567165142,
267
+ "learning_rate": 9.987356497795944e-06,
268
+ "loss": 0.5781,
269
+ "step": 37
270
+ },
271
+ {
272
+ "epoch": 0.12903225806451613,
273
+ "grad_norm": 1.2573177388462913,
274
+ "learning_rate": 9.982793408907747e-06,
275
+ "loss": 0.5613,
276
+ "step": 38
277
+ },
278
+ {
279
+ "epoch": 0.13242784380305603,
280
+ "grad_norm": 614.0974853168025,
281
+ "learning_rate": 9.977530033457906e-06,
282
+ "loss": 0.5518,
283
+ "step": 39
284
+ },
285
+ {
286
+ "epoch": 0.13582342954159593,
287
+ "grad_norm": 1.8130801394796245,
288
+ "learning_rate": 9.971567111166246e-06,
289
+ "loss": 0.5875,
290
+ "step": 40
291
+ },
292
+ {
293
+ "epoch": 0.13921901528013583,
294
+ "grad_norm": 3.021540974319598,
295
+ "learning_rate": 9.964905480067585e-06,
296
+ "loss": 0.5666,
297
+ "step": 41
298
+ },
299
+ {
300
+ "epoch": 0.14261460101867574,
301
+ "grad_norm": 1.4307732415515269,
302
+ "learning_rate": 9.957546076393944e-06,
303
+ "loss": 0.5829,
304
+ "step": 42
305
+ },
306
+ {
307
+ "epoch": 0.1460101867572156,
308
+ "grad_norm": 1.2948953962351142,
309
+ "learning_rate": 9.949489934442966e-06,
310
+ "loss": 0.592,
311
+ "step": 43
312
+ },
313
+ {
314
+ "epoch": 0.1494057724957555,
315
+ "grad_norm": 1.28013431192408,
316
+ "learning_rate": 9.940738186432565e-06,
317
+ "loss": 0.554,
318
+ "step": 44
319
+ },
320
+ {
321
+ "epoch": 0.15280135823429541,
322
+ "grad_norm": 1.364283730780204,
323
+ "learning_rate": 9.931292062341793e-06,
324
+ "loss": 0.5563,
325
+ "step": 45
326
+ },
327
+ {
328
+ "epoch": 0.15619694397283532,
329
+ "grad_norm": 1.1789812327801241,
330
+ "learning_rate": 9.921152889737985e-06,
331
+ "loss": 0.5349,
332
+ "step": 46
333
+ },
334
+ {
335
+ "epoch": 0.15959252971137522,
336
+ "grad_norm": 1.272995264216437,
337
+ "learning_rate": 9.910322093590177e-06,
338
+ "loss": 0.5885,
339
+ "step": 47
340
+ },
341
+ {
342
+ "epoch": 0.16298811544991512,
343
+ "grad_norm": 1.2071797315958241,
344
+ "learning_rate": 9.898801196068839e-06,
345
+ "loss": 0.555,
346
+ "step": 48
347
+ },
348
+ {
349
+ "epoch": 0.166383701188455,
350
+ "grad_norm": 1.2466300266575705,
351
+ "learning_rate": 9.886591816331953e-06,
352
+ "loss": 0.5412,
353
+ "step": 49
354
+ },
355
+ {
356
+ "epoch": 0.1697792869269949,
357
+ "grad_norm": 1.1951025261533827,
358
+ "learning_rate": 9.87369567029745e-06,
359
+ "loss": 0.5266,
360
+ "step": 50
361
+ },
362
+ {
363
+ "epoch": 0.1731748726655348,
364
+ "grad_norm": 1.1409578758956818,
365
+ "learning_rate": 9.860114570402055e-06,
366
+ "loss": 0.5364,
367
+ "step": 51
368
+ },
369
+ {
370
+ "epoch": 0.1765704584040747,
371
+ "grad_norm": 1.194968225105999,
372
+ "learning_rate": 9.845850425346563e-06,
373
+ "loss": 0.544,
374
+ "step": 52
375
+ },
376
+ {
377
+ "epoch": 0.1799660441426146,
378
+ "grad_norm": 1.105032206844106,
379
+ "learning_rate": 9.830905239827592e-06,
380
+ "loss": 0.526,
381
+ "step": 53
382
+ },
383
+ {
384
+ "epoch": 0.1833616298811545,
385
+ "grad_norm": 1.0951143452700236,
386
+ "learning_rate": 9.815281114255841e-06,
387
+ "loss": 0.5384,
388
+ "step": 54
389
+ },
390
+ {
391
+ "epoch": 0.1867572156196944,
392
+ "grad_norm": 1.237628011550819,
393
+ "learning_rate": 9.798980244460892e-06,
394
+ "loss": 0.5216,
395
+ "step": 55
396
+ },
397
+ {
398
+ "epoch": 0.19015280135823429,
399
+ "grad_norm": 1.2049941993061364,
400
+ "learning_rate": 9.782004921382612e-06,
401
+ "loss": 0.5602,
402
+ "step": 56
403
+ },
404
+ {
405
+ "epoch": 0.1935483870967742,
406
+ "grad_norm": 1.0963401467603866,
407
+ "learning_rate": 9.764357530749178e-06,
408
+ "loss": 0.5211,
409
+ "step": 57
410
+ },
411
+ {
412
+ "epoch": 0.1969439728353141,
413
+ "grad_norm": 1.1153693044651702,
414
+ "learning_rate": 9.74604055274178e-06,
415
+ "loss": 0.5408,
416
+ "step": 58
417
+ },
418
+ {
419
+ "epoch": 0.200339558573854,
420
+ "grad_norm": 1.1446372149192978,
421
+ "learning_rate": 9.727056561646067e-06,
422
+ "loss": 0.5297,
423
+ "step": 59
424
+ },
425
+ {
426
+ "epoch": 0.2037351443123939,
427
+ "grad_norm": 1.1379654178567067,
428
+ "learning_rate": 9.707408225490343e-06,
429
+ "loss": 0.5241,
430
+ "step": 60
431
+ },
432
+ {
433
+ "epoch": 0.2071307300509338,
434
+ "grad_norm": 1.1133344090884694,
435
+ "learning_rate": 9.687098305670606e-06,
436
+ "loss": 0.5373,
437
+ "step": 61
438
+ },
439
+ {
440
+ "epoch": 0.21052631578947367,
441
+ "grad_norm": 1.14642700050245,
442
+ "learning_rate": 9.66612965656245e-06,
443
+ "loss": 0.5397,
444
+ "step": 62
445
+ },
446
+ {
447
+ "epoch": 0.21392190152801357,
448
+ "grad_norm": 1.0251599768158084,
449
+ "learning_rate": 9.644505225119922e-06,
450
+ "loss": 0.5444,
451
+ "step": 63
452
+ },
453
+ {
454
+ "epoch": 0.21731748726655348,
455
+ "grad_norm": 0.9890854380699226,
456
+ "learning_rate": 9.622228050461345e-06,
457
+ "loss": 0.4984,
458
+ "step": 64
459
+ },
460
+ {
461
+ "epoch": 0.22071307300509338,
462
+ "grad_norm": 1.1117068387014335,
463
+ "learning_rate": 9.599301263442194e-06,
464
+ "loss": 0.5214,
465
+ "step": 65
466
+ },
467
+ {
468
+ "epoch": 0.22410865874363328,
469
+ "grad_norm": 1.1785090789686485,
470
+ "learning_rate": 9.575728086215093e-06,
471
+ "loss": 0.5265,
472
+ "step": 66
473
+ },
474
+ {
475
+ "epoch": 0.22750424448217318,
476
+ "grad_norm": 1.0565597242070186,
477
+ "learning_rate": 9.551511831776966e-06,
478
+ "loss": 0.5155,
479
+ "step": 67
480
+ },
481
+ {
482
+ "epoch": 0.23089983022071306,
483
+ "grad_norm": 1.2278748412937952,
484
+ "learning_rate": 9.526655903503423e-06,
485
+ "loss": 0.5277,
486
+ "step": 68
487
+ },
488
+ {
489
+ "epoch": 0.23429541595925296,
490
+ "grad_norm": 1.1213279864335683,
491
+ "learning_rate": 9.501163794670445e-06,
492
+ "loss": 0.5307,
493
+ "step": 69
494
+ },
495
+ {
496
+ "epoch": 0.23769100169779286,
497
+ "grad_norm": 1.1237007845490015,
498
+ "learning_rate": 9.475039087963443e-06,
499
+ "loss": 0.51,
500
+ "step": 70
501
+ },
502
+ {
503
+ "epoch": 0.24108658743633277,
504
+ "grad_norm": 1.0615774210564253,
505
+ "learning_rate": 9.448285454973739e-06,
506
+ "loss": 0.517,
507
+ "step": 71
508
+ },
509
+ {
510
+ "epoch": 0.24448217317487267,
511
+ "grad_norm": 1.2104405507373044,
512
+ "learning_rate": 9.420906655682553e-06,
513
+ "loss": 0.5223,
514
+ "step": 72
515
+ },
516
+ {
517
+ "epoch": 0.24787775891341257,
518
+ "grad_norm": 1.0985265075768098,
519
+ "learning_rate": 9.392906537932582e-06,
520
+ "loss": 0.5257,
521
+ "step": 73
522
+ },
523
+ {
524
+ "epoch": 0.25127334465195245,
525
+ "grad_norm": 1.0626836434651132,
526
+ "learning_rate": 9.364289036887214e-06,
527
+ "loss": 0.5187,
528
+ "step": 74
529
+ },
530
+ {
531
+ "epoch": 0.2546689303904924,
532
+ "grad_norm": 1.0234412989238388,
533
+ "learning_rate": 9.335058174477472e-06,
534
+ "loss": 0.5127,
535
+ "step": 75
536
+ },
537
+ {
538
+ "epoch": 0.25806451612903225,
539
+ "grad_norm": 1.0702433549745685,
540
+ "learning_rate": 9.305218058836778e-06,
541
+ "loss": 0.4966,
542
+ "step": 76
543
+ },
544
+ {
545
+ "epoch": 0.2614601018675722,
546
+ "grad_norm": 1.1228773632449267,
547
+ "learning_rate": 9.274772883723587e-06,
548
+ "loss": 0.5265,
549
+ "step": 77
550
+ },
551
+ {
552
+ "epoch": 0.26485568760611206,
553
+ "grad_norm": 1.0076068781933833,
554
+ "learning_rate": 9.24372692793199e-06,
555
+ "loss": 0.5003,
556
+ "step": 78
557
+ },
558
+ {
559
+ "epoch": 0.26825127334465193,
560
+ "grad_norm": 1.0987680518642855,
561
+ "learning_rate": 9.21208455469037e-06,
562
+ "loss": 0.4971,
563
+ "step": 79
564
+ },
565
+ {
566
+ "epoch": 0.27164685908319186,
567
+ "grad_norm": 1.1051172285171345,
568
+ "learning_rate": 9.179850211048193e-06,
569
+ "loss": 0.491,
570
+ "step": 80
571
+ },
572
+ {
573
+ "epoch": 0.27504244482173174,
574
+ "grad_norm": 1.0886761114445709,
575
+ "learning_rate": 9.14702842725101e-06,
576
+ "loss": 0.5033,
577
+ "step": 81
578
+ },
579
+ {
580
+ "epoch": 0.27843803056027167,
581
+ "grad_norm": 1.101989710769037,
582
+ "learning_rate": 9.113623816103775e-06,
583
+ "loss": 0.5157,
584
+ "step": 82
585
+ },
586
+ {
587
+ "epoch": 0.28183361629881154,
588
+ "grad_norm": 1.0846044794179348,
589
+ "learning_rate": 9.079641072322555e-06,
590
+ "loss": 0.5103,
591
+ "step": 83
592
+ },
593
+ {
594
+ "epoch": 0.28522920203735147,
595
+ "grad_norm": 1.0994243701550208,
596
+ "learning_rate": 9.045084971874738e-06,
597
+ "loss": 0.5118,
598
+ "step": 84
599
+ },
600
+ {
601
+ "epoch": 0.28862478777589134,
602
+ "grad_norm": 0.982679989364588,
603
+ "learning_rate": 9.009960371307798e-06,
604
+ "loss": 0.4753,
605
+ "step": 85
606
+ },
607
+ {
608
+ "epoch": 0.2920203735144312,
609
+ "grad_norm": 1.0974557383394132,
610
+ "learning_rate": 8.974272207066767e-06,
611
+ "loss": 0.5065,
612
+ "step": 86
613
+ },
614
+ {
615
+ "epoch": 0.29541595925297115,
616
+ "grad_norm": 1.191767026119928,
617
+ "learning_rate": 8.938025494800454e-06,
618
+ "loss": 0.496,
619
+ "step": 87
620
+ },
621
+ {
622
+ "epoch": 0.298811544991511,
623
+ "grad_norm": 1.0863558052080193,
624
+ "learning_rate": 8.901225328656543e-06,
625
+ "loss": 0.4768,
626
+ "step": 88
627
+ },
628
+ {
629
+ "epoch": 0.30220713073005095,
630
+ "grad_norm": 1.0292339490206497,
631
+ "learning_rate": 8.863876880565656e-06,
632
+ "loss": 0.4982,
633
+ "step": 89
634
+ },
635
+ {
636
+ "epoch": 0.30560271646859083,
637
+ "grad_norm": 1.0263488228624327,
638
+ "learning_rate": 8.825985399514488e-06,
639
+ "loss": 0.49,
640
+ "step": 90
641
+ },
642
+ {
643
+ "epoch": 0.3089983022071307,
644
+ "grad_norm": 1.1083934275101859,
645
+ "learning_rate": 8.787556210808101e-06,
646
+ "loss": 0.508,
647
+ "step": 91
648
+ },
649
+ {
650
+ "epoch": 0.31239388794567063,
651
+ "grad_norm": 1.0759718773481644,
652
+ "learning_rate": 8.748594715321512e-06,
653
+ "loss": 0.493,
654
+ "step": 92
655
+ },
656
+ {
657
+ "epoch": 0.3157894736842105,
658
+ "grad_norm": 1.1275176872041444,
659
+ "learning_rate": 8.70910638874064e-06,
660
+ "loss": 0.5063,
661
+ "step": 93
662
+ },
663
+ {
664
+ "epoch": 0.31918505942275044,
665
+ "grad_norm": 1.0268180450930244,
666
+ "learning_rate": 8.669096780792754e-06,
667
+ "loss": 0.4953,
668
+ "step": 94
669
+ },
670
+ {
671
+ "epoch": 0.3225806451612903,
672
+ "grad_norm": 1.0957503547012442,
673
+ "learning_rate": 8.628571514466502e-06,
674
+ "loss": 0.5062,
675
+ "step": 95
676
+ },
677
+ {
678
+ "epoch": 0.32597623089983024,
679
+ "grad_norm": 1.0476160480506713,
680
+ "learning_rate": 8.587536285221656e-06,
681
+ "loss": 0.5254,
682
+ "step": 96
683
+ },
684
+ {
685
+ "epoch": 0.3293718166383701,
686
+ "grad_norm": 0.973467408933069,
687
+ "learning_rate": 8.545996860188668e-06,
688
+ "loss": 0.5107,
689
+ "step": 97
690
+ },
691
+ {
692
+ "epoch": 0.33276740237691,
693
+ "grad_norm": 0.9690640110532758,
694
+ "learning_rate": 8.503959077358143e-06,
695
+ "loss": 0.4767,
696
+ "step": 98
697
+ },
698
+ {
699
+ "epoch": 0.3361629881154499,
700
+ "grad_norm": 1.1272513638936046,
701
+ "learning_rate": 8.46142884476038e-06,
702
+ "loss": 0.4991,
703
+ "step": 99
704
+ },
705
+ {
706
+ "epoch": 0.3395585738539898,
707
+ "grad_norm": 1.089949180068211,
708
+ "learning_rate": 8.418412139635026e-06,
709
+ "loss": 0.5202,
710
+ "step": 100
711
+ },
712
+ {
713
+ "epoch": 0.34295415959252973,
714
+ "grad_norm": 1.0904976650398113,
715
+ "learning_rate": 8.374915007591053e-06,
716
+ "loss": 0.5019,
717
+ "step": 101
718
+ },
719
+ {
720
+ "epoch": 0.3463497453310696,
721
+ "grad_norm": 1.0132973869482662,
722
+ "learning_rate": 8.330943561757092e-06,
723
+ "loss": 0.4981,
724
+ "step": 102
725
+ },
726
+ {
727
+ "epoch": 0.34974533106960953,
728
+ "grad_norm": 1.0144345934272716,
729
+ "learning_rate": 8.286503981922284e-06,
730
+ "loss": 0.4828,
731
+ "step": 103
732
+ },
733
+ {
734
+ "epoch": 0.3531409168081494,
735
+ "grad_norm": 1.0708364693456955,
736
+ "learning_rate": 8.241602513667775e-06,
737
+ "loss": 0.4677,
738
+ "step": 104
739
+ },
740
+ {
741
+ "epoch": 0.3565365025466893,
742
+ "grad_norm": 1.0734641865863697,
743
+ "learning_rate": 8.19624546748895e-06,
744
+ "loss": 0.4991,
745
+ "step": 105
746
+ },
747
+ {
748
+ "epoch": 0.3599320882852292,
749
+ "grad_norm": 1.206730960165882,
750
+ "learning_rate": 8.150439217908557e-06,
751
+ "loss": 0.4874,
752
+ "step": 106
753
+ },
754
+ {
755
+ "epoch": 0.3633276740237691,
756
+ "grad_norm": 0.990112721589732,
757
+ "learning_rate": 8.104190202580811e-06,
758
+ "loss": 0.4613,
759
+ "step": 107
760
+ },
761
+ {
762
+ "epoch": 0.366723259762309,
763
+ "grad_norm": 1.135017537331381,
764
+ "learning_rate": 8.057504921386661e-06,
765
+ "loss": 0.4981,
766
+ "step": 108
767
+ },
768
+ {
769
+ "epoch": 0.3701188455008489,
770
+ "grad_norm": 1.1168906124405507,
771
+ "learning_rate": 8.010389935520269e-06,
772
+ "loss": 0.4812,
773
+ "step": 109
774
+ },
775
+ {
776
+ "epoch": 0.3735144312393888,
777
+ "grad_norm": 1.109365518761395,
778
+ "learning_rate": 7.962851866566912e-06,
779
+ "loss": 0.4901,
780
+ "step": 110
781
+ },
782
+ {
783
+ "epoch": 0.3769100169779287,
784
+ "grad_norm": 1.1121750297406146,
785
+ "learning_rate": 7.914897395572362e-06,
786
+ "loss": 0.4841,
787
+ "step": 111
788
+ },
789
+ {
790
+ "epoch": 0.38030560271646857,
791
+ "grad_norm": 1.10407244769018,
792
+ "learning_rate": 7.866533262103937e-06,
793
+ "loss": 0.4813,
794
+ "step": 112
795
+ },
796
+ {
797
+ "epoch": 0.3837011884550085,
798
+ "grad_norm": 1.109502100982312,
799
+ "learning_rate": 7.817766263303312e-06,
800
+ "loss": 0.4731,
801
+ "step": 113
802
+ },
803
+ {
804
+ "epoch": 0.3870967741935484,
805
+ "grad_norm": 1.0709239172717544,
806
+ "learning_rate": 7.768603252931243e-06,
807
+ "loss": 0.4988,
808
+ "step": 114
809
+ },
810
+ {
811
+ "epoch": 0.3904923599320883,
812
+ "grad_norm": 1.0915174076138725,
813
+ "learning_rate": 7.719051140404327e-06,
814
+ "loss": 0.4825,
815
+ "step": 115
816
+ },
817
+ {
818
+ "epoch": 0.3938879456706282,
819
+ "grad_norm": 1.0965896575304275,
820
+ "learning_rate": 7.669116889823955e-06,
821
+ "loss": 0.4742,
822
+ "step": 116
823
+ },
824
+ {
825
+ "epoch": 0.39728353140916806,
826
+ "grad_norm": 1.0241307032424805,
827
+ "learning_rate": 7.6188075189975644e-06,
828
+ "loss": 0.4743,
829
+ "step": 117
830
+ },
831
+ {
832
+ "epoch": 0.400679117147708,
833
+ "grad_norm": 0.9435780818732129,
834
+ "learning_rate": 7.568130098452352e-06,
835
+ "loss": 0.4739,
836
+ "step": 118
837
+ },
838
+ {
839
+ "epoch": 0.40407470288624786,
840
+ "grad_norm": 1.0248298007190784,
841
+ "learning_rate": 7.517091750441576e-06,
842
+ "loss": 0.4814,
843
+ "step": 119
844
+ },
845
+ {
846
+ "epoch": 0.4074702886247878,
847
+ "grad_norm": 1.0315243401737155,
848
+ "learning_rate": 7.465699647943586e-06,
849
+ "loss": 0.4805,
850
+ "step": 120
851
+ },
852
+ {
853
+ "epoch": 0.41086587436332767,
854
+ "grad_norm": 1.091443614674752,
855
+ "learning_rate": 7.413961013653725e-06,
856
+ "loss": 0.4953,
857
+ "step": 121
858
+ },
859
+ {
860
+ "epoch": 0.4142614601018676,
861
+ "grad_norm": 1.0680561484041602,
862
+ "learning_rate": 7.361883118969248e-06,
863
+ "loss": 0.4588,
864
+ "step": 122
865
+ },
866
+ {
867
+ "epoch": 0.41765704584040747,
868
+ "grad_norm": 0.9761759048656751,
869
+ "learning_rate": 7.309473282967387e-06,
870
+ "loss": 0.469,
871
+ "step": 123
872
+ },
873
+ {
874
+ "epoch": 0.42105263157894735,
875
+ "grad_norm": 1.0119186737160737,
876
+ "learning_rate": 7.256738871376733e-06,
877
+ "loss": 0.4812,
878
+ "step": 124
879
+ },
880
+ {
881
+ "epoch": 0.4244482173174873,
882
+ "grad_norm": 0.9670302415957511,
883
+ "learning_rate": 7.203687295542032e-06,
884
+ "loss": 0.456,
885
+ "step": 125
886
+ },
887
+ {
888
+ "epoch": 0.42784380305602715,
889
+ "grad_norm": 1.001107021910696,
890
+ "learning_rate": 7.1503260113826035e-06,
891
+ "loss": 0.4597,
892
+ "step": 126
893
+ },
894
+ {
895
+ "epoch": 0.4312393887945671,
896
+ "grad_norm": 1.024857257795839,
897
+ "learning_rate": 7.09666251834447e-06,
898
+ "loss": 0.4884,
899
+ "step": 127
900
+ },
901
+ {
902
+ "epoch": 0.43463497453310695,
903
+ "grad_norm": 1.0331435407774852,
904
+ "learning_rate": 7.042704358346375e-06,
905
+ "loss": 0.4581,
906
+ "step": 128
907
+ },
908
+ {
909
+ "epoch": 0.4380305602716469,
910
+ "grad_norm": 1.00793619623988,
911
+ "learning_rate": 6.988459114719849e-06,
912
+ "loss": 0.4672,
913
+ "step": 129
914
+ },
915
+ {
916
+ "epoch": 0.44142614601018676,
917
+ "grad_norm": 0.9145035341259203,
918
+ "learning_rate": 6.933934411143419e-06,
919
+ "loss": 0.4524,
920
+ "step": 130
921
+ },
922
+ {
923
+ "epoch": 0.44482173174872663,
924
+ "grad_norm": 1.1118101624400085,
925
+ "learning_rate": 6.879137910571191e-06,
926
+ "loss": 0.4738,
927
+ "step": 131
928
+ },
929
+ {
930
+ "epoch": 0.44821731748726656,
931
+ "grad_norm": 1.0044078413498474,
932
+ "learning_rate": 6.824077314155877e-06,
933
+ "loss": 0.4679,
934
+ "step": 132
935
+ },
936
+ {
937
+ "epoch": 0.45161290322580644,
938
+ "grad_norm": 1.0568591065810937,
939
+ "learning_rate": 6.768760360166471e-06,
940
+ "loss": 0.4909,
941
+ "step": 133
942
+ },
943
+ {
944
+ "epoch": 0.45500848896434637,
945
+ "grad_norm": 0.9937372079053678,
946
+ "learning_rate": 6.713194822900707e-06,
947
+ "loss": 0.4734,
948
+ "step": 134
949
+ },
950
+ {
951
+ "epoch": 0.45840407470288624,
952
+ "grad_norm": 1.0380172124065203,
953
+ "learning_rate": 6.657388511592453e-06,
954
+ "loss": 0.4979,
955
+ "step": 135
956
+ },
957
+ {
958
+ "epoch": 0.4617996604414261,
959
+ "grad_norm": 0.9710945454163669,
960
+ "learning_rate": 6.601349269314188e-06,
961
+ "loss": 0.4826,
962
+ "step": 136
963
+ },
964
+ {
965
+ "epoch": 0.46519524617996605,
966
+ "grad_norm": 1.1195229655523649,
967
+ "learning_rate": 6.545084971874738e-06,
968
+ "loss": 0.4685,
969
+ "step": 137
970
+ },
971
+ {
972
+ "epoch": 0.4685908319185059,
973
+ "grad_norm": 1.079871197310449,
974
+ "learning_rate": 6.488603526712391e-06,
975
+ "loss": 0.483,
976
+ "step": 138
977
+ },
978
+ {
979
+ "epoch": 0.47198641765704585,
980
+ "grad_norm": 1.0089317127122743,
981
+ "learning_rate": 6.431912871783587e-06,
982
+ "loss": 0.5028,
983
+ "step": 139
984
+ },
985
+ {
986
+ "epoch": 0.47538200339558573,
987
+ "grad_norm": 0.9671431137430636,
988
+ "learning_rate": 6.3750209744473105e-06,
989
+ "loss": 0.4669,
990
+ "step": 140
991
+ },
992
+ {
993
+ "epoch": 0.47877758913412566,
994
+ "grad_norm": 1.0520557175009426,
995
+ "learning_rate": 6.3179358303453386e-06,
996
+ "loss": 0.4735,
997
+ "step": 141
998
+ },
999
+ {
1000
+ "epoch": 0.48217317487266553,
1001
+ "grad_norm": 1.0538462837406155,
1002
+ "learning_rate": 6.260665462278544e-06,
1003
+ "loss": 0.5051,
1004
+ "step": 142
1005
+ },
1006
+ {
1007
+ "epoch": 0.4855687606112054,
1008
+ "grad_norm": 1.037965055623141,
1009
+ "learning_rate": 6.203217919079343e-06,
1010
+ "loss": 0.4762,
1011
+ "step": 143
1012
+ },
1013
+ {
1014
+ "epoch": 0.48896434634974534,
1015
+ "grad_norm": 1.0176275927771237,
1016
+ "learning_rate": 6.145601274480521e-06,
1017
+ "loss": 0.4869,
1018
+ "step": 144
1019
+ },
1020
+ {
1021
+ "epoch": 0.4923599320882852,
1022
+ "grad_norm": 1.0431340329887582,
1023
+ "learning_rate": 6.08782362598054e-06,
1024
+ "loss": 0.4611,
1025
+ "step": 145
1026
+ },
1027
+ {
1028
+ "epoch": 0.49575551782682514,
1029
+ "grad_norm": 1.0308571103490511,
1030
+ "learning_rate": 6.029893093705492e-06,
1031
+ "loss": 0.4763,
1032
+ "step": 146
1033
+ },
1034
+ {
1035
+ "epoch": 0.499151103565365,
1036
+ "grad_norm": 1.0355702334516321,
1037
+ "learning_rate": 5.971817819267914e-06,
1038
+ "loss": 0.4654,
1039
+ "step": 147
1040
+ },
1041
+ {
1042
+ "epoch": 0.5025466893039049,
1043
+ "grad_norm": 0.9893529623332218,
1044
+ "learning_rate": 5.9136059646225375e-06,
1045
+ "loss": 0.5044,
1046
+ "step": 148
1047
+ },
1048
+ {
1049
+ "epoch": 0.5059422750424448,
1050
+ "grad_norm": 0.960195438000855,
1051
+ "learning_rate": 5.855265710919211e-06,
1052
+ "loss": 0.4435,
1053
+ "step": 149
1054
+ },
1055
+ {
1056
+ "epoch": 0.5093378607809848,
1057
+ "grad_norm": 0.9646163806867624,
1058
+ "learning_rate": 5.796805257353109e-06,
1059
+ "loss": 0.4834,
1060
+ "step": 150
1061
+ },
1062
+ {
1063
+ "epoch": 0.5127334465195246,
1064
+ "grad_norm": 0.9931874900764169,
1065
+ "learning_rate": 5.738232820012407e-06,
1066
+ "loss": 0.461,
1067
+ "step": 151
1068
+ },
1069
+ {
1070
+ "epoch": 0.5161290322580645,
1071
+ "grad_norm": 1.0149629817842267,
1072
+ "learning_rate": 5.679556630723592e-06,
1073
+ "loss": 0.4611,
1074
+ "step": 152
1075
+ },
1076
+ {
1077
+ "epoch": 0.5195246179966044,
1078
+ "grad_norm": 1.0022805808479243,
1079
+ "learning_rate": 5.620784935894548e-06,
1080
+ "loss": 0.4712,
1081
+ "step": 153
1082
+ },
1083
+ {
1084
+ "epoch": 0.5229202037351444,
1085
+ "grad_norm": 0.9091219788824919,
1086
+ "learning_rate": 5.561925995355595e-06,
1087
+ "loss": 0.4906,
1088
+ "step": 154
1089
+ },
1090
+ {
1091
+ "epoch": 0.5263157894736842,
1092
+ "grad_norm": 0.9508691466403413,
1093
+ "learning_rate": 5.5029880811986546e-06,
1094
+ "loss": 0.4836,
1095
+ "step": 155
1096
+ },
1097
+ {
1098
+ "epoch": 0.5297113752122241,
1099
+ "grad_norm": 0.9816091460679524,
1100
+ "learning_rate": 5.443979476614674e-06,
1101
+ "loss": 0.4568,
1102
+ "step": 156
1103
+ },
1104
+ {
1105
+ "epoch": 0.533106960950764,
1106
+ "grad_norm": 0.9890714751673821,
1107
+ "learning_rate": 5.384908474729501e-06,
1108
+ "loss": 0.4669,
1109
+ "step": 157
1110
+ },
1111
+ {
1112
+ "epoch": 0.5365025466893039,
1113
+ "grad_norm": 0.9852345937629808,
1114
+ "learning_rate": 5.325783377438357e-06,
1115
+ "loss": 0.4631,
1116
+ "step": 158
1117
+ },
1118
+ {
1119
+ "epoch": 0.5398981324278438,
1120
+ "grad_norm": 0.9992258942689997,
1121
+ "learning_rate": 5.266612494239088e-06,
1122
+ "loss": 0.4824,
1123
+ "step": 159
1124
+ },
1125
+ {
1126
+ "epoch": 0.5432937181663837,
1127
+ "grad_norm": 1.0010167382867965,
1128
+ "learning_rate": 5.207404141064334e-06,
1129
+ "loss": 0.4619,
1130
+ "step": 160
1131
+ },
1132
+ {
1133
+ "epoch": 0.5466893039049237,
1134
+ "grad_norm": 1.117194827109753,
1135
+ "learning_rate": 5.148166639112799e-06,
1136
+ "loss": 0.518,
1137
+ "step": 161
1138
+ },
1139
+ {
1140
+ "epoch": 0.5500848896434635,
1141
+ "grad_norm": 0.9423156223035444,
1142
+ "learning_rate": 5.088908313679788e-06,
1143
+ "loss": 0.4567,
1144
+ "step": 162
1145
+ },
1146
+ {
1147
+ "epoch": 0.5534804753820034,
1148
+ "grad_norm": 0.919109798014551,
1149
+ "learning_rate": 5.029637492987153e-06,
1150
+ "loss": 0.4649,
1151
+ "step": 163
1152
+ },
1153
+ {
1154
+ "epoch": 0.5568760611205433,
1155
+ "grad_norm": 1.1232411363329386,
1156
+ "learning_rate": 4.970362507012848e-06,
1157
+ "loss": 0.4682,
1158
+ "step": 164
1159
+ },
1160
+ {
1161
+ "epoch": 0.5602716468590832,
1162
+ "grad_norm": 0.9560703825281516,
1163
+ "learning_rate": 4.911091686320213e-06,
1164
+ "loss": 0.4594,
1165
+ "step": 165
1166
+ },
1167
+ {
1168
+ "epoch": 0.5636672325976231,
1169
+ "grad_norm": 0.9927617945956294,
1170
+ "learning_rate": 4.8518333608872015e-06,
1171
+ "loss": 0.4266,
1172
+ "step": 166
1173
+ },
1174
+ {
1175
+ "epoch": 0.567062818336163,
1176
+ "grad_norm": 1.0053279255521879,
1177
+ "learning_rate": 4.792595858935668e-06,
1178
+ "loss": 0.4745,
1179
+ "step": 167
1180
+ },
1181
+ {
1182
+ "epoch": 0.5704584040747029,
1183
+ "grad_norm": 1.1247627665383018,
1184
+ "learning_rate": 4.733387505760913e-06,
1185
+ "loss": 0.4863,
1186
+ "step": 168
1187
+ },
1188
+ {
1189
+ "epoch": 0.5738539898132428,
1190
+ "grad_norm": 0.9506838521386066,
1191
+ "learning_rate": 4.674216622561645e-06,
1192
+ "loss": 0.4459,
1193
+ "step": 169
1194
+ },
1195
+ {
1196
+ "epoch": 0.5772495755517827,
1197
+ "grad_norm": 0.8918545360346245,
1198
+ "learning_rate": 4.6150915252705005e-06,
1199
+ "loss": 0.4571,
1200
+ "step": 170
1201
+ },
1202
+ {
1203
+ "epoch": 0.5806451612903226,
1204
+ "grad_norm": 0.9704497436094051,
1205
+ "learning_rate": 4.556020523385326e-06,
1206
+ "loss": 0.4701,
1207
+ "step": 171
1208
+ },
1209
+ {
1210
+ "epoch": 0.5840407470288624,
1211
+ "grad_norm": 0.9830084007827714,
1212
+ "learning_rate": 4.497011918801347e-06,
1213
+ "loss": 0.477,
1214
+ "step": 172
1215
+ },
1216
+ {
1217
+ "epoch": 0.5874363327674024,
1218
+ "grad_norm": 1.0446285595393363,
1219
+ "learning_rate": 4.438074004644407e-06,
1220
+ "loss": 0.4963,
1221
+ "step": 173
1222
+ },
1223
+ {
1224
+ "epoch": 0.5908319185059423,
1225
+ "grad_norm": 0.9430261659822053,
1226
+ "learning_rate": 4.379215064105454e-06,
1227
+ "loss": 0.4502,
1228
+ "step": 174
1229
+ },
1230
+ {
1231
+ "epoch": 0.5942275042444821,
1232
+ "grad_norm": 0.9528761706191039,
1233
+ "learning_rate": 4.32044336927641e-06,
1234
+ "loss": 0.4667,
1235
+ "step": 175
1236
+ },
1237
+ {
1238
+ "epoch": 0.597623089983022,
1239
+ "grad_norm": 0.9712945630496029,
1240
+ "learning_rate": 4.261767179987595e-06,
1241
+ "loss": 0.469,
1242
+ "step": 176
1243
+ },
1244
+ {
1245
+ "epoch": 0.601018675721562,
1246
+ "grad_norm": 0.9203712027671123,
1247
+ "learning_rate": 4.203194742646893e-06,
1248
+ "loss": 0.4549,
1249
+ "step": 177
1250
+ },
1251
+ {
1252
+ "epoch": 0.6044142614601019,
1253
+ "grad_norm": 0.9335750991253596,
1254
+ "learning_rate": 4.1447342890807905e-06,
1255
+ "loss": 0.4589,
1256
+ "step": 178
1257
+ },
1258
+ {
1259
+ "epoch": 0.6078098471986417,
1260
+ "grad_norm": 0.9302840956981335,
1261
+ "learning_rate": 4.086394035377463e-06,
1262
+ "loss": 0.4525,
1263
+ "step": 179
1264
+ },
1265
+ {
1266
+ "epoch": 0.6112054329371817,
1267
+ "grad_norm": 0.9205763667360544,
1268
+ "learning_rate": 4.028182180732088e-06,
1269
+ "loss": 0.4652,
1270
+ "step": 180
1271
+ },
1272
+ {
1273
+ "epoch": 0.6146010186757216,
1274
+ "grad_norm": 0.9053159533350853,
1275
+ "learning_rate": 3.970106906294509e-06,
1276
+ "loss": 0.4887,
1277
+ "step": 181
1278
+ },
1279
+ {
1280
+ "epoch": 0.6179966044142614,
1281
+ "grad_norm": 0.9639072424421392,
1282
+ "learning_rate": 3.912176374019462e-06,
1283
+ "loss": 0.4928,
1284
+ "step": 182
1285
+ },
1286
+ {
1287
+ "epoch": 0.6213921901528013,
1288
+ "grad_norm": 0.8934882136410526,
1289
+ "learning_rate": 3.85439872551948e-06,
1290
+ "loss": 0.4613,
1291
+ "step": 183
1292
+ },
1293
+ {
1294
+ "epoch": 0.6247877758913413,
1295
+ "grad_norm": 0.9142153731395757,
1296
+ "learning_rate": 3.796782080920659e-06,
1297
+ "loss": 0.4489,
1298
+ "step": 184
1299
+ },
1300
+ {
1301
+ "epoch": 0.6281833616298812,
1302
+ "grad_norm": 0.9197655133200423,
1303
+ "learning_rate": 3.7393345377214584e-06,
1304
+ "loss": 0.4433,
1305
+ "step": 185
1306
+ },
1307
+ {
1308
+ "epoch": 0.631578947368421,
1309
+ "grad_norm": 0.9569197839619695,
1310
+ "learning_rate": 3.682064169654663e-06,
1311
+ "loss": 0.4665,
1312
+ "step": 186
1313
+ },
1314
+ {
1315
+ "epoch": 0.634974533106961,
1316
+ "grad_norm": 0.9362494462879061,
1317
+ "learning_rate": 3.6249790255526916e-06,
1318
+ "loss": 0.4363,
1319
+ "step": 187
1320
+ },
1321
+ {
1322
+ "epoch": 0.6383701188455009,
1323
+ "grad_norm": 0.8671267136185908,
1324
+ "learning_rate": 3.568087128216414e-06,
1325
+ "loss": 0.4646,
1326
+ "step": 188
1327
+ },
1328
+ {
1329
+ "epoch": 0.6417657045840407,
1330
+ "grad_norm": 0.9203019838984255,
1331
+ "learning_rate": 3.511396473287611e-06,
1332
+ "loss": 0.4571,
1333
+ "step": 189
1334
+ },
1335
+ {
1336
+ "epoch": 0.6451612903225806,
1337
+ "grad_norm": 0.9768723974984408,
1338
+ "learning_rate": 3.4549150281252635e-06,
1339
+ "loss": 0.4484,
1340
+ "step": 190
1341
+ },
1342
+ {
1343
+ "epoch": 0.6485568760611206,
1344
+ "grad_norm": 0.9396504959111149,
1345
+ "learning_rate": 3.398650730685813e-06,
1346
+ "loss": 0.457,
1347
+ "step": 191
1348
+ },
1349
+ {
1350
+ "epoch": 0.6519524617996605,
1351
+ "grad_norm": 0.9336335579059931,
1352
+ "learning_rate": 3.3426114884075488e-06,
1353
+ "loss": 0.4467,
1354
+ "step": 192
1355
+ },
1356
+ {
1357
+ "epoch": 0.6553480475382003,
1358
+ "grad_norm": 0.8976643217426382,
1359
+ "learning_rate": 3.2868051770992935e-06,
1360
+ "loss": 0.4572,
1361
+ "step": 193
1362
+ },
1363
+ {
1364
+ "epoch": 0.6587436332767402,
1365
+ "grad_norm": 0.9373033603378723,
1366
+ "learning_rate": 3.2312396398335312e-06,
1367
+ "loss": 0.4687,
1368
+ "step": 194
1369
+ },
1370
+ {
1371
+ "epoch": 0.6621392190152802,
1372
+ "grad_norm": 0.9707294952319975,
1373
+ "learning_rate": 3.175922685844125e-06,
1374
+ "loss": 0.4336,
1375
+ "step": 195
1376
+ },
1377
+ {
1378
+ "epoch": 0.66553480475382,
1379
+ "grad_norm": 0.9583493306758707,
1380
+ "learning_rate": 3.1208620894288105e-06,
1381
+ "loss": 0.4486,
1382
+ "step": 196
1383
+ },
1384
+ {
1385
+ "epoch": 0.6689303904923599,
1386
+ "grad_norm": 0.9820021819293268,
1387
+ "learning_rate": 3.0660655888565827e-06,
1388
+ "loss": 0.4588,
1389
+ "step": 197
1390
+ },
1391
+ {
1392
+ "epoch": 0.6723259762308998,
1393
+ "grad_norm": 0.9400174461623624,
1394
+ "learning_rate": 3.0115408852801535e-06,
1395
+ "loss": 0.4423,
1396
+ "step": 198
1397
+ },
1398
+ {
1399
+ "epoch": 0.6757215619694398,
1400
+ "grad_norm": 0.9603934801371697,
1401
+ "learning_rate": 2.9572956416536267e-06,
1402
+ "loss": 0.444,
1403
+ "step": 199
1404
+ },
1405
+ {
1406
+ "epoch": 0.6791171477079796,
1407
+ "grad_norm": 0.9537870436181752,
1408
+ "learning_rate": 2.9033374816555338e-06,
1409
+ "loss": 0.4556,
1410
+ "step": 200
1411
+ },
1412
+ {
1413
+ "epoch": 0.6825127334465195,
1414
+ "grad_norm": 0.9149135094841083,
1415
+ "learning_rate": 2.8496739886173994e-06,
1416
+ "loss": 0.4727,
1417
+ "step": 201
1418
+ },
1419
+ {
1420
+ "epoch": 0.6859083191850595,
1421
+ "grad_norm": 0.9486316397589806,
1422
+ "learning_rate": 2.7963127044579697e-06,
1423
+ "loss": 0.432,
1424
+ "step": 202
1425
+ },
1426
+ {
1427
+ "epoch": 0.6893039049235993,
1428
+ "grad_norm": 0.9240748880967119,
1429
+ "learning_rate": 2.743261128623269e-06,
1430
+ "loss": 0.4596,
1431
+ "step": 203
1432
+ },
1433
+ {
1434
+ "epoch": 0.6926994906621392,
1435
+ "grad_norm": 0.9573847569611018,
1436
+ "learning_rate": 2.6905267170326143e-06,
1437
+ "loss": 0.4679,
1438
+ "step": 204
1439
+ },
1440
+ {
1441
+ "epoch": 0.6960950764006791,
1442
+ "grad_norm": 0.8845992056166951,
1443
+ "learning_rate": 2.6381168810307536e-06,
1444
+ "loss": 0.4631,
1445
+ "step": 205
1446
+ },
1447
+ {
1448
+ "epoch": 0.6994906621392191,
1449
+ "grad_norm": 1.004797015683845,
1450
+ "learning_rate": 2.5860389863462765e-06,
1451
+ "loss": 0.4475,
1452
+ "step": 206
1453
+ },
1454
+ {
1455
+ "epoch": 0.7028862478777589,
1456
+ "grad_norm": 0.9408081397458115,
1457
+ "learning_rate": 2.534300352056416e-06,
1458
+ "loss": 0.4529,
1459
+ "step": 207
1460
+ },
1461
+ {
1462
+ "epoch": 0.7062818336162988,
1463
+ "grad_norm": 0.8830371296945094,
1464
+ "learning_rate": 2.4829082495584244e-06,
1465
+ "loss": 0.4431,
1466
+ "step": 208
1467
+ },
1468
+ {
1469
+ "epoch": 0.7096774193548387,
1470
+ "grad_norm": 0.918591969840702,
1471
+ "learning_rate": 2.4318699015476495e-06,
1472
+ "loss": 0.4295,
1473
+ "step": 209
1474
+ },
1475
+ {
1476
+ "epoch": 0.7130730050933786,
1477
+ "grad_norm": 0.9126233293580099,
1478
+ "learning_rate": 2.3811924810024385e-06,
1479
+ "loss": 0.4462,
1480
+ "step": 210
1481
+ },
1482
+ {
1483
+ "epoch": 0.7164685908319185,
1484
+ "grad_norm": 0.944049132172904,
1485
+ "learning_rate": 2.330883110176049e-06,
1486
+ "loss": 0.4764,
1487
+ "step": 211
1488
+ },
1489
+ {
1490
+ "epoch": 0.7198641765704584,
1491
+ "grad_norm": 0.8594916774059891,
1492
+ "learning_rate": 2.2809488595956746e-06,
1493
+ "loss": 0.4245,
1494
+ "step": 212
1495
+ },
1496
+ {
1497
+ "epoch": 0.7232597623089984,
1498
+ "grad_norm": 0.9004237430035651,
1499
+ "learning_rate": 2.2313967470687593e-06,
1500
+ "loss": 0.4618,
1501
+ "step": 213
1502
+ },
1503
+ {
1504
+ "epoch": 0.7266553480475382,
1505
+ "grad_norm": 0.9087973453481795,
1506
+ "learning_rate": 2.18223373669669e-06,
1507
+ "loss": 0.4614,
1508
+ "step": 214
1509
+ },
1510
+ {
1511
+ "epoch": 0.7300509337860781,
1512
+ "grad_norm": 0.8919978483526617,
1513
+ "learning_rate": 2.1334667378960642e-06,
1514
+ "loss": 0.462,
1515
+ "step": 215
1516
+ },
1517
+ {
1518
+ "epoch": 0.733446519524618,
1519
+ "grad_norm": 0.9099211098465206,
1520
+ "learning_rate": 2.0851026044276405e-06,
1521
+ "loss": 0.4634,
1522
+ "step": 216
1523
+ },
1524
+ {
1525
+ "epoch": 0.7368421052631579,
1526
+ "grad_norm": 0.8896204484265512,
1527
+ "learning_rate": 2.0371481334330913e-06,
1528
+ "loss": 0.463,
1529
+ "step": 217
1530
+ },
1531
+ {
1532
+ "epoch": 0.7402376910016978,
1533
+ "grad_norm": 0.9519257493559129,
1534
+ "learning_rate": 1.9896100644797316e-06,
1535
+ "loss": 0.4788,
1536
+ "step": 218
1537
+ },
1538
+ {
1539
+ "epoch": 0.7436332767402377,
1540
+ "grad_norm": 0.9116718849117885,
1541
+ "learning_rate": 1.9424950786133414e-06,
1542
+ "loss": 0.4634,
1543
+ "step": 219
1544
+ },
1545
+ {
1546
+ "epoch": 0.7470288624787776,
1547
+ "grad_norm": 0.9263410885293661,
1548
+ "learning_rate": 1.8958097974191909e-06,
1549
+ "loss": 0.484,
1550
+ "step": 220
1551
+ },
1552
+ {
1553
+ "epoch": 0.7504244482173175,
1554
+ "grad_norm": 0.9023645875611066,
1555
+ "learning_rate": 1.8495607820914451e-06,
1556
+ "loss": 0.4442,
1557
+ "step": 221
1558
+ },
1559
+ {
1560
+ "epoch": 0.7538200339558574,
1561
+ "grad_norm": 0.8875288134755263,
1562
+ "learning_rate": 1.8037545325110506e-06,
1563
+ "loss": 0.4241,
1564
+ "step": 222
1565
+ },
1566
+ {
1567
+ "epoch": 0.7572156196943973,
1568
+ "grad_norm": 0.9432820849930534,
1569
+ "learning_rate": 1.7583974863322272e-06,
1570
+ "loss": 0.4837,
1571
+ "step": 223
1572
+ },
1573
+ {
1574
+ "epoch": 0.7606112054329371,
1575
+ "grad_norm": 0.9658199086993665,
1576
+ "learning_rate": 1.7134960180777171e-06,
1577
+ "loss": 0.4728,
1578
+ "step": 224
1579
+ },
1580
+ {
1581
+ "epoch": 0.7640067911714771,
1582
+ "grad_norm": 0.8457866400541412,
1583
+ "learning_rate": 1.6690564382429104e-06,
1584
+ "loss": 0.4301,
1585
+ "step": 225
1586
+ },
1587
+ {
1588
+ "epoch": 0.767402376910017,
1589
+ "grad_norm": 0.8989697214841547,
1590
+ "learning_rate": 1.6250849924089485e-06,
1591
+ "loss": 0.4527,
1592
+ "step": 226
1593
+ },
1594
+ {
1595
+ "epoch": 0.7707979626485568,
1596
+ "grad_norm": 0.9022700419018949,
1597
+ "learning_rate": 1.581587860364977e-06,
1598
+ "loss": 0.4543,
1599
+ "step": 227
1600
+ },
1601
+ {
1602
+ "epoch": 0.7741935483870968,
1603
+ "grad_norm": 0.9173241233268217,
1604
+ "learning_rate": 1.5385711552396227e-06,
1605
+ "loss": 0.4466,
1606
+ "step": 228
1607
+ },
1608
+ {
1609
+ "epoch": 0.7775891341256367,
1610
+ "grad_norm": 0.8617337826425147,
1611
+ "learning_rate": 1.4960409226418576e-06,
1612
+ "loss": 0.4386,
1613
+ "step": 229
1614
+ },
1615
+ {
1616
+ "epoch": 0.7809847198641766,
1617
+ "grad_norm": 0.8625616538339923,
1618
+ "learning_rate": 1.4540031398113335e-06,
1619
+ "loss": 0.4533,
1620
+ "step": 230
1621
+ },
1622
+ {
1623
+ "epoch": 0.7843803056027164,
1624
+ "grad_norm": 0.894048701617508,
1625
+ "learning_rate": 1.4124637147783431e-06,
1626
+ "loss": 0.4447,
1627
+ "step": 231
1628
+ },
1629
+ {
1630
+ "epoch": 0.7877758913412564,
1631
+ "grad_norm": 0.9208844935282854,
1632
+ "learning_rate": 1.371428485533498e-06,
1633
+ "loss": 0.4826,
1634
+ "step": 232
1635
+ },
1636
+ {
1637
+ "epoch": 0.7911714770797963,
1638
+ "grad_norm": 0.8883039726822113,
1639
+ "learning_rate": 1.3309032192072463e-06,
1640
+ "loss": 0.4486,
1641
+ "step": 233
1642
+ },
1643
+ {
1644
+ "epoch": 0.7945670628183361,
1645
+ "grad_norm": 0.899703690304073,
1646
+ "learning_rate": 1.29089361125936e-06,
1647
+ "loss": 0.4396,
1648
+ "step": 234
1649
+ },
1650
+ {
1651
+ "epoch": 0.797962648556876,
1652
+ "grad_norm": 0.942126628969688,
1653
+ "learning_rate": 1.251405284678488e-06,
1654
+ "loss": 0.4666,
1655
+ "step": 235
1656
+ },
1657
+ {
1658
+ "epoch": 0.801358234295416,
1659
+ "grad_norm": 0.876574882692073,
1660
+ "learning_rate": 1.2124437891918995e-06,
1661
+ "loss": 0.4338,
1662
+ "step": 236
1663
+ },
1664
+ {
1665
+ "epoch": 0.8047538200339559,
1666
+ "grad_norm": 0.8738541998828565,
1667
+ "learning_rate": 1.1740146004855141e-06,
1668
+ "loss": 0.4609,
1669
+ "step": 237
1670
+ },
1671
+ {
1672
+ "epoch": 0.8081494057724957,
1673
+ "grad_norm": 0.8732256896169818,
1674
+ "learning_rate": 1.1361231194343436e-06,
1675
+ "loss": 0.4505,
1676
+ "step": 238
1677
+ },
1678
+ {
1679
+ "epoch": 0.8115449915110357,
1680
+ "grad_norm": 0.8634374293174691,
1681
+ "learning_rate": 1.0987746713434578e-06,
1682
+ "loss": 0.4626,
1683
+ "step": 239
1684
+ },
1685
+ {
1686
+ "epoch": 0.8149405772495756,
1687
+ "grad_norm": 0.8709360064531477,
1688
+ "learning_rate": 1.0619745051995473e-06,
1689
+ "loss": 0.465,
1690
+ "step": 240
1691
+ },
1692
+ {
1693
+ "epoch": 0.8183361629881154,
1694
+ "grad_norm": 0.8612129831209381,
1695
+ "learning_rate": 1.0257277929332332e-06,
1696
+ "loss": 0.4543,
1697
+ "step": 241
1698
+ },
1699
+ {
1700
+ "epoch": 0.8217317487266553,
1701
+ "grad_norm": 0.9089176413771565,
1702
+ "learning_rate": 9.900396286922025e-07,
1703
+ "loss": 0.4676,
1704
+ "step": 242
1705
+ },
1706
+ {
1707
+ "epoch": 0.8251273344651953,
1708
+ "grad_norm": 0.8856710278778406,
1709
+ "learning_rate": 9.549150281252633e-07,
1710
+ "loss": 0.4366,
1711
+ "step": 243
1712
+ },
1713
+ {
1714
+ "epoch": 0.8285229202037352,
1715
+ "grad_norm": 0.8188623584178454,
1716
+ "learning_rate": 9.203589276774438e-07,
1717
+ "loss": 0.4404,
1718
+ "step": 244
1719
+ },
1720
+ {
1721
+ "epoch": 0.831918505942275,
1722
+ "grad_norm": 0.9252917982729971,
1723
+ "learning_rate": 8.86376183896226e-07,
1724
+ "loss": 0.4447,
1725
+ "step": 245
1726
+ },
1727
+ {
1728
+ "epoch": 0.8353140916808149,
1729
+ "grad_norm": 0.8784423429363009,
1730
+ "learning_rate": 8.529715727489912e-07,
1731
+ "loss": 0.4488,
1732
+ "step": 246
1733
+ },
1734
+ {
1735
+ "epoch": 0.8387096774193549,
1736
+ "grad_norm": 0.9207047553546756,
1737
+ "learning_rate": 8.201497889518073e-07,
1738
+ "loss": 0.4561,
1739
+ "step": 247
1740
+ },
1741
+ {
1742
+ "epoch": 0.8421052631578947,
1743
+ "grad_norm": 0.886908726068121,
1744
+ "learning_rate": 7.879154453096305e-07,
1745
+ "loss": 0.4556,
1746
+ "step": 248
1747
+ },
1748
+ {
1749
+ "epoch": 0.8455008488964346,
1750
+ "grad_norm": 0.8585705366949494,
1751
+ "learning_rate": 7.562730720680111e-07,
1752
+ "loss": 0.4333,
1753
+ "step": 249
1754
+ },
1755
+ {
1756
+ "epoch": 0.8488964346349746,
1757
+ "grad_norm": 0.8504815527006262,
1758
+ "learning_rate": 7.25227116276413e-07,
1759
+ "loss": 0.4512,
1760
+ "step": 250
1761
+ },
1762
+ {
1763
+ "epoch": 0.8522920203735145,
1764
+ "grad_norm": 0.8201572218259242,
1765
+ "learning_rate": 6.947819411632223e-07,
1766
+ "loss": 0.4492,
1767
+ "step": 251
1768
+ },
1769
+ {
1770
+ "epoch": 0.8556876061120543,
1771
+ "grad_norm": 0.8579475897548395,
1772
+ "learning_rate": 6.649418255225298e-07,
1773
+ "loss": 0.4633,
1774
+ "step": 252
1775
+ },
1776
+ {
1777
+ "epoch": 0.8590831918505942,
1778
+ "grad_norm": 0.8140416489339838,
1779
+ "learning_rate": 6.357109631127889e-07,
1780
+ "loss": 0.4418,
1781
+ "step": 253
1782
+ },
1783
+ {
1784
+ "epoch": 0.8624787775891342,
1785
+ "grad_norm": 0.8552881051968678,
1786
+ "learning_rate": 6.07093462067419e-07,
1787
+ "loss": 0.4572,
1788
+ "step": 254
1789
+ },
1790
+ {
1791
+ "epoch": 0.865874363327674,
1792
+ "grad_norm": 0.8971540169404577,
1793
+ "learning_rate": 5.79093344317449e-07,
1794
+ "loss": 0.4775,
1795
+ "step": 255
1796
+ },
1797
+ {
1798
+ "epoch": 0.8692699490662139,
1799
+ "grad_norm": 0.8058254068183305,
1800
+ "learning_rate": 5.517145450262639e-07,
1801
+ "loss": 0.4313,
1802
+ "step": 256
1803
+ },
1804
+ {
1805
+ "epoch": 0.8726655348047538,
1806
+ "grad_norm": 0.8530487589073922,
1807
+ "learning_rate": 5.249609120365579e-07,
1808
+ "loss": 0.4397,
1809
+ "step": 257
1810
+ },
1811
+ {
1812
+ "epoch": 0.8760611205432938,
1813
+ "grad_norm": 0.9429999718413201,
1814
+ "learning_rate": 4.988362053295564e-07,
1815
+ "loss": 0.4496,
1816
+ "step": 258
1817
+ },
1818
+ {
1819
+ "epoch": 0.8794567062818336,
1820
+ "grad_norm": 0.8375356078246441,
1821
+ "learning_rate": 4.733440964965791e-07,
1822
+ "loss": 0.4303,
1823
+ "step": 259
1824
+ },
1825
+ {
1826
+ "epoch": 0.8828522920203735,
1827
+ "grad_norm": 0.8256703834683652,
1828
+ "learning_rate": 4.484881682230341e-07,
1829
+ "loss": 0.4268,
1830
+ "step": 260
1831
+ },
1832
+ {
1833
+ "epoch": 0.8862478777589134,
1834
+ "grad_norm": 0.8543078561007811,
1835
+ "learning_rate": 4.242719137849077e-07,
1836
+ "loss": 0.4514,
1837
+ "step": 261
1838
+ },
1839
+ {
1840
+ "epoch": 0.8896434634974533,
1841
+ "grad_norm": 0.905414358204414,
1842
+ "learning_rate": 4.00698736557808e-07,
1843
+ "loss": 0.4475,
1844
+ "step": 262
1845
+ },
1846
+ {
1847
+ "epoch": 0.8930390492359932,
1848
+ "grad_norm": 0.8479836834072867,
1849
+ "learning_rate": 3.777719495386567e-07,
1850
+ "loss": 0.4479,
1851
+ "step": 263
1852
+ },
1853
+ {
1854
+ "epoch": 0.8964346349745331,
1855
+ "grad_norm": 0.8646327130622585,
1856
+ "learning_rate": 3.5549477488007853e-07,
1857
+ "loss": 0.4563,
1858
+ "step": 264
1859
+ },
1860
+ {
1861
+ "epoch": 0.8998302207130731,
1862
+ "grad_norm": 0.8363483102479007,
1863
+ "learning_rate": 3.3387034343755063e-07,
1864
+ "loss": 0.4447,
1865
+ "step": 265
1866
+ },
1867
+ {
1868
+ "epoch": 0.9032258064516129,
1869
+ "grad_norm": 0.8772923486186023,
1870
+ "learning_rate": 3.1290169432939556e-07,
1871
+ "loss": 0.4416,
1872
+ "step": 266
1873
+ },
1874
+ {
1875
+ "epoch": 0.9066213921901528,
1876
+ "grad_norm": 0.8320996733730202,
1877
+ "learning_rate": 2.925917745096568e-07,
1878
+ "loss": 0.4263,
1879
+ "step": 267
1880
+ },
1881
+ {
1882
+ "epoch": 0.9100169779286927,
1883
+ "grad_norm": 0.8735391402072182,
1884
+ "learning_rate": 2.7294343835393366e-07,
1885
+ "loss": 0.4635,
1886
+ "step": 268
1887
+ },
1888
+ {
1889
+ "epoch": 0.9134125636672326,
1890
+ "grad_norm": 0.8476560390812548,
1891
+ "learning_rate": 2.539594472582213e-07,
1892
+ "loss": 0.4387,
1893
+ "step": 269
1894
+ },
1895
+ {
1896
+ "epoch": 0.9168081494057725,
1897
+ "grad_norm": 0.8265472830061668,
1898
+ "learning_rate": 2.3564246925082358e-07,
1899
+ "loss": 0.4409,
1900
+ "step": 270
1901
+ },
1902
+ {
1903
+ "epoch": 0.9202037351443124,
1904
+ "grad_norm": 0.8275085794146296,
1905
+ "learning_rate": 2.179950786173879e-07,
1906
+ "loss": 0.422,
1907
+ "step": 271
1908
+ },
1909
+ {
1910
+ "epoch": 0.9235993208828522,
1911
+ "grad_norm": 0.8378368477541204,
1912
+ "learning_rate": 2.01019755539108e-07,
1913
+ "loss": 0.4533,
1914
+ "step": 272
1915
+ },
1916
+ {
1917
+ "epoch": 0.9269949066213922,
1918
+ "grad_norm": 0.8225280576542648,
1919
+ "learning_rate": 1.8471888574415953e-07,
1920
+ "loss": 0.4504,
1921
+ "step": 273
1922
+ },
1923
+ {
1924
+ "epoch": 0.9303904923599321,
1925
+ "grad_norm": 0.8689138287376853,
1926
+ "learning_rate": 1.690947601724091e-07,
1927
+ "loss": 0.4616,
1928
+ "step": 274
1929
+ },
1930
+ {
1931
+ "epoch": 0.933786078098472,
1932
+ "grad_norm": 0.8153963652578823,
1933
+ "learning_rate": 1.5414957465343883e-07,
1934
+ "loss": 0.4362,
1935
+ "step": 275
1936
+ },
1937
+ {
1938
+ "epoch": 0.9371816638370118,
1939
+ "grad_norm": 0.7970051219455544,
1940
+ "learning_rate": 1.3988542959794627e-07,
1941
+ "loss": 0.4476,
1942
+ "step": 276
1943
+ },
1944
+ {
1945
+ "epoch": 0.9405772495755518,
1946
+ "grad_norm": 0.8286263852975556,
1947
+ "learning_rate": 1.2630432970255014e-07,
1948
+ "loss": 0.4317,
1949
+ "step": 277
1950
+ },
1951
+ {
1952
+ "epoch": 0.9439728353140917,
1953
+ "grad_norm": 0.819703548732724,
1954
+ "learning_rate": 1.1340818366804728e-07,
1955
+ "loss": 0.4374,
1956
+ "step": 278
1957
+ },
1958
+ {
1959
+ "epoch": 0.9473684210526315,
1960
+ "grad_norm": 0.8630335285550543,
1961
+ "learning_rate": 1.0119880393116177e-07,
1962
+ "loss": 0.4463,
1963
+ "step": 279
1964
+ },
1965
+ {
1966
+ "epoch": 0.9507640067911715,
1967
+ "grad_norm": 0.8905416016998572,
1968
+ "learning_rate": 8.967790640982466e-08,
1969
+ "loss": 0.4534,
1970
+ "step": 280
1971
+ },
1972
+ {
1973
+ "epoch": 0.9541595925297114,
1974
+ "grad_norm": 0.869553726844365,
1975
+ "learning_rate": 7.884711026201586e-08,
1976
+ "loss": 0.4474,
1977
+ "step": 281
1978
+ },
1979
+ {
1980
+ "epoch": 0.9575551782682513,
1981
+ "grad_norm": 0.8516101851144577,
1982
+ "learning_rate": 6.870793765820783e-08,
1983
+ "loss": 0.4735,
1984
+ "step": 282
1985
+ },
1986
+ {
1987
+ "epoch": 0.9609507640067911,
1988
+ "grad_norm": 0.834166916764426,
1989
+ "learning_rate": 5.92618135674361e-08,
1990
+ "loss": 0.4506,
1991
+ "step": 283
1992
+ },
1993
+ {
1994
+ "epoch": 0.9643463497453311,
1995
+ "grad_norm": 0.8500281237762543,
1996
+ "learning_rate": 5.0510065557034526e-08,
1997
+ "loss": 0.4677,
1998
+ "step": 284
1999
+ },
2000
+ {
2001
+ "epoch": 0.967741935483871,
2002
+ "grad_norm": 0.833367943844974,
2003
+ "learning_rate": 4.245392360605727e-08,
2004
+ "loss": 0.4582,
2005
+ "step": 285
2006
+ },
2007
+ {
2008
+ "epoch": 0.9711375212224108,
2009
+ "grad_norm": 0.8458107328647104,
2010
+ "learning_rate": 3.5094519932415417e-08,
2011
+ "loss": 0.4505,
2012
+ "step": 286
2013
+ },
2014
+ {
2015
+ "epoch": 0.9745331069609507,
2016
+ "grad_norm": 0.8443895579435003,
2017
+ "learning_rate": 2.843288883375539e-08,
2018
+ "loss": 0.4538,
2019
+ "step": 287
2020
+ },
2021
+ {
2022
+ "epoch": 0.9779286926994907,
2023
+ "grad_norm": 0.8631958241337926,
2024
+ "learning_rate": 2.2469966542096323e-08,
2025
+ "loss": 0.4432,
2026
+ "step": 288
2027
+ },
2028
+ {
2029
+ "epoch": 0.9813242784380306,
2030
+ "grad_norm": 0.886260366562854,
2031
+ "learning_rate": 1.7206591092253642e-08,
2032
+ "loss": 0.4567,
2033
+ "step": 289
2034
+ },
2035
+ {
2036
+ "epoch": 0.9847198641765704,
2037
+ "grad_norm": 0.7994144470255627,
2038
+ "learning_rate": 1.264350220405719e-08,
2039
+ "loss": 0.4317,
2040
+ "step": 290
2041
+ },
2042
+ {
2043
+ "epoch": 0.9881154499151104,
2044
+ "grad_norm": 0.8700681537853684,
2045
+ "learning_rate": 8.781341178393244e-09,
2046
+ "loss": 0.4555,
2047
+ "step": 291
2048
+ },
2049
+ {
2050
+ "epoch": 0.9915110356536503,
2051
+ "grad_norm": 0.8575898812488901,
2052
+ "learning_rate": 5.620650807073857e-09,
2053
+ "loss": 0.4606,
2054
+ "step": 292
2055
+ },
2056
+ {
2057
+ "epoch": 0.9949066213921901,
2058
+ "grad_norm": 0.8616135380601608,
2059
+ "learning_rate": 3.1618752965534295e-09,
2060
+ "loss": 0.4408,
2061
+ "step": 293
2062
+ },
2063
+ {
2064
+ "epoch": 0.99830220713073,
2065
+ "grad_norm": 0.7784658740487929,
2066
+ "learning_rate": 1.4053602054991954e-09,
2067
+ "loss": 0.4157,
2068
+ "step": 294
2069
+ },
2070
+ {
2071
+ "epoch": 1.0,
2072
+ "grad_norm": 1.2063313976605035,
2073
+ "learning_rate": 3.513523962256349e-10,
2074
+ "loss": 0.4174,
2075
+ "step": 295
2076
+ },
2077
+ {
2078
+ "epoch": 1.0,
2079
+ "step": 295,
2080
+ "total_flos": 44242544295936.0,
2081
+ "train_loss": 0.5097209928399425,
2082
+ "train_runtime": 1256.4644,
2083
+ "train_samples_per_second": 14.995,
2084
+ "train_steps_per_second": 0.235
2085
+ }
2086
+ ],
2087
+ "logging_steps": 1,
2088
+ "max_steps": 295,
2089
+ "num_input_tokens_seen": 0,
2090
+ "num_train_epochs": 1,
2091
+ "save_steps": 2500,
2092
+ "stateful_callbacks": {
2093
+ "TrainerControl": {
2094
+ "args": {
2095
+ "should_epoch_stop": false,
2096
+ "should_evaluate": false,
2097
+ "should_log": false,
2098
+ "should_save": true,
2099
+ "should_training_stop": true
2100
+ },
2101
+ "attributes": {}
2102
+ }
2103
+ },
2104
+ "total_flos": 44242544295936.0,
2105
+ "train_batch_size": 4,
2106
+ "trial_name": null,
2107
+ "trial_params": null
2108
+ }
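
The file ends with the run summary, which is internally consistent: 295 steps over 1256.4644 s matches the reported 0.235 steps/s (295 / 1256.4644 ≈ 0.2348), and 14.995 samples/s at that step rate works out to roughly 64 samples per optimizer step. The per-step "loss" column logged above is presumably what training_loss.png (added below) plots; a minimal sketch for reproducing that curve from a local copy of this file, assuming it is the Trainer's standard trainer_state.json:

    # Minimal sketch (not part of the commit): plot loss vs. step from the
    # log history above. Assumes the file has been downloaded locally.
    import json

    import matplotlib.pyplot as plt

    with open("trainer_state.json") as f:
        state = json.load(f)

    # Per-step entries carry a "loss" key; the final summary entry only has
    # "train_loss" etc., so filter on key presence before plotting.
    entries = [e for e in state["log_history"] if "loss" in e]
    plt.plot([e["step"] for e in entries], [e["loss"] for e in entries])
    plt.xlabel("step")
    plt.ylabel("training loss")
    plt.savefig("training_loss_reproduced.png")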
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:73aa068e9bce41cafed881e595c3959cffc118bd893a4b1b75a37dcc632c76fe
3
+ size 8081
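
The three lines above are a Git LFS pointer (spec version, SHA-256 object id, byte size), not the binary itself; running git lfs pull in a clone fetches the real 8081-byte file. This file is the Trainer's pickled TrainingArguments object, so a hedged sketch for inspecting it — note that recent PyTorch versions default to weights_only=True, which must be disabled for a full pickle:

    # Minimal sketch (assumes torch and transformers are installed and the
    # LFS object has been fetched): training_args.bin is a pickled
    # TrainingArguments, not a tensor checkpoint, hence weights_only=False.
    import torch

    args = torch.load("training_args.bin", weights_only=False)
    print(args.learning_rate, args.lr_scheduler_type)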
training_loss.png ADDED
vocab.json ADDED
The diff for this file is too large to render. See raw diff
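
vocab.json is presumably the tokenizer's vocabulary file, large enough that the viewer falls back to the raw link. AutoTokenizer consumes it transparently together with the rest of the tokenizer files in the repo; a minimal sketch from a local checkout (the repo id is not shown on this page, so the path below is a placeholder):

    # Minimal sketch (placeholder path; assumes a full local checkout with
    # all tokenizer files fetched, including LFS-tracked ones, and that the
    # checkout ships a chat template).
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("./path/to/checkout")
    text = tok.apply_chat_template(
        [{"role": "user", "content": "Hello"}],
        tokenize=False,
        add_generation_prompt=True,
    )
    print(text)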