Commit 437ad67 by ryanmarten · verified · 1 Parent(s): 469cfd4

Upload model

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: Qwen/Qwen2.5-7B-Instruct
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: openthoughts_math
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # openthoughts_math
+
+ This model is a fine-tuned version of [Qwen/Qwen2.5-7B-Instruct](https://huggingface.co/Qwen/Qwen2.5-7B-Instruct) on the mlfoundations-dev/openthoughts_math dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 8e-05
+ - train_batch_size: 1
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 16
+ - gradient_accumulation_steps: 32
+ - total_train_batch_size: 512
+ - total_eval_batch_size: 128
+ - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.1
+ - num_epochs: 5.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.46.1
+ - Pytorch 2.5.0a0+b465a5843b.nv24.09
+ - Datasets 3.5.0
+ - Tokenizers 0.20.3
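
Note that the total_train_batch_size of 512 listed above follows from train_batch_size (1) × num_devices (16) × gradient_accumulation_steps (32). For completeness, a minimal inference sketch with Transformers; the repository id below is an assumption inferred from the model name in this card, so substitute the actual repo path if it differs:

```python
# Minimal inference sketch for the checkpoint described in this model card.
# NOTE: the repo id below is an assumption inferred from the card; replace it
# with the actual repository path if it differs.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "mlfoundations-dev/openthoughts_math"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(
    repo, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "What is the derivative of x**3?"}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(inputs, max_new_tokens=256)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```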
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "</tool_call>": 151658,
+   "<tool_call>": 151657,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
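
These entries extend the base Qwen2 vocabulary with the ChatML, vision, FIM, and tool-call markers. A small sanity-check sketch, assuming the tokenizer files from this commit are checked out locally (the directory name is illustrative):

```python
# Sketch: verify that the added tokens resolve to the ids listed above.
# "./openthoughts_math" is a hypothetical local checkout of this repository.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./openthoughts_math")
assert tok.convert_tokens_to_ids("<|im_end|>") == 151645
assert tok.convert_tokens_to_ids("<|endoftext|>") == 151643
print(tok.convert_ids_to_tokens([151657, 151658]))  # ['<tool_call>', '</tool_call>']
```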
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 4.942630185348632,
+   "total_flos": 9.306564393200255e+18,
+   "train_loss": 0.0,
+   "train_runtime": 1.9909,
+   "train_samples_per_second": 91040.986,
+   "train_steps_per_second": 175.798
+ }
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "_name_or_path": "/p/data1/mmlaion/dcft/hub/models--Qwen--Qwen2.5-7B-Instruct/snapshots/a09a35458c702b33eeacc393d103063234e8bc28",
+   "architectures": [
+     "Qwen2ForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 3584,
+   "initializer_range": 0.02,
+   "intermediate_size": 18944,
+   "max_position_embeddings": 32768,
+   "max_window_layers": 28,
+   "model_type": "qwen2",
+   "num_attention_heads": 28,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 4,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 1000000.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.46.1",
+   "use_cache": false,
+   "use_sliding_window": false,
+   "vocab_size": 152064
+ }
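
The architecture fields above (28 layers, hidden size 3584, 28 query heads with 4 KV heads, intermediate size 18944, untied 152064-token embeddings) are enough to estimate the parameter count. A back-of-the-envelope sketch, assuming the standard Qwen2 layout with biases only on the q/k/v projections (which is also what the weight map further below shows):

```python
# Rough parameter-count estimate from the config.json values above.
hidden, inter, layers = 3584, 18944, 28
heads, kv_heads = 28, 4
head_dim = hidden // heads          # 128
vocab = 152064

embed = vocab * hidden              # input embeddings
lm_head = vocab * hidden            # separate output head (tie_word_embeddings: false)

attn = hidden * hidden + hidden                                    # q_proj weight + bias
attn += 2 * (hidden * kv_heads * head_dim + kv_heads * head_dim)   # k/v_proj weights + biases
attn += hidden * hidden                                            # o_proj (no bias)
mlp = 3 * hidden * inter                                           # gate, up, down projections
norms = 2 * hidden                                                  # two RMSNorms per layer
per_layer = attn + mlp + norms

total = embed + lm_head + layers * per_layer + hidden  # + final model.norm
print(f"{total / 1e9:.2f}B parameters")                # ≈ 7.62B, a Qwen2.5-7B-sized model
```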
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "bos_token_id": 151643,
+   "do_sample": true,
+   "eos_token_id": [
+     151645,
+     151643
+   ],
+   "pad_token_id": 151643,
+   "repetition_penalty": 1.05,
+   "temperature": 0.7,
+   "top_k": 20,
+   "top_p": 0.8,
+   "transformers_version": "4.46.1"
+ }
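
These are the sampling defaults shipped with the checkpoint (sampling on, temperature 0.7, top-p 0.8, top-k 20, repetition penalty 1.05); `generate()` picks them up automatically from generation_config.json. A sketch of setting them explicitly, under the same assumed repo id as in the model-card example above:

```python
# Sketch: apply the generation_config.json defaults explicitly.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

repo = "mlfoundations-dev/openthoughts_math"  # assumed repo id, as before
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(
    repo, torch_dtype=torch.bfloat16, device_map="auto"
)

gen_cfg = GenerationConfig(
    do_sample=True,
    temperature=0.7,
    top_p=0.8,
    top_k=20,
    repetition_penalty=1.05,
    pad_token_id=151643,
    eos_token_id=[151645, 151643],
    max_new_tokens=512,
)
prompt = tokenizer.apply_chat_template(
    [{"role": "user", "content": "Prove that sqrt(2) is irrational."}],
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)
out = model.generate(prompt, generation_config=gen_cfg)
print(tokenizer.decode(out[0][prompt.shape[-1]:], skip_special_tokens=True))
```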
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a64ae99d81dc816f97114b5466c31f7262c421ffdfb2e63859eaea39d0196c0
+ size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:872caa13155c49c822cecac2c685977433a9c7e983710810a8a37db9c5528308
+ size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:71276194da339b6a5e9387f4c2a1f906511e1fae820e27c5034015e6cb650c3d
+ size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:41ce992e3e4e1e106bbcd8ce60916a0fa3dcb47637f15d3019900e65453a0984
+ size 1089994880
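
The four entries above are Git LFS pointer files: only a hash and a byte size appear in the diff, while the actual tensors live in LFS. Their sizes sum to about 15.23 GB, which is consistent with roughly 7.6B parameters stored in bfloat16 (2 bytes each). A quick check:

```python
# Sketch: sum the shard sizes recorded in the LFS pointers above.
shard_bytes = [4_877_660_776, 4_932_751_008, 4_330_865_200, 1_089_994_880]
total = sum(shard_bytes)
print(f"{total / 1e9:.2f} GB on disk")               # ≈ 15.23 GB
print(f"≈ {total / 2 / 1e9:.2f}B bf16 parameters")   # ≈ 7.62B at 2 bytes per weight
```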
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 15231233024
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00004-of-00004.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
13
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
14
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
15
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
16
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
17
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
18
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
19
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
20
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
21
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
22
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
23
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
24
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
25
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
26
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
27
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
28
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
29
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
30
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
31
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
32
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
33
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
34
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
35
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
36
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
37
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
38
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
39
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
40
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
41
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
42
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
43
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
44
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
45
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
46
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
47
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
48
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
49
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
50
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
51
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
52
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
53
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
54
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
55
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
56
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
57
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
58
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
59
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
60
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
61
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
62
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
63
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
64
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
65
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
66
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
67
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
68
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
69
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
70
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
71
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
72
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
73
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
74
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
75
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
76
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
77
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
78
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
79
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
80
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
81
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
82
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
83
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
84
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
85
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
86
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
87
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
88
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
89
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
90
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
91
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
92
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
93
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
94
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
95
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
96
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
97
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
98
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
99
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
100
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
101
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
102
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
103
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
104
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
105
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
106
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
107
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
108
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
109
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
110
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
111
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
112
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
113
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
114
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
115
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
116
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
117
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
118
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
119
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
120
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
121
+ "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
122
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
123
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
124
+ "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
125
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
126
+ "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
127
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
128
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
129
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
130
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
131
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
132
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
133
+ "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
134
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
135
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
136
+ "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
137
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
138
+ "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
139
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
140
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
141
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
142
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
143
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
144
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
145
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
146
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
147
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
148
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
149
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
150
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
151
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
152
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
153
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
154
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
155
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
156
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
157
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
158
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
159
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
160
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
161
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
162
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
163
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
164
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
165
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
166
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
167
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
168
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
169
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
170
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
171
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
172
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
173
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
174
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
175
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
176
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
177
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
178
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
179
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
180
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
181
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
182
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
183
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
184
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
185
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
186
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
187
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
188
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
189
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
190
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
191
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
192
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
193
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
194
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
195
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
196
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
197
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
198
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
199
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
200
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
201
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
202
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
203
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
204
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
205
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
206
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
207
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
208
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
209
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
210
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
211
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
212
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
213
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
214
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
215
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
216
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
217
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
218
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
219
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
220
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
221
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
222
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
223
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
224
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
225
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
226
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
227
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
228
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
229
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
230
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
231
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
232
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
233
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
234
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
235
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
236
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
237
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
238
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
239
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
240
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
241
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
242
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
243
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
244
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
245
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
246
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
247
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
248
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
249
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
250
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
251
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
252
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
253
+ "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
254
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
255
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
256
+ "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
257
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
258
+ "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
259
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
260
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
261
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
262
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
263
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
264
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
265
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
266
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
267
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
268
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
269
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
270
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
271
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
272
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
273
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
274
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
275
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
276
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
277
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
278
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
279
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
280
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
281
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
282
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
283
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
284
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
285
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
286
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
287
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
288
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
289
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
290
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
291
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
292
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
293
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
294
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
295
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
296
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
297
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
298
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
299
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
300
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
301
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
302
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
303
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
304
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
305
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
306
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
307
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
308
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
309
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
310
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
311
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
312
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
313
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
314
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
315
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
316
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
317
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
318
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
319
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
320
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
321
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
322
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
323
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
324
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
325
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
326
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
327
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
328
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
329
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
330
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
331
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
332
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
333
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
334
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
335
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
336
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
337
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
338
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
339
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
340
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
341
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
342
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
343
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
344
+ "model.norm.weight": "model-00003-of-00004.safetensors"
345
+ }
346
+ }
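
model.safetensors.index.json maps every tensor name to the shard that stores it, which is how `from_pretrained` decides which of the four files to open for each weight. A small inspection sketch, again assuming a hypothetical local checkout of this repository:

```python
# Sketch: inspect how the checkpoint is sharded across the four safetensors files.
# "./openthoughts_math" is a hypothetical local checkout of this repository.
import json
from collections import Counter

with open("./openthoughts_math/model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])            # 15231233024 bytes of tensor data
for shard, n_tensors in sorted(Counter(index["weight_map"].values()).items()):
    print(f"{shard}: {n_tensors} tensors")
```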
runs/May10_23-17-04_jrc0927/events.out.tfevents.1746911952.jrc0927.818033.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:42a4d5b7a032cd6b2940383f33c15152a89742786d8144b8ae5c07fcbfa4fb90
+ size 5497
runs/May12_18-08-34_jrc0911/events.out.tfevents.1747066243.jrc0911.2931372.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89256193da2b0eb77eac73c453bfc0475925d066bd805b41a858007819e5a984
+ size 79193
runs/May13_16-52-23_jrc0911/events.out.tfevents.1747148074.jrc0911.3140381.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e8e58f5e0856f2847a9af4dda92006508b4ee1a8507e24c3b80e2b88ce801ccf
+ size 5915
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "additional_special_tokens": [
+     "<|im_start|>",
+     "<|im_end|>",
+     "<|object_ref_start|>",
+     "<|object_ref_end|>",
+     "<|box_start|>",
+     "<|box_end|>",
+     "<|quad_start|>",
+     "<|quad_end|>",
+     "<|vision_start|>",
+     "<|vision_end|>",
+     "<|vision_pad|>",
+     "<|image_pad|>",
+     "<|video_pad|>"
+   ],
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
start_end.json ADDED
@@ -0,0 +1 @@
+ {"start_time": "2025-05-13 16:52:23", "end_time": "2025-05-13 16:54:48"}
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ }
181
+ },
182
+ "additional_special_tokens": [
183
+ "<|im_start|>",
184
+ "<|im_end|>",
185
+ "<|object_ref_start|>",
186
+ "<|object_ref_end|>",
187
+ "<|box_start|>",
188
+ "<|box_end|>",
189
+ "<|quad_start|>",
190
+ "<|quad_end|>",
191
+ "<|vision_start|>",
192
+ "<|vision_end|>",
193
+ "<|vision_pad|>",
194
+ "<|image_pad|>",
195
+ "<|video_pad|>"
196
+ ],
197
+ "bos_token": null,
198
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
199
+ "clean_up_tokenization_spaces": false,
200
+ "eos_token": "<|endoftext|>",
201
+ "errors": "replace",
202
+ "model_max_length": 131072,
203
+ "pad_token": "<|endoftext|>",
204
+ "padding_side": "right",
205
+ "split_special_tokens": false,
206
+ "tokenizer_class": "Qwen2Tokenizer",
207
+ "unk_token": null
208
+ }
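
The `chat_template` field above is the standard Qwen2.5 ChatML-style template (`<|im_start|>role ... <|im_end|>` turns, with an optional `<tools>` block for function calling). A sketch of rendering a conversation through it, assuming the same hypothetical local checkout:

```python
# Sketch: render a conversation through the chat_template defined above.
# "./openthoughts_math" is a hypothetical local checkout of this repository.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./openthoughts_math")
messages = [
    {"role": "system", "content": "You are a careful math tutor."},
    {"role": "user", "content": "Solve 2x + 3 = 11."},
]
print(tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True))
# <|im_start|>system
# You are a careful math tutor.<|im_end|>
# <|im_start|>user
# Solve 2x + 3 = 11.<|im_end|>
# <|im_start|>assistant
```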
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 4.942630185348632,
+   "total_flos": 9.306564393200255e+18,
+   "train_loss": 0.0,
+   "train_runtime": 1.9909,
+   "train_samples_per_second": 91040.986,
+   "train_steps_per_second": 175.798
+ }
trainer_log.jsonl ADDED
@@ -0,0 +1,352 @@
1
+ {"current_steps": 1, "total_steps": 350, "loss": 0.8154, "lr": 2.285714285714286e-06, "epoch": 0.01412180052956752, "percentage": 0.29, "elapsed_time": "0:04:10", "remaining_time": "1 day, 0:16:47"}
2
+ {"current_steps": 2, "total_steps": 350, "loss": 0.8243, "lr": 4.571428571428572e-06, "epoch": 0.02824360105913504, "percentage": 0.57, "elapsed_time": "0:08:04", "remaining_time": "23:24:02"}
3
+ {"current_steps": 3, "total_steps": 350, "loss": 0.8057, "lr": 6.857142857142858e-06, "epoch": 0.04236540158870256, "percentage": 0.86, "elapsed_time": "0:11:58", "remaining_time": "23:04:36"}
4
+ {"current_steps": 4, "total_steps": 350, "loss": 0.7602, "lr": 9.142857142857144e-06, "epoch": 0.05648720211827008, "percentage": 1.14, "elapsed_time": "0:15:50", "remaining_time": "22:50:13"}
5
+ {"current_steps": 5, "total_steps": 350, "loss": 0.7197, "lr": 1.1428571428571429e-05, "epoch": 0.0706090026478376, "percentage": 1.43, "elapsed_time": "0:19:42", "remaining_time": "22:40:19"}
6
+ {"current_steps": 6, "total_steps": 350, "loss": 0.7374, "lr": 1.3714285714285716e-05, "epoch": 0.08473080317740513, "percentage": 1.71, "elapsed_time": "0:23:36", "remaining_time": "22:33:10"}
7
+ {"current_steps": 7, "total_steps": 350, "loss": 0.7325, "lr": 1.6000000000000003e-05, "epoch": 0.09885260370697264, "percentage": 2.0, "elapsed_time": "0:27:28", "remaining_time": "22:26:24"}
8
+ {"current_steps": 8, "total_steps": 350, "loss": 0.7502, "lr": 1.8285714285714288e-05, "epoch": 0.11297440423654016, "percentage": 2.29, "elapsed_time": "0:31:22", "remaining_time": "22:21:09"}
9
+ {"current_steps": 9, "total_steps": 350, "loss": 0.7051, "lr": 2.057142857142857e-05, "epoch": 0.12709620476610767, "percentage": 2.57, "elapsed_time": "0:35:15", "remaining_time": "22:15:48"}
10
+ {"current_steps": 10, "total_steps": 350, "loss": 0.6593, "lr": 2.2857142857142858e-05, "epoch": 0.1412180052956752, "percentage": 2.86, "elapsed_time": "0:39:07", "remaining_time": "22:10:29"}
11
+ {"current_steps": 11, "total_steps": 350, "loss": 0.6276, "lr": 2.5142857142857143e-05, "epoch": 0.1553398058252427, "percentage": 3.14, "elapsed_time": "0:43:01", "remaining_time": "22:05:50"}
12
+ {"current_steps": 12, "total_steps": 350, "loss": 0.6013, "lr": 2.742857142857143e-05, "epoch": 0.16946160635481025, "percentage": 3.43, "elapsed_time": "0:46:54", "remaining_time": "22:01:02"}
13
+ {"current_steps": 13, "total_steps": 350, "loss": 0.5859, "lr": 2.9714285714285717e-05, "epoch": 0.18358340688437777, "percentage": 3.71, "elapsed_time": "0:50:47", "remaining_time": "21:56:29"}
14
+ {"current_steps": 14, "total_steps": 350, "loss": 0.5824, "lr": 3.2000000000000005e-05, "epoch": 0.1977052074139453, "percentage": 4.0, "elapsed_time": "0:54:40", "remaining_time": "21:52:05"}
15
+ {"current_steps": 15, "total_steps": 350, "loss": 0.5708, "lr": 3.4285714285714284e-05, "epoch": 0.2118270079435128, "percentage": 4.29, "elapsed_time": "0:58:32", "remaining_time": "21:47:34"}
16
+ {"current_steps": 16, "total_steps": 350, "loss": 0.5563, "lr": 3.6571428571428576e-05, "epoch": 0.22594880847308033, "percentage": 4.57, "elapsed_time": "1:02:26", "remaining_time": "21:43:27"}
17
+ {"current_steps": 17, "total_steps": 350, "loss": 0.5566, "lr": 3.885714285714286e-05, "epoch": 0.24007060900264784, "percentage": 4.86, "elapsed_time": "1:06:18", "remaining_time": "21:38:56"}
18
+ {"current_steps": 18, "total_steps": 350, "loss": 0.5507, "lr": 4.114285714285714e-05, "epoch": 0.25419240953221534, "percentage": 5.14, "elapsed_time": "1:10:11", "remaining_time": "21:34:43"}
19
+ {"current_steps": 19, "total_steps": 350, "loss": 0.5418, "lr": 4.342857142857143e-05, "epoch": 0.26831421006178285, "percentage": 5.43, "elapsed_time": "1:14:04", "remaining_time": "21:30:28"}
20
+ {"current_steps": 20, "total_steps": 350, "loss": 0.5333, "lr": 4.5714285714285716e-05, "epoch": 0.2824360105913504, "percentage": 5.71, "elapsed_time": "1:17:58", "remaining_time": "21:26:30"}
21
+ {"current_steps": 21, "total_steps": 350, "loss": 0.5305, "lr": 4.8e-05, "epoch": 0.2965578111209179, "percentage": 6.0, "elapsed_time": "1:21:50", "remaining_time": "21:22:12"}
22
+ {"current_steps": 22, "total_steps": 350, "loss": 0.5189, "lr": 5.0285714285714286e-05, "epoch": 0.3106796116504854, "percentage": 6.29, "elapsed_time": "1:25:43", "remaining_time": "21:18:09"}
23
+ {"current_steps": 23, "total_steps": 350, "loss": 0.5198, "lr": 5.257142857142858e-05, "epoch": 0.324801412180053, "percentage": 6.57, "elapsed_time": "1:29:36", "remaining_time": "21:14:03"}
24
+ {"current_steps": 24, "total_steps": 350, "loss": 0.5102, "lr": 5.485714285714286e-05, "epoch": 0.3389232127096205, "percentage": 6.86, "elapsed_time": "1:33:29", "remaining_time": "21:10:01"}
25
+ {"current_steps": 25, "total_steps": 350, "loss": 0.5098, "lr": 5.714285714285715e-05, "epoch": 0.353045013239188, "percentage": 7.14, "elapsed_time": "1:37:22", "remaining_time": "21:05:55"}
26
+ {"current_steps": 26, "total_steps": 350, "loss": 0.5015, "lr": 5.9428571428571434e-05, "epoch": 0.36716681376875554, "percentage": 7.43, "elapsed_time": "1:41:15", "remaining_time": "21:01:53"}
27
+ {"current_steps": 27, "total_steps": 350, "loss": 0.507, "lr": 6.171428571428573e-05, "epoch": 0.38128861429832306, "percentage": 7.71, "elapsed_time": "1:45:09", "remaining_time": "20:57:56"}
28
+ {"current_steps": 28, "total_steps": 350, "loss": 0.5034, "lr": 6.400000000000001e-05, "epoch": 0.3954104148278906, "percentage": 8.0, "elapsed_time": "1:49:01", "remaining_time": "20:53:51"}
29
+ {"current_steps": 29, "total_steps": 350, "loss": 0.4983, "lr": 6.62857142857143e-05, "epoch": 0.4095322153574581, "percentage": 8.29, "elapsed_time": "1:52:54", "remaining_time": "20:49:47"}
30
+ {"current_steps": 30, "total_steps": 350, "loss": 0.508, "lr": 6.857142857142857e-05, "epoch": 0.4236540158870256, "percentage": 8.57, "elapsed_time": "1:56:47", "remaining_time": "20:45:50"}
31
+ {"current_steps": 31, "total_steps": 350, "loss": 0.5016, "lr": 7.085714285714287e-05, "epoch": 0.43777581641659313, "percentage": 8.86, "elapsed_time": "2:00:40", "remaining_time": "20:41:46"}
32
+ {"current_steps": 32, "total_steps": 350, "loss": 0.4939, "lr": 7.314285714285715e-05, "epoch": 0.45189761694616065, "percentage": 9.14, "elapsed_time": "2:04:32", "remaining_time": "20:37:39"}
33
+ {"current_steps": 33, "total_steps": 350, "loss": 0.4924, "lr": 7.542857142857144e-05, "epoch": 0.46601941747572817, "percentage": 9.43, "elapsed_time": "2:08:26", "remaining_time": "20:33:46"}
34
+ {"current_steps": 34, "total_steps": 350, "loss": 0.4902, "lr": 7.771428571428572e-05, "epoch": 0.4801412180052957, "percentage": 9.71, "elapsed_time": "2:12:19", "remaining_time": "20:29:49"}
35
+ {"current_steps": 35, "total_steps": 350, "loss": 0.4852, "lr": 8e-05, "epoch": 0.4942630185348632, "percentage": 10.0, "elapsed_time": "2:16:12", "remaining_time": "20:25:49"}
36
+ {"current_steps": 36, "total_steps": 350, "loss": 0.4853, "lr": 7.999801067823773e-05, "epoch": 0.5083848190644307, "percentage": 10.29, "elapsed_time": "2:20:04", "remaining_time": "20:21:44"}
37
+ {"current_steps": 37, "total_steps": 350, "loss": 0.4764, "lr": 7.999204291082095e-05, "epoch": 0.5225066195939982, "percentage": 10.57, "elapsed_time": "2:23:57", "remaining_time": "20:17:44"}
38
+ {"current_steps": 38, "total_steps": 350, "loss": 0.4809, "lr": 7.998209729134014e-05, "epoch": 0.5366284201235657, "percentage": 10.86, "elapsed_time": "2:27:50", "remaining_time": "20:13:52"}
39
+ {"current_steps": 39, "total_steps": 350, "loss": 0.4719, "lr": 7.996817480904718e-05, "epoch": 0.5507502206531333, "percentage": 11.14, "elapsed_time": "2:31:44", "remaining_time": "20:09:59"}
40
+ {"current_steps": 40, "total_steps": 350, "loss": 0.477, "lr": 7.99502768487569e-05, "epoch": 0.5648720211827007, "percentage": 11.43, "elapsed_time": "2:35:36", "remaining_time": "20:06:00"}
41
+ {"current_steps": 41, "total_steps": 350, "loss": 0.4724, "lr": 7.99284051907094e-05, "epoch": 0.5789938217122683, "percentage": 11.71, "elapsed_time": "2:39:29", "remaining_time": "20:02:04"}
42
+ {"current_steps": 42, "total_steps": 350, "loss": 0.4662, "lr": 7.990256201039297e-05, "epoch": 0.5931156222418358, "percentage": 12.0, "elapsed_time": "2:43:22", "remaining_time": "19:58:04"}
43
+ {"current_steps": 43, "total_steps": 350, "loss": 0.4621, "lr": 7.987274987832764e-05, "epoch": 0.6072374227714034, "percentage": 12.29, "elapsed_time": "2:47:14", "remaining_time": "19:54:01"}
44
+ {"current_steps": 44, "total_steps": 350, "loss": 0.4665, "lr": 7.983897175980957e-05, "epoch": 0.6213592233009708, "percentage": 12.57, "elapsed_time": "2:51:07", "remaining_time": "19:50:03"}
45
+ {"current_steps": 45, "total_steps": 350, "loss": 0.4761, "lr": 7.980123101461606e-05, "epoch": 0.6354810238305384, "percentage": 12.86, "elapsed_time": "2:55:00", "remaining_time": "19:46:08"}
46
+ {"current_steps": 46, "total_steps": 350, "loss": 0.4652, "lr": 7.975953139667141e-05, "epoch": 0.649602824360106, "percentage": 13.14, "elapsed_time": "2:58:52", "remaining_time": "19:42:10"}
47
+ {"current_steps": 47, "total_steps": 350, "loss": 0.4619, "lr": 7.97138770536735e-05, "epoch": 0.6637246248896734, "percentage": 13.43, "elapsed_time": "3:02:46", "remaining_time": "19:38:18"}
48
+ {"current_steps": 48, "total_steps": 350, "loss": 0.4638, "lr": 7.966427252668121e-05, "epoch": 0.677846425419241, "percentage": 13.71, "elapsed_time": "3:06:38", "remaining_time": "19:34:19"}
49
+ {"current_steps": 49, "total_steps": 350, "loss": 0.4527, "lr": 7.961072274966282e-05, "epoch": 0.6919682259488085, "percentage": 14.0, "elapsed_time": "3:10:31", "remaining_time": "19:30:23"}
50
+ {"current_steps": 50, "total_steps": 350, "loss": 0.4571, "lr": 7.955323304900514e-05, "epoch": 0.706090026478376, "percentage": 14.29, "elapsed_time": "3:14:24", "remaining_time": "19:26:28"}
51
+ {"current_steps": 51, "total_steps": 350, "loss": 0.4496, "lr": 7.949180914298383e-05, "epoch": 0.7202118270079435, "percentage": 14.57, "elapsed_time": "3:18:18", "remaining_time": "19:22:36"}
52
+ {"current_steps": 52, "total_steps": 350, "loss": 0.4593, "lr": 7.942645714119452e-05, "epoch": 0.7343336275375111, "percentage": 14.86, "elapsed_time": "3:22:10", "remaining_time": "19:18:37"}
53
+ {"current_steps": 53, "total_steps": 350, "loss": 0.4502, "lr": 7.93571835439452e-05, "epoch": 0.7484554280670785, "percentage": 15.14, "elapsed_time": "3:26:03", "remaining_time": "19:14:44"}
54
+ {"current_steps": 54, "total_steps": 350, "loss": 0.447, "lr": 7.928399524160956e-05, "epoch": 0.7625772285966461, "percentage": 15.43, "elapsed_time": "3:29:57", "remaining_time": "19:10:50"}
55
+ {"current_steps": 55, "total_steps": 350, "loss": 0.4461, "lr": 7.920689951394175e-05, "epoch": 0.7766990291262136, "percentage": 15.71, "elapsed_time": "3:33:49", "remaining_time": "19:06:53"}
56
+ {"current_steps": 56, "total_steps": 350, "loss": 0.4473, "lr": 7.912590402935223e-05, "epoch": 0.7908208296557812, "percentage": 16.0, "elapsed_time": "3:37:42", "remaining_time": "19:02:56"}
57
+ {"current_steps": 57, "total_steps": 350, "loss": 0.4472, "lr": 7.904101684414498e-05, "epoch": 0.8049426301853486, "percentage": 16.29, "elapsed_time": "3:41:35", "remaining_time": "18:59:05"}
58
+ {"current_steps": 58, "total_steps": 350, "loss": 0.4442, "lr": 7.895224640171625e-05, "epoch": 0.8190644307149162, "percentage": 16.57, "elapsed_time": "3:45:28", "remaining_time": "18:55:06"}
59
+ {"current_steps": 59, "total_steps": 350, "loss": 0.4449, "lr": 7.88596015317147e-05, "epoch": 0.8331862312444837, "percentage": 16.86, "elapsed_time": "3:49:20", "remaining_time": "18:51:09"}
60
+ {"current_steps": 60, "total_steps": 350, "loss": 0.4433, "lr": 7.876309144916312e-05, "epoch": 0.8473080317740512, "percentage": 17.14, "elapsed_time": "3:53:14", "remaining_time": "18:47:21"}
61
+ {"current_steps": 61, "total_steps": 350, "loss": 0.4401, "lr": 7.86627257535419e-05, "epoch": 0.8614298323036187, "percentage": 17.43, "elapsed_time": "3:57:08", "remaining_time": "18:43:28"}
62
+ {"current_steps": 62, "total_steps": 350, "loss": 0.4374, "lr": 7.855851442783414e-05, "epoch": 0.8755516328331863, "percentage": 17.71, "elapsed_time": "4:01:00", "remaining_time": "18:39:31"}
63
+ {"current_steps": 63, "total_steps": 350, "loss": 0.4409, "lr": 7.845046783753276e-05, "epoch": 0.8896734333627537, "percentage": 18.0, "elapsed_time": "4:04:53", "remaining_time": "18:35:37"}
64
+ {"current_steps": 64, "total_steps": 350, "loss": 0.4407, "lr": 7.833859672960943e-05, "epoch": 0.9037952338923213, "percentage": 18.29, "elapsed_time": "4:08:46", "remaining_time": "18:31:43"}
65
+ {"current_steps": 65, "total_steps": 350, "loss": 0.4602, "lr": 7.822291223144564e-05, "epoch": 0.9179170344218888, "percentage": 18.57, "elapsed_time": "4:12:40", "remaining_time": "18:27:51"}
66
+ {"current_steps": 66, "total_steps": 350, "loss": 0.4369, "lr": 7.810342584972585e-05, "epoch": 0.9320388349514563, "percentage": 18.86, "elapsed_time": "4:16:34", "remaining_time": "18:24:03"}
67
+ {"current_steps": 67, "total_steps": 350, "loss": 0.4356, "lr": 7.798014946929306e-05, "epoch": 0.9461606354810238, "percentage": 19.14, "elapsed_time": "4:20:28", "remaining_time": "18:20:11"}
68
+ {"current_steps": 68, "total_steps": 350, "loss": 0.4504, "lr": 7.785309535196657e-05, "epoch": 0.9602824360105914, "percentage": 19.43, "elapsed_time": "4:24:21", "remaining_time": "18:16:16"}
69
+ {"current_steps": 69, "total_steps": 350, "loss": 0.4392, "lr": 7.772227613532242e-05, "epoch": 0.9744042365401588, "percentage": 19.71, "elapsed_time": "4:28:13", "remaining_time": "18:12:22"}
70
+ {"current_steps": 70, "total_steps": 350, "loss": 0.4474, "lr": 7.758770483143634e-05, "epoch": 0.9885260370697264, "percentage": 20.0, "elapsed_time": "4:32:06", "remaining_time": "18:08:26"}
71
+ {"current_steps": 71, "total_steps": 350, "loss": 0.5108, "lr": 7.74493948255895e-05, "epoch": 1.002647837599294, "percentage": 20.29, "elapsed_time": "4:36:47", "remaining_time": "18:07:41"}
72
+ {"current_steps": 72, "total_steps": 350, "loss": 0.4227, "lr": 7.730735987493711e-05, "epoch": 1.0167696381288613, "percentage": 20.57, "elapsed_time": "4:40:40", "remaining_time": "18:03:41"}
73
+ {"current_steps": 73, "total_steps": 350, "loss": 0.419, "lr": 7.71616141071401e-05, "epoch": 1.030891438658429, "percentage": 20.86, "elapsed_time": "4:44:33", "remaining_time": "17:59:46"}
74
+ {"current_steps": 74, "total_steps": 350, "loss": 0.4182, "lr": 7.701217201895987e-05, "epoch": 1.0450132391879965, "percentage": 21.14, "elapsed_time": "4:48:26", "remaining_time": "17:55:47"}
75
+ {"current_steps": 75, "total_steps": 350, "loss": 0.4147, "lr": 7.685904847481631e-05, "epoch": 1.059135039717564, "percentage": 21.43, "elapsed_time": "4:52:20", "remaining_time": "17:51:53"}
76
+ {"current_steps": 76, "total_steps": 350, "loss": 0.4192, "lr": 7.670225870530936e-05, "epoch": 1.0732568402471314, "percentage": 21.71, "elapsed_time": "4:56:13", "remaining_time": "17:47:59"}
77
+ {"current_steps": 77, "total_steps": 350, "loss": 0.4193, "lr": 7.654181830570404e-05, "epoch": 1.087378640776699, "percentage": 22.0, "elapsed_time": "5:00:06", "remaining_time": "17:43:59"}
78
+ {"current_steps": 78, "total_steps": 350, "loss": 0.4126, "lr": 7.637774323437929e-05, "epoch": 1.1015004413062666, "percentage": 22.29, "elapsed_time": "5:03:59", "remaining_time": "17:40:03"}
79
+ {"current_steps": 79, "total_steps": 350, "loss": 0.4193, "lr": 7.62100498112406e-05, "epoch": 1.1156222418358341, "percentage": 22.57, "elapsed_time": "5:07:51", "remaining_time": "17:36:05"}
80
+ {"current_steps": 80, "total_steps": 350, "loss": 0.4069, "lr": 7.603875471609677e-05, "epoch": 1.1297440423654015, "percentage": 22.86, "elapsed_time": "5:11:44", "remaining_time": "17:32:07"}
81
+ {"current_steps": 81, "total_steps": 350, "loss": 0.4187, "lr": 7.586387498700084e-05, "epoch": 1.143865842894969, "percentage": 23.14, "elapsed_time": "5:15:37", "remaining_time": "17:28:11"}
82
+ {"current_steps": 82, "total_steps": 350, "loss": 0.4101, "lr": 7.568542801855535e-05, "epoch": 1.1579876434245366, "percentage": 23.43, "elapsed_time": "5:19:29", "remaining_time": "17:24:12"}
83
+ {"current_steps": 83, "total_steps": 350, "loss": 0.4074, "lr": 7.550343156018217e-05, "epoch": 1.1721094439541042, "percentage": 23.71, "elapsed_time": "5:23:21", "remaining_time": "17:20:13"}
84
+ {"current_steps": 84, "total_steps": 350, "loss": 0.4105, "lr": 7.531790371435709e-05, "epoch": 1.1862312444836718, "percentage": 24.0, "elapsed_time": "5:27:13", "remaining_time": "17:16:12"}
85
+ {"current_steps": 85, "total_steps": 350, "loss": 0.4131, "lr": 7.512886293480914e-05, "epoch": 1.2003530450132391, "percentage": 24.29, "elapsed_time": "5:31:06", "remaining_time": "17:12:15"}
86
+ {"current_steps": 86, "total_steps": 350, "loss": 0.4123, "lr": 7.49363280246852e-05, "epoch": 1.2144748455428067, "percentage": 24.57, "elapsed_time": "5:34:58", "remaining_time": "17:08:17"}
87
+ {"current_steps": 87, "total_steps": 350, "loss": 0.4199, "lr": 7.474031813467956e-05, "epoch": 1.2285966460723743, "percentage": 24.86, "elapsed_time": "5:38:51", "remaining_time": "17:04:22"}
88
+ {"current_steps": 88, "total_steps": 350, "loss": 0.4152, "lr": 7.454085276112925e-05, "epoch": 1.2427184466019416, "percentage": 25.14, "elapsed_time": "5:42:43", "remaining_time": "17:00:23"}
89
+ {"current_steps": 89, "total_steps": 350, "loss": 0.4064, "lr": 7.433795174407465e-05, "epoch": 1.2568402471315092, "percentage": 25.43, "elapsed_time": "5:46:35", "remaining_time": "16:56:25"}
90
+ {"current_steps": 90, "total_steps": 350, "loss": 0.409, "lr": 7.413163526528623e-05, "epoch": 1.2709620476610768, "percentage": 25.71, "elapsed_time": "5:50:27", "remaining_time": "16:52:26"}
91
+ {"current_steps": 91, "total_steps": 350, "loss": 0.4054, "lr": 7.392192384625704e-05, "epoch": 1.2850838481906444, "percentage": 26.0, "elapsed_time": "5:54:19", "remaining_time": "16:48:28"}
92
+ {"current_steps": 92, "total_steps": 350, "loss": 0.4099, "lr": 7.370883834616157e-05, "epoch": 1.299205648720212, "percentage": 26.29, "elapsed_time": "5:58:11", "remaining_time": "16:44:30"}
93
+ {"current_steps": 93, "total_steps": 350, "loss": 0.4084, "lr": 7.349239995978095e-05, "epoch": 1.3133274492497793, "percentage": 26.57, "elapsed_time": "6:02:03", "remaining_time": "16:40:32"}
94
+ {"current_steps": 94, "total_steps": 350, "loss": 0.4048, "lr": 7.327263021539478e-05, "epoch": 1.3274492497793469, "percentage": 26.86, "elapsed_time": "6:05:55", "remaining_time": "16:36:34"}
95
+ {"current_steps": 95, "total_steps": 350, "loss": 0.4068, "lr": 7.30495509726398e-05, "epoch": 1.3415710503089144, "percentage": 27.14, "elapsed_time": "6:09:47", "remaining_time": "16:32:35"}
96
+ {"current_steps": 96, "total_steps": 350, "loss": 0.4034, "lr": 7.282318442033567e-05, "epoch": 1.3556928508384818, "percentage": 27.43, "elapsed_time": "6:13:40", "remaining_time": "16:28:40"}
97
+ {"current_steps": 97, "total_steps": 350, "loss": 0.4078, "lr": 7.259355307427781e-05, "epoch": 1.3698146513680494, "percentage": 27.71, "elapsed_time": "6:17:33", "remaining_time": "16:24:46"}
98
+ {"current_steps": 98, "total_steps": 350, "loss": 0.4084, "lr": 7.236067977499791e-05, "epoch": 1.383936451897617, "percentage": 28.0, "elapsed_time": "6:21:25", "remaining_time": "16:20:49"}
99
+ {"current_steps": 99, "total_steps": 350, "loss": 0.4069, "lr": 7.212458768549208e-05, "epoch": 1.3980582524271845, "percentage": 28.29, "elapsed_time": "6:25:18", "remaining_time": "16:16:52"}
100
+ {"current_steps": 100, "total_steps": 350, "loss": 0.4047, "lr": 7.188530028891691e-05, "epoch": 1.412180052956752, "percentage": 28.57, "elapsed_time": "6:29:11", "remaining_time": "16:12:58"}
101
+ {"current_steps": 101, "total_steps": 350, "loss": 0.4075, "lr": 7.164284138625367e-05, "epoch": 1.4263018534863194, "percentage": 28.86, "elapsed_time": "6:33:03", "remaining_time": "16:09:01"}
102
+ {"current_steps": 102, "total_steps": 350, "loss": 0.4036, "lr": 7.13972350939409e-05, "epoch": 1.440423654015887, "percentage": 29.14, "elapsed_time": "6:36:55", "remaining_time": "16:05:05"}
103
+ {"current_steps": 103, "total_steps": 350, "loss": 0.4068, "lr": 7.114850584147577e-05, "epoch": 1.4545454545454546, "percentage": 29.43, "elapsed_time": "6:40:47", "remaining_time": "16:01:08"}
104
+ {"current_steps": 104, "total_steps": 350, "loss": 0.4053, "lr": 7.089667836898399e-05, "epoch": 1.468667255075022, "percentage": 29.71, "elapsed_time": "6:44:40", "remaining_time": "15:57:11"}
105
+ {"current_steps": 105, "total_steps": 350, "loss": 0.4002, "lr": 7.064177772475912e-05, "epoch": 1.4827890556045895, "percentage": 30.0, "elapsed_time": "6:48:33", "remaining_time": "15:53:17"}
106
+ {"current_steps": 106, "total_steps": 350, "loss": 0.4013, "lr": 7.038382926277113e-05, "epoch": 1.496910856134157, "percentage": 30.29, "elapsed_time": "6:52:26", "remaining_time": "15:49:23"}
107
+ {"current_steps": 107, "total_steps": 350, "loss": 0.404, "lr": 7.012285864014445e-05, "epoch": 1.5110326566637247, "percentage": 30.57, "elapsed_time": "6:56:19", "remaining_time": "15:45:29"}
108
+ {"current_steps": 108, "total_steps": 350, "loss": 0.3992, "lr": 6.985889181460602e-05, "epoch": 1.5251544571932922, "percentage": 30.86, "elapsed_time": "7:00:12", "remaining_time": "15:41:33"}
109
+ {"current_steps": 109, "total_steps": 350, "loss": 0.4022, "lr": 6.959195504190337e-05, "epoch": 1.5392762577228596, "percentage": 31.14, "elapsed_time": "7:04:05", "remaining_time": "15:37:40"}
110
+ {"current_steps": 110, "total_steps": 350, "loss": 0.3933, "lr": 6.932207487319305e-05, "epoch": 1.5533980582524272, "percentage": 31.43, "elapsed_time": "7:07:58", "remaining_time": "15:33:45"}
111
+ {"current_steps": 111, "total_steps": 350, "loss": 0.4014, "lr": 6.904927815239972e-05, "epoch": 1.5675198587819947, "percentage": 31.71, "elapsed_time": "7:11:51", "remaining_time": "15:29:51"}
112
+ {"current_steps": 112, "total_steps": 350, "loss": 0.4014, "lr": 6.877359201354606e-05, "epoch": 1.581641659311562, "percentage": 32.0, "elapsed_time": "7:15:43", "remaining_time": "15:25:55"}
113
+ {"current_steps": 113, "total_steps": 350, "loss": 0.4021, "lr": 6.84950438780538e-05, "epoch": 1.5957634598411299, "percentage": 32.29, "elapsed_time": "7:19:36", "remaining_time": "15:22:00"}
114
+ {"current_steps": 114, "total_steps": 350, "loss": 0.4003, "lr": 6.821366145201636e-05, "epoch": 1.6098852603706972, "percentage": 32.57, "elapsed_time": "7:23:29", "remaining_time": "15:18:05"}
115
+ {"current_steps": 115, "total_steps": 350, "loss": 0.3992, "lr": 6.792947272344292e-05, "epoch": 1.6240070609002648, "percentage": 32.86, "elapsed_time": "7:27:22", "remaining_time": "15:14:11"}
116
+ {"current_steps": 116, "total_steps": 350, "loss": 0.3982, "lr": 6.76425059594746e-05, "epoch": 1.6381288614298324, "percentage": 33.14, "elapsed_time": "7:31:15", "remaining_time": "15:10:17"}
117
+ {"current_steps": 117, "total_steps": 350, "loss": 0.4053, "lr": 6.73527897035728e-05, "epoch": 1.6522506619593997, "percentage": 33.43, "elapsed_time": "7:35:08", "remaining_time": "15:06:23"}
118
+ {"current_steps": 118, "total_steps": 350, "loss": 0.4024, "lr": 6.706035277268022e-05, "epoch": 1.6663724624889673, "percentage": 33.71, "elapsed_time": "7:39:01", "remaining_time": "15:02:28"}
119
+ {"current_steps": 119, "total_steps": 350, "loss": 0.3945, "lr": 6.676522425435433e-05, "epoch": 1.6804942630185349, "percentage": 34.0, "elapsed_time": "7:42:54", "remaining_time": "14:58:34"}
120
+ {"current_steps": 120, "total_steps": 350, "loss": 0.3984, "lr": 6.646743350387438e-05, "epoch": 1.6946160635481022, "percentage": 34.29, "elapsed_time": "7:46:47", "remaining_time": "14:54:40"}
121
+ {"current_steps": 121, "total_steps": 350, "loss": 0.4021, "lr": 6.616701014132138e-05, "epoch": 1.70873786407767, "percentage": 34.57, "elapsed_time": "7:50:39", "remaining_time": "14:50:45"}
122
+ {"current_steps": 122, "total_steps": 350, "loss": 0.4026, "lr": 6.586398404863198e-05, "epoch": 1.7228596646072374, "percentage": 34.86, "elapsed_time": "7:54:32", "remaining_time": "14:46:51"}
123
+ {"current_steps": 123, "total_steps": 350, "loss": 0.3925, "lr": 6.555838536662624e-05, "epoch": 1.736981465136805, "percentage": 35.14, "elapsed_time": "7:58:25", "remaining_time": "14:42:57"}
124
+ {"current_steps": 124, "total_steps": 350, "loss": 0.3976, "lr": 6.525024449200956e-05, "epoch": 1.7511032656663725, "percentage": 35.43, "elapsed_time": "8:02:18", "remaining_time": "14:39:02"}
125
+ {"current_steps": 125, "total_steps": 350, "loss": 0.3982, "lr": 6.493959207434934e-05, "epoch": 1.7652250661959399, "percentage": 35.71, "elapsed_time": "8:06:11", "remaining_time": "14:35:08"}
126
+ {"current_steps": 126, "total_steps": 350, "loss": 0.3947, "lr": 6.462645901302633e-05, "epoch": 1.7793468667255075, "percentage": 36.0, "elapsed_time": "8:10:04", "remaining_time": "14:31:14"}
127
+ {"current_steps": 127, "total_steps": 350, "loss": 0.4015, "lr": 6.431087645416121e-05, "epoch": 1.793468667255075, "percentage": 36.29, "elapsed_time": "8:13:58", "remaining_time": "14:27:21"}
128
+ {"current_steps": 128, "total_steps": 350, "loss": 0.3959, "lr": 6.399287578751656e-05, "epoch": 1.8075904677846424, "percentage": 36.57, "elapsed_time": "8:17:51", "remaining_time": "14:23:28"}
129
+ {"current_steps": 129, "total_steps": 350, "loss": 0.3975, "lr": 6.367248864337471e-05, "epoch": 1.8217122683142102, "percentage": 36.86, "elapsed_time": "8:21:44", "remaining_time": "14:19:34"}
130
+ {"current_steps": 130, "total_steps": 350, "loss": 0.3961, "lr": 6.334974688939161e-05, "epoch": 1.8358340688437775, "percentage": 37.14, "elapsed_time": "8:25:37", "remaining_time": "14:15:40"}
131
+ {"current_steps": 131, "total_steps": 350, "loss": 0.3923, "lr": 6.302468262742695e-05, "epoch": 1.849955869373345, "percentage": 37.43, "elapsed_time": "8:29:30", "remaining_time": "14:11:47"}
132
+ {"current_steps": 132, "total_steps": 350, "loss": 0.3895, "lr": 6.269732819035128e-05, "epoch": 1.8640776699029127, "percentage": 37.71, "elapsed_time": "8:33:23", "remaining_time": "14:07:53"}
133
+ {"current_steps": 133, "total_steps": 350, "loss": 0.3933, "lr": 6.236771613882987e-05, "epoch": 1.87819947043248, "percentage": 38.0, "elapsed_time": "8:37:17", "remaining_time": "14:04:00"}
134
+ {"current_steps": 134, "total_steps": 350, "loss": 0.3865, "lr": 6.20358792580841e-05, "epoch": 1.8923212709620476, "percentage": 38.29, "elapsed_time": "8:41:10", "remaining_time": "14:00:05"}
135
+ {"current_steps": 135, "total_steps": 350, "loss": 0.3985, "lr": 6.170185055463039e-05, "epoch": 1.9064430714916152, "percentage": 38.57, "elapsed_time": "8:45:03", "remaining_time": "13:56:11"}
136
+ {"current_steps": 136, "total_steps": 350, "loss": 0.3972, "lr": 6.136566325299715e-05, "epoch": 1.9205648720211828, "percentage": 38.86, "elapsed_time": "8:48:55", "remaining_time": "13:52:16"}
137
+ {"current_steps": 137, "total_steps": 350, "loss": 0.398, "lr": 6.102735079242019e-05, "epoch": 1.9346866725507503, "percentage": 39.14, "elapsed_time": "8:52:48", "remaining_time": "13:48:22"}
138
+ {"current_steps": 138, "total_steps": 350, "loss": 0.3957, "lr": 6.068694682351651e-05, "epoch": 1.9488084730803177, "percentage": 39.43, "elapsed_time": "8:56:41", "remaining_time": "13:44:29"}
139
+ {"current_steps": 139, "total_steps": 350, "loss": 0.3953, "lr": 6.0344485204937274e-05, "epoch": 1.9629302736098853, "percentage": 39.71, "elapsed_time": "9:00:34", "remaining_time": "13:40:34"}
140
+ {"current_steps": 140, "total_steps": 350, "loss": 0.3955, "lr": 6.000000000000001e-05, "epoch": 1.9770520741394528, "percentage": 40.0, "elapsed_time": "9:04:26", "remaining_time": "13:36:39"}
141
+ {"current_steps": 141, "total_steps": 350, "loss": 0.4096, "lr": 5.965352547330046e-05, "epoch": 1.9911738746690202, "percentage": 40.29, "elapsed_time": "9:08:14", "remaining_time": "13:32:38"}
142
+ {"current_steps": 142, "total_steps": 350, "loss": 0.4441, "lr": 5.930509608730444e-05, "epoch": 2.005295675198588, "percentage": 40.57, "elapsed_time": "9:12:34", "remaining_time": "13:29:23"}
143
+ {"current_steps": 143, "total_steps": 350, "loss": 0.3728, "lr": 5.895474649891995e-05, "epoch": 2.0194174757281553, "percentage": 40.86, "elapsed_time": "9:16:26", "remaining_time": "13:25:28"}
144
+ {"current_steps": 144, "total_steps": 350, "loss": 0.3745, "lr": 5.860251155605003e-05, "epoch": 2.0335392762577227, "percentage": 41.14, "elapsed_time": "9:20:19", "remaining_time": "13:21:34"}
145
+ {"current_steps": 145, "total_steps": 350, "loss": 0.3832, "lr": 5.824842629412653e-05, "epoch": 2.0476610767872905, "percentage": 41.43, "elapsed_time": "9:24:12", "remaining_time": "13:17:39"}
146
+ {"current_steps": 146, "total_steps": 350, "loss": 0.3779, "lr": 5.7892525932625305e-05, "epoch": 2.061782877316858, "percentage": 41.71, "elapsed_time": "9:28:04", "remaining_time": "13:13:44"}
147
+ {"current_steps": 147, "total_steps": 350, "loss": 0.3718, "lr": 5.75348458715631e-05, "epoch": 2.0759046778464256, "percentage": 42.0, "elapsed_time": "9:31:56", "remaining_time": "13:09:49"}
148
+ {"current_steps": 148, "total_steps": 350, "loss": 0.3699, "lr": 5.7175421687976374e-05, "epoch": 2.090026478375993, "percentage": 42.29, "elapsed_time": "9:35:49", "remaining_time": "13:05:55"}
149
+ {"current_steps": 149, "total_steps": 350, "loss": 0.367, "lr": 5.681428913238263e-05, "epoch": 2.1041482789055603, "percentage": 42.57, "elapsed_time": "9:39:42", "remaining_time": "13:02:00"}
150
+ {"current_steps": 150, "total_steps": 350, "loss": 0.3752, "lr": 5.645148412522447e-05, "epoch": 2.118270079435128, "percentage": 42.86, "elapsed_time": "9:43:34", "remaining_time": "12:58:05"}
151
+ {"current_steps": 151, "total_steps": 350, "loss": 0.3657, "lr": 5.60870427532967e-05, "epoch": 2.1323918799646955, "percentage": 43.14, "elapsed_time": "9:47:27", "remaining_time": "12:54:11"}
152
+ {"current_steps": 152, "total_steps": 350, "loss": 0.3701, "lr": 5.572100126615695e-05, "epoch": 2.146513680494263, "percentage": 43.43, "elapsed_time": "9:51:20", "remaining_time": "12:50:17"}
153
+ {"current_steps": 153, "total_steps": 350, "loss": 0.364, "lr": 5.535339607252003e-05, "epoch": 2.1606354810238306, "percentage": 43.71, "elapsed_time": "9:55:13", "remaining_time": "12:46:23"}
154
+ {"current_steps": 154, "total_steps": 350, "loss": 0.3641, "lr": 5.4984263736636494e-05, "epoch": 2.174757281553398, "percentage": 44.0, "elapsed_time": "9:59:06", "remaining_time": "12:42:29"}
155
+ {"current_steps": 155, "total_steps": 350, "loss": 0.3634, "lr": 5.461364097465581e-05, "epoch": 2.1888790820829658, "percentage": 44.29, "elapsed_time": "10:02:59", "remaining_time": "12:38:36"}
156
+ {"current_steps": 156, "total_steps": 350, "loss": 0.3676, "lr": 5.424156465097428e-05, "epoch": 2.203000882612533, "percentage": 44.57, "elapsed_time": "10:06:52", "remaining_time": "12:34:42"}
157
+ {"current_steps": 157, "total_steps": 350, "loss": 0.3649, "lr": 5.38680717745683e-05, "epoch": 2.2171226831421005, "percentage": 44.86, "elapsed_time": "10:10:45", "remaining_time": "12:30:47"}
158
+ {"current_steps": 158, "total_steps": 350, "loss": 0.3646, "lr": 5.349319949531321e-05, "epoch": 2.2312444836716683, "percentage": 45.14, "elapsed_time": "10:14:38", "remaining_time": "12:26:54"}
159
+ {"current_steps": 159, "total_steps": 350, "loss": 0.3682, "lr": 5.3116985100288185e-05, "epoch": 2.2453662842012356, "percentage": 45.43, "elapsed_time": "10:18:31", "remaining_time": "12:23:00"}
160
+ {"current_steps": 160, "total_steps": 350, "loss": 0.3606, "lr": 5.2739466010067385e-05, "epoch": 2.259488084730803, "percentage": 45.71, "elapsed_time": "10:22:24", "remaining_time": "12:19:06"}
161
+ {"current_steps": 161, "total_steps": 350, "loss": 0.3638, "lr": 5.23606797749979e-05, "epoch": 2.2736098852603708, "percentage": 46.0, "elapsed_time": "10:26:17", "remaining_time": "12:15:12"}
162
+ {"current_steps": 162, "total_steps": 350, "loss": 0.3667, "lr": 5.1980664071464776e-05, "epoch": 2.287731685789938, "percentage": 46.29, "elapsed_time": "10:30:09", "remaining_time": "12:11:17"}
163
+ {"current_steps": 163, "total_steps": 350, "loss": 0.3696, "lr": 5.159945669814345e-05, "epoch": 2.301853486319506, "percentage": 46.57, "elapsed_time": "10:34:01", "remaining_time": "12:07:23"}
164
+ {"current_steps": 164, "total_steps": 350, "loss": 0.3606, "lr": 5.121709557224011e-05, "epoch": 2.3159752868490733, "percentage": 46.86, "elapsed_time": "10:37:54", "remaining_time": "12:03:28"}
165
+ {"current_steps": 165, "total_steps": 350, "loss": 0.365, "lr": 5.0833618725720214e-05, "epoch": 2.3300970873786406, "percentage": 47.14, "elapsed_time": "10:41:47", "remaining_time": "11:59:35"}
166
+ {"current_steps": 166, "total_steps": 350, "loss": 0.3667, "lr": 5.044906430152554e-05, "epoch": 2.3442188879082084, "percentage": 47.43, "elapsed_time": "10:45:41", "remaining_time": "11:55:42"}
167
+ {"current_steps": 167, "total_steps": 350, "loss": 0.3699, "lr": 5.006347054978035e-05, "epoch": 2.358340688437776, "percentage": 47.71, "elapsed_time": "10:49:33", "remaining_time": "11:51:47"}
168
+ {"current_steps": 168, "total_steps": 350, "loss": 0.3587, "lr": 4.967687582398671e-05, "epoch": 2.3724624889673436, "percentage": 48.0, "elapsed_time": "10:53:26", "remaining_time": "11:47:53"}
169
+ {"current_steps": 169, "total_steps": 350, "loss": 0.3636, "lr": 4.9289318577209706e-05, "epoch": 2.386584289496911, "percentage": 48.29, "elapsed_time": "10:57:17", "remaining_time": "11:43:58"}
170
+ {"current_steps": 170, "total_steps": 350, "loss": 0.3605, "lr": 4.890083735825258e-05, "epoch": 2.4007060900264783, "percentage": 48.57, "elapsed_time": "11:01:11", "remaining_time": "11:40:05"}
171
+ {"current_steps": 171, "total_steps": 350, "loss": 0.3648, "lr": 4.851147080782249e-05, "epoch": 2.414827890556046, "percentage": 48.86, "elapsed_time": "11:05:04", "remaining_time": "11:36:11"}
172
+ {"current_steps": 172, "total_steps": 350, "loss": 0.3606, "lr": 4.812125765468705e-05, "epoch": 2.4289496910856134, "percentage": 49.14, "elapsed_time": "11:08:57", "remaining_time": "11:32:17"}
173
+ {"current_steps": 173, "total_steps": 350, "loss": 0.3637, "lr": 4.773023671182213e-05, "epoch": 2.443071491615181, "percentage": 49.43, "elapsed_time": "11:12:49", "remaining_time": "11:28:23"}
174
+ {"current_steps": 174, "total_steps": 350, "loss": 0.3636, "lr": 4.73384468725513e-05, "epoch": 2.4571932921447486, "percentage": 49.71, "elapsed_time": "11:16:42", "remaining_time": "11:24:29"}
175
+ {"current_steps": 175, "total_steps": 350, "loss": 0.3645, "lr": 4.694592710667723e-05, "epoch": 2.471315092674316, "percentage": 50.0, "elapsed_time": "11:20:34", "remaining_time": "11:20:34"}
176
+ {"current_steps": 176, "total_steps": 350, "loss": 0.3605, "lr": 4.6552716456605514e-05, "epoch": 2.4854368932038833, "percentage": 50.29, "elapsed_time": "11:24:27", "remaining_time": "11:16:40"}
177
+ {"current_steps": 177, "total_steps": 350, "loss": 0.3562, "lr": 4.615885403346134e-05, "epoch": 2.499558693733451, "percentage": 50.57, "elapsed_time": "11:28:20", "remaining_time": "11:12:46"}
178
+ {"current_steps": 178, "total_steps": 350, "loss": 0.3636, "lr": 4.576437901319921e-05, "epoch": 2.5136804942630184, "percentage": 50.86, "elapsed_time": "11:32:12", "remaining_time": "11:08:52"}
179
+ {"current_steps": 179, "total_steps": 350, "loss": 0.3648, "lr": 4.5369330632706223e-05, "epoch": 2.5278022947925862, "percentage": 51.14, "elapsed_time": "11:36:05", "remaining_time": "11:04:59"}
180
+ {"current_steps": 180, "total_steps": 350, "loss": 0.3612, "lr": 4.4973748185899416e-05, "epoch": 2.5419240953221536, "percentage": 51.43, "elapsed_time": "11:39:58", "remaining_time": "11:01:05"}
181
+ {"current_steps": 181, "total_steps": 350, "loss": 0.3677, "lr": 4.457767101981728e-05, "epoch": 2.556045895851721, "percentage": 51.71, "elapsed_time": "11:43:50", "remaining_time": "10:57:10"}
182
+ {"current_steps": 182, "total_steps": 350, "loss": 0.3626, "lr": 4.418113853070614e-05, "epoch": 2.5701676963812887, "percentage": 52.0, "elapsed_time": "11:47:42", "remaining_time": "10:53:16"}
183
+ {"current_steps": 183, "total_steps": 350, "loss": 0.364, "lr": 4.378419016010149e-05, "epoch": 2.584289496910856, "percentage": 52.29, "elapsed_time": "11:51:35", "remaining_time": "10:49:22"}
184
+ {"current_steps": 184, "total_steps": 350, "loss": 0.3615, "lr": 4.338686539090493e-05, "epoch": 2.598411297440424, "percentage": 52.57, "elapsed_time": "11:55:28", "remaining_time": "10:45:29"}
185
+ {"current_steps": 185, "total_steps": 350, "loss": 0.3596, "lr": 4.298920374345698e-05, "epoch": 2.6125330979699912, "percentage": 52.86, "elapsed_time": "11:59:21", "remaining_time": "10:41:35"}
186
+ {"current_steps": 186, "total_steps": 350, "loss": 0.3625, "lr": 4.259124477160607e-05, "epoch": 2.6266548984995586, "percentage": 53.14, "elapsed_time": "12:03:13", "remaining_time": "10:37:40"}
187
+ {"current_steps": 187, "total_steps": 350, "loss": 0.3617, "lr": 4.219302805877441e-05, "epoch": 2.6407766990291264, "percentage": 53.43, "elapsed_time": "12:07:05", "remaining_time": "10:33:46"}
188
+ {"current_steps": 188, "total_steps": 350, "loss": 0.3666, "lr": 4.17945932140206e-05, "epoch": 2.6548984995586937, "percentage": 53.71, "elapsed_time": "12:10:58", "remaining_time": "10:29:52"}
189
+ {"current_steps": 189, "total_steps": 350, "loss": 0.3629, "lr": 4.139597986810005e-05, "epoch": 2.6690203000882615, "percentage": 54.0, "elapsed_time": "12:14:50", "remaining_time": "10:25:58"}
190
+ {"current_steps": 190, "total_steps": 350, "loss": 0.3628, "lr": 4.0997227669522924e-05, "epoch": 2.683142100617829, "percentage": 54.29, "elapsed_time": "12:18:43", "remaining_time": "10:22:05"}
191
+ {"current_steps": 191, "total_steps": 350, "loss": 0.3638, "lr": 4.059837628061055e-05, "epoch": 2.6972639011473962, "percentage": 54.57, "elapsed_time": "12:22:36", "remaining_time": "10:18:11"}
192
+ {"current_steps": 192, "total_steps": 350, "loss": 0.3614, "lr": 4.019946537355033e-05, "epoch": 2.7113857016769636, "percentage": 54.86, "elapsed_time": "12:26:29", "remaining_time": "10:14:17"}
193
+ {"current_steps": 193, "total_steps": 350, "loss": 0.3634, "lr": 3.9800534626449683e-05, "epoch": 2.7255075022065314, "percentage": 55.14, "elapsed_time": "12:30:21", "remaining_time": "10:10:24"}
194
+ {"current_steps": 194, "total_steps": 350, "loss": 0.3587, "lr": 3.940162371938947e-05, "epoch": 2.7396293027360987, "percentage": 55.43, "elapsed_time": "12:34:14", "remaining_time": "10:06:30"}
195
+ {"current_steps": 195, "total_steps": 350, "loss": 0.3599, "lr": 3.9002772330477096e-05, "epoch": 2.7537511032656665, "percentage": 55.71, "elapsed_time": "12:38:07", "remaining_time": "10:02:36"}
196
+ {"current_steps": 196, "total_steps": 350, "loss": 0.3575, "lr": 3.860402013189998e-05, "epoch": 2.767872903795234, "percentage": 56.0, "elapsed_time": "12:42:00", "remaining_time": "9:58:43"}
197
+ {"current_steps": 197, "total_steps": 350, "loss": 0.3648, "lr": 3.820540678597942e-05, "epoch": 2.7819947043248012, "percentage": 56.29, "elapsed_time": "12:45:53", "remaining_time": "9:54:49"}
198
+ {"current_steps": 198, "total_steps": 350, "loss": 0.3609, "lr": 3.78069719412256e-05, "epoch": 2.796116504854369, "percentage": 56.57, "elapsed_time": "12:49:46", "remaining_time": "9:50:56"}
199
+ {"current_steps": 199, "total_steps": 350, "loss": 0.3608, "lr": 3.740875522839393e-05, "epoch": 2.8102383053839364, "percentage": 56.86, "elapsed_time": "12:53:38", "remaining_time": "9:47:02"}
200
+ {"current_steps": 200, "total_steps": 350, "loss": 0.3549, "lr": 3.7010796256543034e-05, "epoch": 2.824360105913504, "percentage": 57.14, "elapsed_time": "12:57:31", "remaining_time": "9:43:08"}
201
+ {"current_steps": 201, "total_steps": 350, "loss": 0.3593, "lr": 3.661313460909507e-05, "epoch": 2.8384819064430715, "percentage": 57.43, "elapsed_time": "13:01:24", "remaining_time": "9:39:15"}
202
+ {"current_steps": 202, "total_steps": 350, "loss": 0.3608, "lr": 3.621580983989852e-05, "epoch": 2.852603706972639, "percentage": 57.71, "elapsed_time": "13:05:17", "remaining_time": "9:35:21"}
203
+ {"current_steps": 203, "total_steps": 350, "loss": 0.3605, "lr": 3.581886146929387e-05, "epoch": 2.8667255075022067, "percentage": 58.0, "elapsed_time": "13:09:09", "remaining_time": "9:31:27"}
204
+ {"current_steps": 204, "total_steps": 350, "loss": 0.3582, "lr": 3.542232898018273e-05, "epoch": 2.880847308031774, "percentage": 58.29, "elapsed_time": "13:13:01", "remaining_time": "9:27:33"}
205
+ {"current_steps": 205, "total_steps": 350, "loss": 0.359, "lr": 3.5026251814100604e-05, "epoch": 2.894969108561342, "percentage": 58.57, "elapsed_time": "13:16:53", "remaining_time": "9:23:39"}
206
+ {"current_steps": 206, "total_steps": 350, "loss": 0.3562, "lr": 3.4630669367293797e-05, "epoch": 2.909090909090909, "percentage": 58.86, "elapsed_time": "13:20:46", "remaining_time": "9:19:45"}
207
+ {"current_steps": 207, "total_steps": 350, "loss": 0.3641, "lr": 3.4235620986800806e-05, "epoch": 2.9232127096204765, "percentage": 59.14, "elapsed_time": "13:24:39", "remaining_time": "9:15:52"}
208
+ {"current_steps": 208, "total_steps": 350, "loss": 0.361, "lr": 3.384114596653866e-05, "epoch": 2.937334510150044, "percentage": 59.43, "elapsed_time": "13:28:32", "remaining_time": "9:11:58"}
209
+ {"current_steps": 209, "total_steps": 350, "loss": 0.3586, "lr": 3.344728354339449e-05, "epoch": 2.9514563106796117, "percentage": 59.71, "elapsed_time": "13:32:24", "remaining_time": "9:08:05"}
210
+ {"current_steps": 210, "total_steps": 350, "loss": 0.3559, "lr": 3.305407289332279e-05, "epoch": 2.965578111209179, "percentage": 60.0, "elapsed_time": "13:36:16", "remaining_time": "9:04:11"}
211
+ {"current_steps": 211, "total_steps": 350, "loss": 0.3631, "lr": 3.266155312744871e-05, "epoch": 2.979699911738747, "percentage": 60.29, "elapsed_time": "13:40:08", "remaining_time": "9:00:17"}
212
+ {"current_steps": 212, "total_steps": 350, "loss": 0.3927, "lr": 3.226976328817788e-05, "epoch": 2.993821712268314, "percentage": 60.57, "elapsed_time": "13:43:56", "remaining_time": "8:56:20"}
213
+ {"current_steps": 213, "total_steps": 350, "loss": 0.3822, "lr": 3.187874234531296e-05, "epoch": 3.0079435127978815, "percentage": 60.86, "elapsed_time": "13:48:16", "remaining_time": "8:52:44"}
214
+ {"current_steps": 214, "total_steps": 350, "loss": 0.3393, "lr": 3.1488529192177526e-05, "epoch": 3.0220653133274493, "percentage": 61.14, "elapsed_time": "13:52:08", "remaining_time": "8:48:50"}
215
+ {"current_steps": 215, "total_steps": 350, "loss": 0.3373, "lr": 3.109916264174743e-05, "epoch": 3.0361871138570167, "percentage": 61.43, "elapsed_time": "13:56:00", "remaining_time": "8:44:56"}
216
+ {"current_steps": 216, "total_steps": 350, "loss": 0.3371, "lr": 3.071068142279031e-05, "epoch": 3.0503089143865845, "percentage": 61.71, "elapsed_time": "13:59:53", "remaining_time": "8:41:02"}
217
+ {"current_steps": 217, "total_steps": 350, "loss": 0.3355, "lr": 3.0323124176013297e-05, "epoch": 3.064430714916152, "percentage": 62.0, "elapsed_time": "14:03:46", "remaining_time": "8:37:08"}
218
+ {"current_steps": 218, "total_steps": 350, "loss": 0.3377, "lr": 2.993652945021966e-05, "epoch": 3.078552515445719, "percentage": 62.29, "elapsed_time": "14:07:40", "remaining_time": "8:33:16"}
219
+ {"current_steps": 219, "total_steps": 350, "loss": 0.3366, "lr": 2.955093569847447e-05, "epoch": 3.092674315975287, "percentage": 62.57, "elapsed_time": "14:11:32", "remaining_time": "8:29:21"}
220
+ {"current_steps": 220, "total_steps": 350, "loss": 0.3312, "lr": 2.9166381274279803e-05, "epoch": 3.1067961165048543, "percentage": 62.86, "elapsed_time": "14:15:24", "remaining_time": "8:25:28"}
221
+ {"current_steps": 221, "total_steps": 350, "loss": 0.3311, "lr": 2.8782904427759898e-05, "epoch": 3.120917917034422, "percentage": 63.14, "elapsed_time": "14:19:16", "remaining_time": "8:21:34"}
222
+ {"current_steps": 222, "total_steps": 350, "loss": 0.3282, "lr": 2.8400543301856553e-05, "epoch": 3.1350397175639895, "percentage": 63.43, "elapsed_time": "14:23:08", "remaining_time": "8:17:40"}
223
+ {"current_steps": 223, "total_steps": 350, "loss": 0.3297, "lr": 2.8019335928535234e-05, "epoch": 3.149161518093557, "percentage": 63.71, "elapsed_time": "14:27:01", "remaining_time": "8:13:46"}
224
+ {"current_steps": 224, "total_steps": 350, "loss": 0.327, "lr": 2.7639320225002108e-05, "epoch": 3.1632833186231246, "percentage": 64.0, "elapsed_time": "14:30:53", "remaining_time": "8:09:52"}
225
+ {"current_steps": 225, "total_steps": 350, "loss": 0.3346, "lr": 2.7260533989932628e-05, "epoch": 3.177405119152692, "percentage": 64.29, "elapsed_time": "14:34:45", "remaining_time": "8:05:58"}
226
+ {"current_steps": 226, "total_steps": 350, "loss": 0.3271, "lr": 2.688301489971183e-05, "epoch": 3.1915269196822593, "percentage": 64.57, "elapsed_time": "14:38:38", "remaining_time": "8:02:05"}
227
+ {"current_steps": 227, "total_steps": 350, "loss": 0.328, "lr": 2.6506800504686806e-05, "epoch": 3.205648720211827, "percentage": 64.86, "elapsed_time": "14:42:31", "remaining_time": "7:58:11"}
228
+ {"current_steps": 228, "total_steps": 350, "loss": 0.33, "lr": 2.6131928225431713e-05, "epoch": 3.2197705207413945, "percentage": 65.14, "elapsed_time": "14:46:23", "remaining_time": "7:54:17"}
229
+ {"current_steps": 229, "total_steps": 350, "loss": 0.3358, "lr": 2.575843534902573e-05, "epoch": 3.233892321270962, "percentage": 65.43, "elapsed_time": "14:50:16", "remaining_time": "7:50:24"}
230
+ {"current_steps": 230, "total_steps": 350, "loss": 0.3364, "lr": 2.53863590253442e-05, "epoch": 3.2480141218005296, "percentage": 65.71, "elapsed_time": "14:54:08", "remaining_time": "7:46:30"}
231
+ {"current_steps": 231, "total_steps": 350, "loss": 0.3337, "lr": 2.501573626336352e-05, "epoch": 3.262135922330097, "percentage": 66.0, "elapsed_time": "14:58:00", "remaining_time": "7:42:36"}
232
+ {"current_steps": 232, "total_steps": 350, "loss": 0.3301, "lr": 2.464660392747999e-05, "epoch": 3.2762577228596648, "percentage": 66.29, "elapsed_time": "15:01:51", "remaining_time": "7:38:42"}
233
+ {"current_steps": 233, "total_steps": 350, "loss": 0.332, "lr": 2.427899873384306e-05, "epoch": 3.290379523389232, "percentage": 66.57, "elapsed_time": "15:05:44", "remaining_time": "7:34:49"}
234
+ {"current_steps": 234, "total_steps": 350, "loss": 0.3377, "lr": 2.3912957246703305e-05, "epoch": 3.3045013239187995, "percentage": 66.86, "elapsed_time": "15:09:36", "remaining_time": "7:30:55"}
235
+ {"current_steps": 235, "total_steps": 350, "loss": 0.3297, "lr": 2.3548515874775547e-05, "epoch": 3.3186231244483673, "percentage": 67.14, "elapsed_time": "15:13:29", "remaining_time": "7:27:01"}
236
+ {"current_steps": 236, "total_steps": 350, "loss": 0.3361, "lr": 2.3185710867617387e-05, "epoch": 3.3327449249779346, "percentage": 67.43, "elapsed_time": "15:17:20", "remaining_time": "7:23:07"}
237
+ {"current_steps": 237, "total_steps": 350, "loss": 0.3322, "lr": 2.2824578312023632e-05, "epoch": 3.3468667255075024, "percentage": 67.71, "elapsed_time": "15:21:13", "remaining_time": "7:19:13"}
238
+ {"current_steps": 238, "total_steps": 350, "loss": 0.3361, "lr": 2.24651541284369e-05, "epoch": 3.3609885260370698, "percentage": 68.0, "elapsed_time": "15:25:04", "remaining_time": "7:15:19"}
239
+ {"current_steps": 239, "total_steps": 350, "loss": 0.3344, "lr": 2.210747406737469e-05, "epoch": 3.375110326566637, "percentage": 68.29, "elapsed_time": "15:28:56", "remaining_time": "7:11:25"}
240
+ {"current_steps": 240, "total_steps": 350, "loss": 0.3324, "lr": 2.175157370587348e-05, "epoch": 3.389232127096205, "percentage": 68.57, "elapsed_time": "15:32:48", "remaining_time": "7:07:32"}
241
+ {"current_steps": 241, "total_steps": 350, "loss": 0.3366, "lr": 2.1397488443949985e-05, "epoch": 3.4033539276257723, "percentage": 68.86, "elapsed_time": "15:36:40", "remaining_time": "7:03:38"}
242
+ {"current_steps": 242, "total_steps": 350, "loss": 0.3335, "lr": 2.1045253501080058e-05, "epoch": 3.4174757281553396, "percentage": 69.14, "elapsed_time": "15:40:33", "remaining_time": "6:59:45"}
243
+ {"current_steps": 243, "total_steps": 350, "loss": 0.3342, "lr": 2.0694903912695574e-05, "epoch": 3.4315975286849074, "percentage": 69.43, "elapsed_time": "15:44:26", "remaining_time": "6:55:51"}
244
+ {"current_steps": 244, "total_steps": 350, "loss": 0.3343, "lr": 2.0346474526699552e-05, "epoch": 3.4457193292144748, "percentage": 69.71, "elapsed_time": "15:48:19", "remaining_time": "6:51:58"}
245
+ {"current_steps": 245, "total_steps": 350, "loss": 0.3342, "lr": 2.0000000000000012e-05, "epoch": 3.459841129744042, "percentage": 70.0, "elapsed_time": "15:52:11", "remaining_time": "6:48:05"}
246
+ {"current_steps": 246, "total_steps": 350, "loss": 0.3317, "lr": 1.9655514795062746e-05, "epoch": 3.47396293027361, "percentage": 70.29, "elapsed_time": "15:56:03", "remaining_time": "6:44:11"}
247
+ {"current_steps": 247, "total_steps": 350, "loss": 0.336, "lr": 1.931305317648349e-05, "epoch": 3.4880847308031773, "percentage": 70.57, "elapsed_time": "15:59:56", "remaining_time": "6:40:17"}
248
+ {"current_steps": 248, "total_steps": 350, "loss": 0.3329, "lr": 1.897264920757981e-05, "epoch": 3.502206531332745, "percentage": 70.86, "elapsed_time": "16:03:48", "remaining_time": "6:36:24"}
249
+ {"current_steps": 249, "total_steps": 350, "loss": 0.3363, "lr": 1.8634336747002853e-05, "epoch": 3.5163283318623124, "percentage": 71.14, "elapsed_time": "16:07:40", "remaining_time": "6:32:30"}
250
+ {"current_steps": 250, "total_steps": 350, "loss": 0.3366, "lr": 1.829814944536963e-05, "epoch": 3.5304501323918798, "percentage": 71.43, "elapsed_time": "16:11:32", "remaining_time": "6:28:37"}
251
+ {"current_steps": 251, "total_steps": 350, "loss": 0.3359, "lr": 1.7964120741915905e-05, "epoch": 3.5445719329214476, "percentage": 71.71, "elapsed_time": "16:15:25", "remaining_time": "6:24:43"}
252
+ {"current_steps": 252, "total_steps": 350, "loss": 0.33, "lr": 1.7632283861170135e-05, "epoch": 3.558693733451015, "percentage": 72.0, "elapsed_time": "16:19:17", "remaining_time": "6:20:50"}
253
+ {"current_steps": 253, "total_steps": 350, "loss": 0.3336, "lr": 1.7302671809648735e-05, "epoch": 3.5728155339805827, "percentage": 72.29, "elapsed_time": "16:23:09", "remaining_time": "6:16:56"}
254
+ {"current_steps": 254, "total_steps": 350, "loss": 0.334, "lr": 1.6975317372573066e-05, "epoch": 3.58693733451015, "percentage": 72.57, "elapsed_time": "16:27:03", "remaining_time": "6:13:03"}
255
+ {"current_steps": 255, "total_steps": 350, "loss": 0.3352, "lr": 1.6650253110608415e-05, "epoch": 3.6010591350397174, "percentage": 72.86, "elapsed_time": "16:30:56", "remaining_time": "6:09:10"}
256
+ {"current_steps": 256, "total_steps": 350, "loss": 0.3339, "lr": 1.6327511356625302e-05, "epoch": 3.615180935569285, "percentage": 73.14, "elapsed_time": "16:34:48", "remaining_time": "6:05:17"}
257
+ {"current_steps": 257, "total_steps": 350, "loss": 0.3303, "lr": 1.6007124212483453e-05, "epoch": 3.6293027360988526, "percentage": 73.43, "elapsed_time": "16:38:41", "remaining_time": "6:01:23"}
258
+ {"current_steps": 258, "total_steps": 350, "loss": 0.3319, "lr": 1.5689123545838804e-05, "epoch": 3.6434245366284204, "percentage": 73.71, "elapsed_time": "16:42:33", "remaining_time": "5:57:30"}
259
+ {"current_steps": 259, "total_steps": 350, "loss": 0.3285, "lr": 1.537354098697367e-05, "epoch": 3.6575463371579877, "percentage": 74.0, "elapsed_time": "16:46:27", "remaining_time": "5:53:37"}
260
+ {"current_steps": 260, "total_steps": 350, "loss": 0.3346, "lr": 1.5060407925650662e-05, "epoch": 3.671668137687555, "percentage": 74.29, "elapsed_time": "16:50:20", "remaining_time": "5:49:43"}
261
+ {"current_steps": 261, "total_steps": 350, "loss": 0.3265, "lr": 1.4749755507990449e-05, "epoch": 3.6857899382171224, "percentage": 74.57, "elapsed_time": "16:54:12", "remaining_time": "5:45:50"}
262
+ {"current_steps": 262, "total_steps": 350, "loss": 0.3367, "lr": 1.4441614633373773e-05, "epoch": 3.69991173874669, "percentage": 74.86, "elapsed_time": "16:58:05", "remaining_time": "5:41:57"}
263
+ {"current_steps": 263, "total_steps": 350, "loss": 0.335, "lr": 1.413601595136802e-05, "epoch": 3.7140335392762576, "percentage": 75.14, "elapsed_time": "17:01:58", "remaining_time": "5:38:04"}
264
+ {"current_steps": 264, "total_steps": 350, "loss": 0.3324, "lr": 1.383298985867863e-05, "epoch": 3.7281553398058254, "percentage": 75.43, "elapsed_time": "17:05:51", "remaining_time": "5:34:10"}
265
+ {"current_steps": 265, "total_steps": 350, "loss": 0.3313, "lr": 1.3532566496125634e-05, "epoch": 3.7422771403353927, "percentage": 75.71, "elapsed_time": "17:09:44", "remaining_time": "5:30:17"}
266
+ {"current_steps": 266, "total_steps": 350, "loss": 0.3351, "lr": 1.3234775745645684e-05, "epoch": 3.75639894086496, "percentage": 76.0, "elapsed_time": "17:13:37", "remaining_time": "5:26:24"}
267
+ {"current_steps": 267, "total_steps": 350, "loss": 0.3353, "lr": 1.2939647227319791e-05, "epoch": 3.770520741394528, "percentage": 76.29, "elapsed_time": "17:17:30", "remaining_time": "5:22:31"}
268
+ {"current_steps": 268, "total_steps": 350, "loss": 0.3323, "lr": 1.2647210296427197e-05, "epoch": 3.784642541924095, "percentage": 76.57, "elapsed_time": "17:21:23", "remaining_time": "5:18:37"}
269
+ {"current_steps": 269, "total_steps": 350, "loss": 0.3391, "lr": 1.2357494040525416e-05, "epoch": 3.798764342453663, "percentage": 76.86, "elapsed_time": "17:25:15", "remaining_time": "5:14:44"}
270
+ {"current_steps": 270, "total_steps": 350, "loss": 0.3327, "lr": 1.2070527276557092e-05, "epoch": 3.8128861429832304, "percentage": 77.14, "elapsed_time": "17:29:08", "remaining_time": "5:10:51"}
271
+ {"current_steps": 271, "total_steps": 350, "loss": 0.33, "lr": 1.178633854798365e-05, "epoch": 3.8270079435127977, "percentage": 77.43, "elapsed_time": "17:33:01", "remaining_time": "5:06:58"}
272
+ {"current_steps": 272, "total_steps": 350, "loss": 0.3317, "lr": 1.1504956121946216e-05, "epoch": 3.8411297440423655, "percentage": 77.71, "elapsed_time": "17:36:54", "remaining_time": "5:03:04"}
273
+ {"current_steps": 273, "total_steps": 350, "loss": 0.3294, "lr": 1.1226407986453963e-05, "epoch": 3.855251544571933, "percentage": 78.0, "elapsed_time": "17:40:47", "remaining_time": "4:59:11"}
274
+ {"current_steps": 274, "total_steps": 350, "loss": 0.3282, "lr": 1.0950721847600282e-05, "epoch": 3.8693733451015007, "percentage": 78.29, "elapsed_time": "17:44:38", "remaining_time": "4:55:18"}
275
+ {"current_steps": 275, "total_steps": 350, "loss": 0.335, "lr": 1.0677925126806956e-05, "epoch": 3.883495145631068, "percentage": 78.57, "elapsed_time": "17:48:31", "remaining_time": "4:51:24"}
276
+ {"current_steps": 276, "total_steps": 350, "loss": 0.3352, "lr": 1.040804495809665e-05, "epoch": 3.8976169461606354, "percentage": 78.86, "elapsed_time": "17:52:23", "remaining_time": "4:47:31"}
277
+ {"current_steps": 277, "total_steps": 350, "loss": 0.3307, "lr": 1.0141108185393995e-05, "epoch": 3.911738746690203, "percentage": 79.14, "elapsed_time": "17:56:15", "remaining_time": "4:43:38"}
278
+ {"current_steps": 278, "total_steps": 350, "loss": 0.3316, "lr": 9.877141359855567e-06, "epoch": 3.9258605472197705, "percentage": 79.43, "elapsed_time": "18:00:07", "remaining_time": "4:39:44"}
279
+ {"current_steps": 279, "total_steps": 350, "loss": 0.3301, "lr": 9.616170737228882e-06, "epoch": 3.9399823477493383, "percentage": 79.71, "elapsed_time": "18:03:59", "remaining_time": "4:35:51"}
280
+ {"current_steps": 280, "total_steps": 350, "loss": 0.3309, "lr": 9.358222275240884e-06, "epoch": 3.9541041482789057, "percentage": 80.0, "elapsed_time": "18:07:51", "remaining_time": "4:31:57"}
281
+ {"current_steps": 281, "total_steps": 350, "loss": 0.3294, "lr": 9.103321631016024e-06, "epoch": 3.968225948808473, "percentage": 80.29, "elapsed_time": "18:11:43", "remaining_time": "4:28:04"}
282
+ {"current_steps": 282, "total_steps": 350, "loss": 0.3299, "lr": 8.851494158524242e-06, "epoch": 3.9823477493380404, "percentage": 80.57, "elapsed_time": "18:15:37", "remaining_time": "4:24:11"}
283
+ {"current_steps": 283, "total_steps": 350, "loss": 0.3734, "lr": 8.602764906059109e-06, "epoch": 3.996469549867608, "percentage": 80.86, "elapsed_time": "18:19:25", "remaining_time": "4:20:17"}
284
+ {"current_steps": 284, "total_steps": 350, "loss": 0.3432, "lr": 8.35715861374636e-06, "epoch": 4.010591350397176, "percentage": 81.14, "elapsed_time": "18:23:45", "remaining_time": "4:16:30"}
285
+ {"current_steps": 285, "total_steps": 350, "loss": 0.3187, "lr": 8.114699711083113e-06, "epoch": 4.024713150926743, "percentage": 81.43, "elapsed_time": "18:27:37", "remaining_time": "4:12:37"}
286
+ {"current_steps": 286, "total_steps": 350, "loss": 0.3213, "lr": 7.875412314507942e-06, "epoch": 4.038834951456311, "percentage": 81.71, "elapsed_time": "18:31:30", "remaining_time": "4:08:43"}
287
+ {"current_steps": 287, "total_steps": 350, "loss": 0.3169, "lr": 7.639320225002106e-06, "epoch": 4.052956751985878, "percentage": 82.0, "elapsed_time": "18:35:22", "remaining_time": "4:04:50"}
288
+ {"current_steps": 288, "total_steps": 350, "loss": 0.3148, "lr": 7.406446925722211e-06, "epoch": 4.067078552515445, "percentage": 82.29, "elapsed_time": "18:39:14", "remaining_time": "4:00:56"}
289
+ {"current_steps": 289, "total_steps": 350, "loss": 0.3132, "lr": 7.176815579664343e-06, "epoch": 4.081200353045014, "percentage": 82.57, "elapsed_time": "18:43:06", "remaining_time": "3:57:03"}
290
+ {"current_steps": 290, "total_steps": 350, "loss": 0.3175, "lr": 6.950449027360213e-06, "epoch": 4.095322153574581, "percentage": 82.86, "elapsed_time": "18:46:58", "remaining_time": "3:53:09"}
291
+ {"current_steps": 291, "total_steps": 350, "loss": 0.3184, "lr": 6.7273697846052515e-06, "epoch": 4.109443954104148, "percentage": 83.14, "elapsed_time": "18:50:50", "remaining_time": "3:49:16"}
292
+ {"current_steps": 292, "total_steps": 350, "loss": 0.3164, "lr": 6.507600040219073e-06, "epoch": 4.123565754633716, "percentage": 83.43, "elapsed_time": "18:54:42", "remaining_time": "3:45:23"}
293
+ {"current_steps": 293, "total_steps": 350, "loss": 0.3177, "lr": 6.291161653838434e-06, "epoch": 4.137687555163283, "percentage": 83.71, "elapsed_time": "18:58:34", "remaining_time": "3:41:29"}
294
+ {"current_steps": 294, "total_steps": 350, "loss": 0.3131, "lr": 6.078076153742962e-06, "epoch": 4.151809355692851, "percentage": 84.0, "elapsed_time": "19:02:27", "remaining_time": "3:37:36"}
295
+ {"current_steps": 295, "total_steps": 350, "loss": 0.3142, "lr": 5.868364734713776e-06, "epoch": 4.165931156222419, "percentage": 84.29, "elapsed_time": "19:06:19", "remaining_time": "3:33:43"}
296
+ {"current_steps": 296, "total_steps": 350, "loss": 0.3204, "lr": 5.662048255925357e-06, "epoch": 4.180052956751986, "percentage": 84.57, "elapsed_time": "19:10:12", "remaining_time": "3:29:50"}
297
+ {"current_steps": 297, "total_steps": 350, "loss": 0.3158, "lr": 5.459147238870768e-06, "epoch": 4.194174757281553, "percentage": 84.86, "elapsed_time": "19:14:05", "remaining_time": "3:25:56"}
298
+ {"current_steps": 298, "total_steps": 350, "loss": 0.3194, "lr": 5.259681865320447e-06, "epoch": 4.208296557811121, "percentage": 85.14, "elapsed_time": "19:17:58", "remaining_time": "3:22:03"}
299
+ {"current_steps": 299, "total_steps": 350, "loss": 0.3163, "lr": 5.063671975314814e-06, "epoch": 4.222418358340688, "percentage": 85.43, "elapsed_time": "19:21:50", "remaining_time": "3:18:10"}
300
+ {"current_steps": 300, "total_steps": 350, "loss": 0.315, "lr": 4.871137065190854e-06, "epoch": 4.236540158870256, "percentage": 85.71, "elapsed_time": "19:25:42", "remaining_time": "3:14:17"}
301
+ {"current_steps": 301, "total_steps": 350, "loss": 0.3176, "lr": 4.6820962856429205e-06, "epoch": 4.250661959399824, "percentage": 86.0, "elapsed_time": "19:29:34", "remaining_time": "3:10:23"}
302
+ {"current_steps": 302, "total_steps": 350, "loss": 0.322, "lr": 4.496568439817836e-06, "epoch": 4.264783759929391, "percentage": 86.29, "elapsed_time": "19:33:27", "remaining_time": "3:06:30"}
303
+ {"current_steps": 303, "total_steps": 350, "loss": 0.311, "lr": 4.314571981444666e-06, "epoch": 4.278905560458958, "percentage": 86.57, "elapsed_time": "19:37:19", "remaining_time": "3:02:37"}
304
+ {"current_steps": 304, "total_steps": 350, "loss": 0.3203, "lr": 4.136125012999168e-06, "epoch": 4.293027360988526, "percentage": 86.86, "elapsed_time": "19:41:11", "remaining_time": "2:58:44"}
305
+ {"current_steps": 305, "total_steps": 350, "loss": 0.3161, "lr": 3.961245283903239e-06, "epoch": 4.307149161518094, "percentage": 87.14, "elapsed_time": "19:45:04", "remaining_time": "2:54:50"}
306
+ {"current_steps": 306, "total_steps": 350, "loss": 0.315, "lr": 3.7899501887594102e-06, "epoch": 4.321270962047661, "percentage": 87.43, "elapsed_time": "19:48:56", "remaining_time": "2:50:57"}
307
+ {"current_steps": 307, "total_steps": 350, "loss": 0.3165, "lr": 3.622256765620713e-06, "epoch": 4.335392762577229, "percentage": 87.71, "elapsed_time": "19:52:49", "remaining_time": "2:47:04"}
308
+ {"current_steps": 308, "total_steps": 350, "loss": 0.3114, "lr": 3.458181694295961e-06, "epoch": 4.349514563106796, "percentage": 88.0, "elapsed_time": "19:56:42", "remaining_time": "2:43:11"}
309
+ {"current_steps": 309, "total_steps": 350, "loss": 0.3125, "lr": 3.297741294690644e-06, "epoch": 4.363636363636363, "percentage": 88.29, "elapsed_time": "20:00:34", "remaining_time": "2:39:18"}
310
+ {"current_steps": 310, "total_steps": 350, "loss": 0.3156, "lr": 3.140951525183691e-06, "epoch": 4.3777581641659316, "percentage": 88.57, "elapsed_time": "20:04:27", "remaining_time": "2:35:24"}
311
+ {"current_steps": 311, "total_steps": 350, "loss": 0.3144, "lr": 2.987827981040132e-06, "epoch": 4.391879964695499, "percentage": 88.86, "elapsed_time": "20:08:20", "remaining_time": "2:31:31"}
312
+ {"current_steps": 312, "total_steps": 350, "loss": 0.3157, "lr": 2.8383858928598963e-06, "epoch": 4.406001765225066, "percentage": 89.14, "elapsed_time": "20:12:12", "remaining_time": "2:27:38"}
313
+ {"current_steps": 313, "total_steps": 350, "loss": 0.3116, "lr": 2.692640125062895e-06, "epoch": 4.420123565754634, "percentage": 89.43, "elapsed_time": "20:16:04", "remaining_time": "2:23:45"}
314
+ {"current_steps": 314, "total_steps": 350, "loss": 0.3206, "lr": 2.550605174410512e-06, "epoch": 4.434245366284201, "percentage": 89.71, "elapsed_time": "20:19:56", "remaining_time": "2:19:51"}
315
+ {"current_steps": 315, "total_steps": 350, "loss": 0.3119, "lr": 2.4122951685636674e-06, "epoch": 4.448367166813769, "percentage": 90.0, "elapsed_time": "20:23:47", "remaining_time": "2:15:58"}
316
+ {"current_steps": 316, "total_steps": 350, "loss": 0.314, "lr": 2.2777238646775768e-06, "epoch": 4.4624889673433366, "percentage": 90.29, "elapsed_time": "20:27:39", "remaining_time": "2:12:05"}
317
+ {"current_steps": 317, "total_steps": 350, "loss": 0.3116, "lr": 2.14690464803343e-06, "epoch": 4.476610767872904, "percentage": 90.57, "elapsed_time": "20:31:33", "remaining_time": "2:08:12"}
318
+ {"current_steps": 318, "total_steps": 350, "loss": 0.3162, "lr": 2.0198505307069462e-06, "epoch": 4.490732568402471, "percentage": 90.86, "elapsed_time": "20:35:25", "remaining_time": "2:04:19"}
319
+ {"current_steps": 319, "total_steps": 350, "loss": 0.318, "lr": 1.896574150274151e-06, "epoch": 4.504854368932039, "percentage": 91.14, "elapsed_time": "20:39:18", "remaining_time": "2:00:26"}
320
+ {"current_steps": 320, "total_steps": 350, "loss": 0.3146, "lr": 1.7770877685543687e-06, "epoch": 4.518976169461606, "percentage": 91.43, "elapsed_time": "20:43:10", "remaining_time": "1:56:32"}
321
+ {"current_steps": 321, "total_steps": 350, "loss": 0.3188, "lr": 1.6614032703905714e-06, "epoch": 4.533097969991174, "percentage": 91.71, "elapsed_time": "20:47:03", "remaining_time": "1:52:39"}
322
+ {"current_steps": 322, "total_steps": 350, "loss": 0.3117, "lr": 1.5495321624672443e-06, "epoch": 4.5472197705207416, "percentage": 92.0, "elapsed_time": "20:50:55", "remaining_time": "1:48:46"}
323
+ {"current_steps": 323, "total_steps": 350, "loss": 0.3179, "lr": 1.4414855721658705e-06, "epoch": 4.561341571050309, "percentage": 92.29, "elapsed_time": "20:54:47", "remaining_time": "1:44:53"}
324
+ {"current_steps": 324, "total_steps": 350, "loss": 0.3169, "lr": 1.3372742464581134e-06, "epoch": 4.575463371579876, "percentage": 92.57, "elapsed_time": "20:58:39", "remaining_time": "1:41:00"}
325
+ {"current_steps": 325, "total_steps": 350, "loss": 0.313, "lr": 1.2369085508368862e-06, "epoch": 4.589585172109444, "percentage": 92.86, "elapsed_time": "21:02:32", "remaining_time": "1:37:07"}
326
+ {"current_steps": 326, "total_steps": 350, "loss": 0.3162, "lr": 1.1403984682852998e-06, "epoch": 4.603706972639012, "percentage": 93.14, "elapsed_time": "21:06:24", "remaining_time": "1:33:13"}
327
+ {"current_steps": 327, "total_steps": 350, "loss": 0.3169, "lr": 1.0477535982837473e-06, "epoch": 4.617828773168579, "percentage": 93.43, "elapsed_time": "21:10:18", "remaining_time": "1:29:20"}
328
+ {"current_steps": 328, "total_steps": 350, "loss": 0.3147, "lr": 9.589831558550222e-07, "epoch": 4.631950573698147, "percentage": 93.71, "elapsed_time": "21:14:10", "remaining_time": "1:25:27"}
329
+ {"current_steps": 329, "total_steps": 350, "loss": 0.3155, "lr": 8.740959706477725e-07, "epoch": 4.646072374227714, "percentage": 94.0, "elapsed_time": "21:18:02", "remaining_time": "1:21:34"}
330
+ {"current_steps": 330, "total_steps": 350, "loss": 0.3172, "lr": 7.93100486058247e-07, "epoch": 4.660194174757281, "percentage": 94.29, "elapsed_time": "21:21:55", "remaining_time": "1:17:41"}
331
+ {"current_steps": 331, "total_steps": 350, "loss": 0.3123, "lr": 7.160047583904473e-07, "epoch": 4.674315975286849, "percentage": 94.57, "elapsed_time": "21:25:47", "remaining_time": "1:13:48"}
332
+ {"current_steps": 332, "total_steps": 350, "loss": 0.3126, "lr": 6.428164560548134e-07, "epoch": 4.688437775816417, "percentage": 94.86, "elapsed_time": "21:29:39", "remaining_time": "1:09:55"}
333
+ {"current_steps": 333, "total_steps": 350, "loss": 0.3091, "lr": 5.735428588054825e-07, "epoch": 4.702559576345984, "percentage": 95.14, "elapsed_time": "21:33:33", "remaining_time": "1:06:02"}
334
+ {"current_steps": 334, "total_steps": 350, "loss": 0.3168, "lr": 5.081908570161753e-07, "epoch": 4.716681376875552, "percentage": 95.43, "elapsed_time": "21:37:26", "remaining_time": "1:02:09"}
335
+ {"current_steps": 335, "total_steps": 350, "loss": 0.3168, "lr": 4.467669509948591e-07, "epoch": 4.730803177405119, "percentage": 95.71, "elapsed_time": "21:41:17", "remaining_time": "0:58:16"}
336
+ {"current_steps": 336, "total_steps": 350, "loss": 0.3096, "lr": 3.8927725033718553e-07, "epoch": 4.744924977934687, "percentage": 96.0, "elapsed_time": "21:45:09", "remaining_time": "0:54:22"}
337
+ {"current_steps": 337, "total_steps": 350, "loss": 0.3127, "lr": 3.3572747331878984e-07, "epoch": 4.7590467784642545, "percentage": 96.29, "elapsed_time": "21:49:02", "remaining_time": "0:50:29"}
338
+ {"current_steps": 338, "total_steps": 350, "loss": 0.3165, "lr": 2.8612294632650586e-07, "epoch": 4.773168578993822, "percentage": 96.57, "elapsed_time": "21:52:54", "remaining_time": "0:46:36"}
339
+ {"current_steps": 339, "total_steps": 350, "loss": 0.3211, "lr": 2.404686033285897e-07, "epoch": 4.787290379523389, "percentage": 96.86, "elapsed_time": "21:56:46", "remaining_time": "0:42:43"}
340
+ {"current_steps": 340, "total_steps": 350, "loss": 0.3139, "lr": 1.9876898538394362e-07, "epoch": 4.801412180052957, "percentage": 97.14, "elapsed_time": "22:00:40", "remaining_time": "0:38:50"}
341
+ {"current_steps": 341, "total_steps": 350, "loss": 0.3119, "lr": 1.6102824019043728e-07, "epoch": 4.815533980582524, "percentage": 97.43, "elapsed_time": "22:04:32", "remaining_time": "0:34:57"}
342
+ {"current_steps": 342, "total_steps": 350, "loss": 0.3189, "lr": 1.2725012167236207e-07, "epoch": 4.829655781112092, "percentage": 97.71, "elapsed_time": "22:08:24", "remaining_time": "0:31:04"}
343
+ {"current_steps": 343, "total_steps": 350, "loss": 0.3144, "lr": 9.74379896070321e-08, "epoch": 4.8437775816416595, "percentage": 98.0, "elapsed_time": "22:12:17", "remaining_time": "0:27:11"}
344
+ {"current_steps": 344, "total_steps": 350, "loss": 0.3208, "lr": 7.159480929059381e-08, "epoch": 4.857899382171227, "percentage": 98.29, "elapsed_time": "22:16:10", "remaining_time": "0:23:18"}
345
+ {"current_steps": 345, "total_steps": 350, "loss": 0.3164, "lr": 4.9723151243106225e-08, "epoch": 4.872021182700794, "percentage": 98.57, "elapsed_time": "22:20:03", "remaining_time": "0:19:25"}
346
+ {"current_steps": 346, "total_steps": 350, "loss": 0.3183, "lr": 3.1825190952829986e-08, "epoch": 4.886142983230362, "percentage": 98.86, "elapsed_time": "22:23:56", "remaining_time": "0:15:32"}
347
+ {"current_steps": 347, "total_steps": 350, "loss": 0.3185, "lr": 1.7902708659867096e-08, "epoch": 4.90026478375993, "percentage": 99.14, "elapsed_time": "22:27:49", "remaining_time": "0:11:39"}
348
+ {"current_steps": 348, "total_steps": 350, "loss": 0.3142, "lr": 7.957089179058131e-09, "epoch": 4.914386584289497, "percentage": 99.43, "elapsed_time": "22:31:43", "remaining_time": "0:07:46"}
349
+ {"current_steps": 349, "total_steps": 350, "loss": 0.3181, "lr": 1.9893217622790616e-09, "epoch": 4.9285083848190645, "percentage": 99.71, "elapsed_time": "22:35:35", "remaining_time": "0:03:53"}
350
+ {"current_steps": 350, "total_steps": 350, "loss": 0.313, "lr": 0.0, "epoch": 4.942630185348632, "percentage": 100.0, "elapsed_time": "22:39:28", "remaining_time": "0:00:00"}
351
+ {"current_steps": 350, "total_steps": 350, "epoch": 4.942630185348632, "percentage": 100.0, "elapsed_time": "22:39:58", "remaining_time": "0:00:00"}
352
+ {"current_steps": 350, "total_steps": 350, "epoch": 4.942630185348632, "percentage": 100.0, "elapsed_time": "0:00:00", "remaining_time": "0:00:00"}
trainer_state.json ADDED
@@ -0,0 +1,2492 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 4.942630185348632,
5
+ "eval_steps": 500,
6
+ "global_step": 350,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.01412180052956752,
13
+ "grad_norm": 5.979931009348192,
14
+ "learning_rate": 2.285714285714286e-06,
15
+ "loss": 0.8154,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.02824360105913504,
20
+ "grad_norm": 6.047301062181586,
21
+ "learning_rate": 4.571428571428572e-06,
22
+ "loss": 0.8243,
23
+ "step": 2
24
+ },
25
+ {
26
+ "epoch": 0.04236540158870256,
27
+ "grad_norm": 5.589397434568418,
28
+ "learning_rate": 6.857142857142858e-06,
29
+ "loss": 0.8057,
30
+ "step": 3
31
+ },
32
+ {
33
+ "epoch": 0.05648720211827008,
34
+ "grad_norm": 4.017126644109988,
35
+ "learning_rate": 9.142857142857144e-06,
36
+ "loss": 0.7602,
37
+ "step": 4
38
+ },
39
+ {
40
+ "epoch": 0.0706090026478376,
41
+ "grad_norm": 2.1627060531795967,
42
+ "learning_rate": 1.1428571428571429e-05,
43
+ "loss": 0.7197,
44
+ "step": 5
45
+ },
46
+ {
47
+ "epoch": 0.08473080317740513,
48
+ "grad_norm": 4.667143281538081,
49
+ "learning_rate": 1.3714285714285716e-05,
50
+ "loss": 0.7374,
51
+ "step": 6
52
+ },
53
+ {
54
+ "epoch": 0.09885260370697264,
55
+ "grad_norm": 6.545492179001986,
56
+ "learning_rate": 1.6000000000000003e-05,
57
+ "loss": 0.7325,
58
+ "step": 7
59
+ },
60
+ {
61
+ "epoch": 0.11297440423654016,
62
+ "grad_norm": 7.281711055430636,
63
+ "learning_rate": 1.8285714285714288e-05,
64
+ "loss": 0.7502,
65
+ "step": 8
66
+ },
67
+ {
68
+ "epoch": 0.12709620476610767,
69
+ "grad_norm": 4.392674048119666,
70
+ "learning_rate": 2.057142857142857e-05,
71
+ "loss": 0.7051,
72
+ "step": 9
73
+ },
74
+ {
75
+ "epoch": 0.1412180052956752,
76
+ "grad_norm": 2.9074563359884973,
77
+ "learning_rate": 2.2857142857142858e-05,
78
+ "loss": 0.6593,
79
+ "step": 10
80
+ },
81
+ {
82
+ "epoch": 0.1553398058252427,
83
+ "grad_norm": 2.205323794009288,
84
+ "learning_rate": 2.5142857142857143e-05,
85
+ "loss": 0.6276,
86
+ "step": 11
87
+ },
88
+ {
89
+ "epoch": 0.16946160635481025,
90
+ "grad_norm": 1.4150033586173336,
91
+ "learning_rate": 2.742857142857143e-05,
92
+ "loss": 0.6013,
93
+ "step": 12
94
+ },
95
+ {
96
+ "epoch": 0.18358340688437777,
97
+ "grad_norm": 1.3251831618492345,
98
+ "learning_rate": 2.9714285714285717e-05,
99
+ "loss": 0.5859,
100
+ "step": 13
101
+ },
102
+ {
103
+ "epoch": 0.1977052074139453,
104
+ "grad_norm": 3.1805546649918544,
105
+ "learning_rate": 3.2000000000000005e-05,
106
+ "loss": 0.5824,
107
+ "step": 14
108
+ },
109
+ {
110
+ "epoch": 0.2118270079435128,
111
+ "grad_norm": 1.4989971887780031,
112
+ "learning_rate": 3.4285714285714284e-05,
113
+ "loss": 0.5708,
114
+ "step": 15
115
+ },
116
+ {
117
+ "epoch": 0.22594880847308033,
118
+ "grad_norm": 1.203788524759605,
119
+ "learning_rate": 3.6571428571428576e-05,
120
+ "loss": 0.5563,
121
+ "step": 16
122
+ },
123
+ {
124
+ "epoch": 0.24007060900264784,
125
+ "grad_norm": 1.3578810581727971,
126
+ "learning_rate": 3.885714285714286e-05,
127
+ "loss": 0.5566,
128
+ "step": 17
129
+ },
130
+ {
131
+ "epoch": 0.25419240953221534,
132
+ "grad_norm": 0.8074275719021523,
133
+ "learning_rate": 4.114285714285714e-05,
134
+ "loss": 0.5507,
135
+ "step": 18
136
+ },
137
+ {
138
+ "epoch": 0.26831421006178285,
139
+ "grad_norm": 1.1227844753364196,
140
+ "learning_rate": 4.342857142857143e-05,
141
+ "loss": 0.5418,
142
+ "step": 19
143
+ },
144
+ {
145
+ "epoch": 0.2824360105913504,
146
+ "grad_norm": 0.8760389990351023,
147
+ "learning_rate": 4.5714285714285716e-05,
148
+ "loss": 0.5333,
149
+ "step": 20
150
+ },
151
+ {
152
+ "epoch": 0.2965578111209179,
153
+ "grad_norm": 1.1881496989284341,
154
+ "learning_rate": 4.8e-05,
155
+ "loss": 0.5305,
156
+ "step": 21
157
+ },
158
+ {
159
+ "epoch": 0.3106796116504854,
160
+ "grad_norm": 0.9681459485298871,
161
+ "learning_rate": 5.0285714285714286e-05,
162
+ "loss": 0.5189,
163
+ "step": 22
164
+ },
165
+ {
166
+ "epoch": 0.324801412180053,
167
+ "grad_norm": 1.4344211614758422,
168
+ "learning_rate": 5.257142857142858e-05,
169
+ "loss": 0.5198,
170
+ "step": 23
171
+ },
172
+ {
173
+ "epoch": 0.3389232127096205,
174
+ "grad_norm": 0.8492140987790524,
175
+ "learning_rate": 5.485714285714286e-05,
176
+ "loss": 0.5102,
177
+ "step": 24
178
+ },
179
+ {
180
+ "epoch": 0.353045013239188,
181
+ "grad_norm": 1.2351656998878342,
182
+ "learning_rate": 5.714285714285715e-05,
183
+ "loss": 0.5098,
184
+ "step": 25
185
+ },
186
+ {
187
+ "epoch": 0.36716681376875554,
188
+ "grad_norm": 0.898578091846962,
189
+ "learning_rate": 5.9428571428571434e-05,
190
+ "loss": 0.5015,
191
+ "step": 26
192
+ },
193
+ {
194
+ "epoch": 0.38128861429832306,
195
+ "grad_norm": 1.5089247050740433,
196
+ "learning_rate": 6.171428571428573e-05,
197
+ "loss": 0.507,
198
+ "step": 27
199
+ },
200
+ {
201
+ "epoch": 0.3954104148278906,
202
+ "grad_norm": 0.9864208925736987,
203
+ "learning_rate": 6.400000000000001e-05,
204
+ "loss": 0.5034,
205
+ "step": 28
206
+ },
207
+ {
208
+ "epoch": 0.4095322153574581,
209
+ "grad_norm": 1.0206985743120736,
210
+ "learning_rate": 6.62857142857143e-05,
211
+ "loss": 0.4983,
212
+ "step": 29
213
+ },
214
+ {
215
+ "epoch": 0.4236540158870256,
216
+ "grad_norm": 1.4229934179471342,
217
+ "learning_rate": 6.857142857142857e-05,
218
+ "loss": 0.508,
219
+ "step": 30
220
+ },
221
+ {
222
+ "epoch": 0.43777581641659313,
223
+ "grad_norm": 0.9625460430091453,
224
+ "learning_rate": 7.085714285714287e-05,
225
+ "loss": 0.5016,
226
+ "step": 31
227
+ },
228
+ {
229
+ "epoch": 0.45189761694616065,
230
+ "grad_norm": 1.1144628190568628,
231
+ "learning_rate": 7.314285714285715e-05,
232
+ "loss": 0.4939,
233
+ "step": 32
234
+ },
235
+ {
236
+ "epoch": 0.46601941747572817,
237
+ "grad_norm": 0.9463549200929555,
238
+ "learning_rate": 7.542857142857144e-05,
239
+ "loss": 0.4924,
240
+ "step": 33
241
+ },
242
+ {
243
+ "epoch": 0.4801412180052957,
244
+ "grad_norm": 0.8892598203382347,
245
+ "learning_rate": 7.771428571428572e-05,
246
+ "loss": 0.4902,
247
+ "step": 34
248
+ },
249
+ {
250
+ "epoch": 0.4942630185348632,
251
+ "grad_norm": 0.9413854750510515,
252
+ "learning_rate": 8e-05,
253
+ "loss": 0.4852,
254
+ "step": 35
255
+ },
256
+ {
257
+ "epoch": 0.5083848190644307,
258
+ "grad_norm": 0.9034630826375731,
259
+ "learning_rate": 7.999801067823773e-05,
260
+ "loss": 0.4853,
261
+ "step": 36
262
+ },
263
+ {
264
+ "epoch": 0.5225066195939982,
265
+ "grad_norm": 1.2269858722315412,
266
+ "learning_rate": 7.999204291082095e-05,
267
+ "loss": 0.4764,
268
+ "step": 37
269
+ },
270
+ {
271
+ "epoch": 0.5366284201235657,
272
+ "grad_norm": 0.9045227868888749,
273
+ "learning_rate": 7.998209729134014e-05,
274
+ "loss": 0.4809,
275
+ "step": 38
276
+ },
277
+ {
278
+ "epoch": 0.5507502206531333,
279
+ "grad_norm": 1.2176978127037603,
280
+ "learning_rate": 7.996817480904718e-05,
281
+ "loss": 0.4719,
282
+ "step": 39
283
+ },
284
+ {
285
+ "epoch": 0.5648720211827007,
286
+ "grad_norm": 1.2333419409501036,
287
+ "learning_rate": 7.99502768487569e-05,
288
+ "loss": 0.477,
289
+ "step": 40
290
+ },
291
+ {
292
+ "epoch": 0.5789938217122683,
293
+ "grad_norm": 0.7483281237491166,
294
+ "learning_rate": 7.99284051907094e-05,
295
+ "loss": 0.4724,
296
+ "step": 41
297
+ },
298
+ {
299
+ "epoch": 0.5931156222418358,
300
+ "grad_norm": 0.6151558817864059,
301
+ "learning_rate": 7.990256201039297e-05,
302
+ "loss": 0.4662,
303
+ "step": 42
304
+ },
305
+ {
306
+ "epoch": 0.6072374227714034,
307
+ "grad_norm": 0.6814568571856022,
308
+ "learning_rate": 7.987274987832764e-05,
309
+ "loss": 0.4621,
310
+ "step": 43
311
+ },
312
+ {
313
+ "epoch": 0.6213592233009708,
314
+ "grad_norm": 0.9240497143419791,
315
+ "learning_rate": 7.983897175980957e-05,
316
+ "loss": 0.4665,
317
+ "step": 44
318
+ },
319
+ {
320
+ "epoch": 0.6354810238305384,
321
+ "grad_norm": 1.2034986971304784,
322
+ "learning_rate": 7.980123101461606e-05,
323
+ "loss": 0.4761,
324
+ "step": 45
325
+ },
326
+ {
327
+ "epoch": 0.649602824360106,
328
+ "grad_norm": 0.8192788227089312,
329
+ "learning_rate": 7.975953139667141e-05,
330
+ "loss": 0.4652,
331
+ "step": 46
332
+ },
333
+ {
334
+ "epoch": 0.6637246248896734,
335
+ "grad_norm": 0.7683306980890072,
336
+ "learning_rate": 7.97138770536735e-05,
337
+ "loss": 0.4619,
338
+ "step": 47
339
+ },
340
+ {
341
+ "epoch": 0.677846425419241,
342
+ "grad_norm": 0.7567015766907312,
343
+ "learning_rate": 7.966427252668121e-05,
344
+ "loss": 0.4638,
345
+ "step": 48
346
+ },
347
+ {
348
+ "epoch": 0.6919682259488085,
349
+ "grad_norm": 0.6846820764750615,
350
+ "learning_rate": 7.961072274966282e-05,
351
+ "loss": 0.4527,
352
+ "step": 49
353
+ },
354
+ {
355
+ "epoch": 0.706090026478376,
356
+ "grad_norm": 0.7395598100512276,
357
+ "learning_rate": 7.955323304900514e-05,
358
+ "loss": 0.4571,
359
+ "step": 50
360
+ },
361
+ {
362
+ "epoch": 0.7202118270079435,
363
+ "grad_norm": 0.5351232158771764,
364
+ "learning_rate": 7.949180914298383e-05,
365
+ "loss": 0.4496,
366
+ "step": 51
367
+ },
368
+ {
369
+ "epoch": 0.7343336275375111,
370
+ "grad_norm": 0.6324424558337066,
371
+ "learning_rate": 7.942645714119452e-05,
372
+ "loss": 0.4593,
373
+ "step": 52
374
+ },
375
+ {
376
+ "epoch": 0.7484554280670785,
377
+ "grad_norm": 0.547964739600884,
378
+ "learning_rate": 7.93571835439452e-05,
379
+ "loss": 0.4502,
380
+ "step": 53
381
+ },
382
+ {
383
+ "epoch": 0.7625772285966461,
384
+ "grad_norm": 0.7115536296101671,
385
+ "learning_rate": 7.928399524160956e-05,
386
+ "loss": 0.447,
387
+ "step": 54
388
+ },
389
+ {
390
+ "epoch": 0.7766990291262136,
391
+ "grad_norm": 0.742782814289987,
392
+ "learning_rate": 7.920689951394175e-05,
393
+ "loss": 0.4461,
394
+ "step": 55
395
+ },
396
+ {
397
+ "epoch": 0.7908208296557812,
398
+ "grad_norm": 0.6862659469941464,
399
+ "learning_rate": 7.912590402935223e-05,
400
+ "loss": 0.4473,
401
+ "step": 56
402
+ },
403
+ {
404
+ "epoch": 0.8049426301853486,
405
+ "grad_norm": 0.6235041641613883,
406
+ "learning_rate": 7.904101684414498e-05,
407
+ "loss": 0.4472,
408
+ "step": 57
409
+ },
410
+ {
411
+ "epoch": 0.8190644307149162,
412
+ "grad_norm": 0.44600795869954046,
413
+ "learning_rate": 7.895224640171625e-05,
414
+ "loss": 0.4442,
415
+ "step": 58
416
+ },
417
+ {
418
+ "epoch": 0.8331862312444837,
419
+ "grad_norm": 0.48251979778530707,
420
+ "learning_rate": 7.88596015317147e-05,
421
+ "loss": 0.4449,
422
+ "step": 59
423
+ },
424
+ {
425
+ "epoch": 0.8473080317740512,
426
+ "grad_norm": 0.4787558150068957,
427
+ "learning_rate": 7.876309144916312e-05,
428
+ "loss": 0.4433,
429
+ "step": 60
430
+ },
431
+ {
432
+ "epoch": 0.8614298323036187,
433
+ "grad_norm": 0.41827598666685606,
434
+ "learning_rate": 7.86627257535419e-05,
435
+ "loss": 0.4401,
436
+ "step": 61
437
+ },
438
+ {
439
+ "epoch": 0.8755516328331863,
440
+ "grad_norm": 0.4724100749619687,
441
+ "learning_rate": 7.855851442783414e-05,
442
+ "loss": 0.4374,
443
+ "step": 62
444
+ },
445
+ {
446
+ "epoch": 0.8896734333627537,
447
+ "grad_norm": 0.6571994588226032,
448
+ "learning_rate": 7.845046783753276e-05,
449
+ "loss": 0.4409,
450
+ "step": 63
451
+ },
452
+ {
453
+ "epoch": 0.9037952338923213,
454
+ "grad_norm": 0.9369294338435781,
455
+ "learning_rate": 7.833859672960943e-05,
456
+ "loss": 0.4407,
457
+ "step": 64
458
+ },
459
+ {
460
+ "epoch": 0.9179170344218888,
461
+ "grad_norm": 1.1601560447987704,
462
+ "learning_rate": 7.822291223144564e-05,
463
+ "loss": 0.4602,
464
+ "step": 65
465
+ },
466
+ {
467
+ "epoch": 0.9320388349514563,
468
+ "grad_norm": 0.6934703654331164,
469
+ "learning_rate": 7.810342584972585e-05,
470
+ "loss": 0.4369,
471
+ "step": 66
472
+ },
473
+ {
474
+ "epoch": 0.9461606354810238,
475
+ "grad_norm": 0.5791439547503463,
476
+ "learning_rate": 7.798014946929306e-05,
477
+ "loss": 0.4356,
478
+ "step": 67
479
+ },
480
+ {
481
+ "epoch": 0.9602824360105914,
482
+ "grad_norm": 0.8373041828808443,
483
+ "learning_rate": 7.785309535196657e-05,
484
+ "loss": 0.4504,
485
+ "step": 68
486
+ },
487
+ {
488
+ "epoch": 0.9744042365401588,
489
+ "grad_norm": 0.6796500376958069,
490
+ "learning_rate": 7.772227613532242e-05,
491
+ "loss": 0.4392,
492
+ "step": 69
493
+ },
494
+ {
495
+ "epoch": 0.9885260370697264,
496
+ "grad_norm": 0.6686880597044009,
497
+ "learning_rate": 7.758770483143634e-05,
498
+ "loss": 0.4474,
499
+ "step": 70
500
+ },
501
+ {
502
+ "epoch": 1.002647837599294,
503
+ "grad_norm": 0.6901488338737102,
504
+ "learning_rate": 7.74493948255895e-05,
505
+ "loss": 0.5108,
506
+ "step": 71
507
+ },
508
+ {
509
+ "epoch": 1.0167696381288613,
510
+ "grad_norm": 0.7139924415191212,
511
+ "learning_rate": 7.730735987493711e-05,
512
+ "loss": 0.4227,
513
+ "step": 72
514
+ },
515
+ {
516
+ "epoch": 1.030891438658429,
517
+ "grad_norm": 0.7623382444431029,
518
+ "learning_rate": 7.71616141071401e-05,
519
+ "loss": 0.419,
520
+ "step": 73
521
+ },
522
+ {
523
+ "epoch": 1.0450132391879965,
524
+ "grad_norm": 0.8179708530719029,
525
+ "learning_rate": 7.701217201895987e-05,
526
+ "loss": 0.4182,
527
+ "step": 74
528
+ },
529
+ {
530
+ "epoch": 1.059135039717564,
531
+ "grad_norm": 0.6036364923611257,
532
+ "learning_rate": 7.685904847481631e-05,
533
+ "loss": 0.4147,
534
+ "step": 75
535
+ },
536
+ {
537
+ "epoch": 1.0732568402471314,
538
+ "grad_norm": 0.5415944966587694,
539
+ "learning_rate": 7.670225870530936e-05,
540
+ "loss": 0.4192,
541
+ "step": 76
542
+ },
543
+ {
544
+ "epoch": 1.087378640776699,
545
+ "grad_norm": 0.548496642769106,
546
+ "learning_rate": 7.654181830570404e-05,
547
+ "loss": 0.4193,
548
+ "step": 77
549
+ },
550
+ {
551
+ "epoch": 1.1015004413062666,
552
+ "grad_norm": 0.4357435844414465,
553
+ "learning_rate": 7.637774323437929e-05,
554
+ "loss": 0.4126,
555
+ "step": 78
556
+ },
557
+ {
558
+ "epoch": 1.1156222418358341,
559
+ "grad_norm": 0.5890851003105865,
560
+ "learning_rate": 7.62100498112406e-05,
561
+ "loss": 0.4193,
562
+ "step": 79
563
+ },
564
+ {
565
+ "epoch": 1.1297440423654015,
566
+ "grad_norm": 0.5417176133106055,
567
+ "learning_rate": 7.603875471609677e-05,
568
+ "loss": 0.4069,
569
+ "step": 80
570
+ },
571
+ {
572
+ "epoch": 1.143865842894969,
573
+ "grad_norm": 0.5234067170715418,
574
+ "learning_rate": 7.586387498700084e-05,
575
+ "loss": 0.4187,
576
+ "step": 81
577
+ },
578
+ {
579
+ "epoch": 1.1579876434245366,
580
+ "grad_norm": 0.4795761329002007,
581
+ "learning_rate": 7.568542801855535e-05,
582
+ "loss": 0.4101,
583
+ "step": 82
584
+ },
585
+ {
586
+ "epoch": 1.1721094439541042,
587
+ "grad_norm": 0.510485207368403,
588
+ "learning_rate": 7.550343156018217e-05,
589
+ "loss": 0.4074,
590
+ "step": 83
591
+ },
592
+ {
593
+ "epoch": 1.1862312444836718,
594
+ "grad_norm": 0.5160993194955293,
595
+ "learning_rate": 7.531790371435709e-05,
596
+ "loss": 0.4105,
597
+ "step": 84
598
+ },
599
+ {
600
+ "epoch": 1.2003530450132391,
601
+ "grad_norm": 0.6272135654421417,
602
+ "learning_rate": 7.512886293480914e-05,
603
+ "loss": 0.4131,
604
+ "step": 85
605
+ },
606
+ {
607
+ "epoch": 1.2144748455428067,
608
+ "grad_norm": 0.7144516241332823,
609
+ "learning_rate": 7.49363280246852e-05,
610
+ "loss": 0.4123,
611
+ "step": 86
612
+ },
613
+ {
614
+ "epoch": 1.2285966460723743,
615
+ "grad_norm": 1.0197175196301183,
616
+ "learning_rate": 7.474031813467956e-05,
617
+ "loss": 0.4199,
618
+ "step": 87
619
+ },
620
+ {
621
+ "epoch": 1.2427184466019416,
622
+ "grad_norm": 0.9885970877399597,
623
+ "learning_rate": 7.454085276112925e-05,
624
+ "loss": 0.4152,
625
+ "step": 88
626
+ },
627
+ {
628
+ "epoch": 1.2568402471315092,
629
+ "grad_norm": 0.5518795345815659,
630
+ "learning_rate": 7.433795174407465e-05,
631
+ "loss": 0.4064,
632
+ "step": 89
633
+ },
634
+ {
635
+ "epoch": 1.2709620476610768,
636
+ "grad_norm": 0.42697954065556326,
637
+ "learning_rate": 7.413163526528623e-05,
638
+ "loss": 0.409,
639
+ "step": 90
640
+ },
641
+ {
642
+ "epoch": 1.2850838481906444,
643
+ "grad_norm": 0.698380780251885,
644
+ "learning_rate": 7.392192384625704e-05,
645
+ "loss": 0.4054,
646
+ "step": 91
647
+ },
648
+ {
649
+ "epoch": 1.299205648720212,
650
+ "grad_norm": 0.6703174317830842,
651
+ "learning_rate": 7.370883834616157e-05,
652
+ "loss": 0.4099,
653
+ "step": 92
654
+ },
655
+ {
656
+ "epoch": 1.3133274492497793,
657
+ "grad_norm": 0.3951173073488556,
658
+ "learning_rate": 7.349239995978095e-05,
659
+ "loss": 0.4084,
660
+ "step": 93
661
+ },
662
+ {
663
+ "epoch": 1.3274492497793469,
664
+ "grad_norm": 0.43174109319559356,
665
+ "learning_rate": 7.327263021539478e-05,
666
+ "loss": 0.4048,
667
+ "step": 94
668
+ },
669
+ {
670
+ "epoch": 1.3415710503089144,
671
+ "grad_norm": 0.5360712514545947,
672
+ "learning_rate": 7.30495509726398e-05,
673
+ "loss": 0.4068,
674
+ "step": 95
675
+ },
676
+ {
677
+ "epoch": 1.3556928508384818,
678
+ "grad_norm": 0.42774436448586106,
679
+ "learning_rate": 7.282318442033567e-05,
680
+ "loss": 0.4034,
681
+ "step": 96
682
+ },
683
+ {
684
+ "epoch": 1.3698146513680494,
685
+ "grad_norm": 0.5210499488927217,
686
+ "learning_rate": 7.259355307427781e-05,
687
+ "loss": 0.4078,
688
+ "step": 97
689
+ },
690
+ {
691
+ "epoch": 1.383936451897617,
692
+ "grad_norm": 0.7093148406292331,
693
+ "learning_rate": 7.236067977499791e-05,
694
+ "loss": 0.4084,
695
+ "step": 98
696
+ },
697
+ {
698
+ "epoch": 1.3980582524271845,
699
+ "grad_norm": 0.8196300420238753,
700
+ "learning_rate": 7.212458768549208e-05,
701
+ "loss": 0.4069,
702
+ "step": 99
703
+ },
704
+ {
705
+ "epoch": 1.412180052956752,
706
+ "grad_norm": 0.9973540383790642,
707
+ "learning_rate": 7.188530028891691e-05,
708
+ "loss": 0.4047,
709
+ "step": 100
710
+ },
711
+ {
712
+ "epoch": 1.4263018534863194,
713
+ "grad_norm": 1.0704815886198962,
714
+ "learning_rate": 7.164284138625367e-05,
715
+ "loss": 0.4075,
716
+ "step": 101
717
+ },
718
+ {
719
+ "epoch": 1.440423654015887,
720
+ "grad_norm": 0.5848553932345868,
721
+ "learning_rate": 7.13972350939409e-05,
722
+ "loss": 0.4036,
723
+ "step": 102
724
+ },
725
+ {
726
+ "epoch": 1.4545454545454546,
727
+ "grad_norm": 0.37289550464762866,
728
+ "learning_rate": 7.114850584147577e-05,
729
+ "loss": 0.4068,
730
+ "step": 103
731
+ },
732
+ {
733
+ "epoch": 1.468667255075022,
734
+ "grad_norm": 0.6651429035225815,
735
+ "learning_rate": 7.089667836898399e-05,
736
+ "loss": 0.4053,
737
+ "step": 104
738
+ },
739
+ {
740
+ "epoch": 1.4827890556045895,
741
+ "grad_norm": 0.6931193008736451,
742
+ "learning_rate": 7.064177772475912e-05,
743
+ "loss": 0.4002,
744
+ "step": 105
745
+ },
746
+ {
747
+ "epoch": 1.496910856134157,
748
+ "grad_norm": 0.3938085941153356,
749
+ "learning_rate": 7.038382926277113e-05,
750
+ "loss": 0.4013,
751
+ "step": 106
752
+ },
753
+ {
754
+ "epoch": 1.5110326566637247,
755
+ "grad_norm": 0.410899316731272,
756
+ "learning_rate": 7.012285864014445e-05,
757
+ "loss": 0.404,
758
+ "step": 107
759
+ },
760
+ {
761
+ "epoch": 1.5251544571932922,
762
+ "grad_norm": 0.5933306150673846,
763
+ "learning_rate": 6.985889181460602e-05,
764
+ "loss": 0.3992,
765
+ "step": 108
766
+ },
767
+ {
768
+ "epoch": 1.5392762577228596,
769
+ "grad_norm": 0.47465582200581674,
770
+ "learning_rate": 6.959195504190337e-05,
771
+ "loss": 0.4022,
772
+ "step": 109
773
+ },
774
+ {
775
+ "epoch": 1.5533980582524272,
776
+ "grad_norm": 0.29047076547162964,
777
+ "learning_rate": 6.932207487319305e-05,
778
+ "loss": 0.3933,
779
+ "step": 110
780
+ },
781
+ {
782
+ "epoch": 1.5675198587819947,
783
+ "grad_norm": 0.3955673661524972,
784
+ "learning_rate": 6.904927815239972e-05,
785
+ "loss": 0.4014,
786
+ "step": 111
787
+ },
788
+ {
789
+ "epoch": 1.581641659311562,
790
+ "grad_norm": 0.4729958849916794,
791
+ "learning_rate": 6.877359201354606e-05,
792
+ "loss": 0.4014,
793
+ "step": 112
794
+ },
795
+ {
796
+ "epoch": 1.5957634598411299,
797
+ "grad_norm": 0.3117935062342313,
798
+ "learning_rate": 6.84950438780538e-05,
799
+ "loss": 0.4021,
800
+ "step": 113
801
+ },
802
+ {
803
+ "epoch": 1.6098852603706972,
804
+ "grad_norm": 0.29707807435124145,
805
+ "learning_rate": 6.821366145201636e-05,
806
+ "loss": 0.4003,
807
+ "step": 114
808
+ },
809
+ {
810
+ "epoch": 1.6240070609002648,
811
+ "grad_norm": 0.43753418225532925,
812
+ "learning_rate": 6.792947272344292e-05,
813
+ "loss": 0.3992,
814
+ "step": 115
815
+ },
816
+ {
817
+ "epoch": 1.6381288614298324,
818
+ "grad_norm": 0.27791625901461003,
819
+ "learning_rate": 6.76425059594746e-05,
820
+ "loss": 0.3982,
821
+ "step": 116
822
+ },
823
+ {
824
+ "epoch": 1.6522506619593997,
825
+ "grad_norm": 0.2525501356536547,
826
+ "learning_rate": 6.73527897035728e-05,
827
+ "loss": 0.4053,
828
+ "step": 117
829
+ },
830
+ {
831
+ "epoch": 1.6663724624889673,
832
+ "grad_norm": 0.3669470139311434,
833
+ "learning_rate": 6.706035277268022e-05,
834
+ "loss": 0.4024,
835
+ "step": 118
836
+ },
837
+ {
838
+ "epoch": 1.6804942630185349,
839
+ "grad_norm": 0.30825865476024705,
840
+ "learning_rate": 6.676522425435433e-05,
841
+ "loss": 0.3945,
842
+ "step": 119
843
+ },
844
+ {
845
+ "epoch": 1.6946160635481022,
846
+ "grad_norm": 0.28018108144253323,
847
+ "learning_rate": 6.646743350387438e-05,
848
+ "loss": 0.3984,
849
+ "step": 120
850
+ },
851
+ {
852
+ "epoch": 1.70873786407767,
853
+ "grad_norm": 0.30987982118204843,
854
+ "learning_rate": 6.616701014132138e-05,
855
+ "loss": 0.4021,
856
+ "step": 121
857
+ },
858
+ {
859
+ "epoch": 1.7228596646072374,
860
+ "grad_norm": 0.3633571089136772,
861
+ "learning_rate": 6.586398404863198e-05,
862
+ "loss": 0.4026,
863
+ "step": 122
864
+ },
865
+ {
866
+ "epoch": 1.736981465136805,
867
+ "grad_norm": 0.36013913213670684,
868
+ "learning_rate": 6.555838536662624e-05,
869
+ "loss": 0.3925,
870
+ "step": 123
871
+ },
872
+ {
873
+ "epoch": 1.7511032656663725,
874
+ "grad_norm": 0.36709693358558493,
875
+ "learning_rate": 6.525024449200956e-05,
876
+ "loss": 0.3976,
877
+ "step": 124
878
+ },
879
+ {
880
+ "epoch": 1.7652250661959399,
881
+ "grad_norm": 0.44695442666677676,
882
+ "learning_rate": 6.493959207434934e-05,
883
+ "loss": 0.3982,
884
+ "step": 125
885
+ },
886
+ {
887
+ "epoch": 1.7793468667255075,
888
+ "grad_norm": 0.4500722428050271,
889
+ "learning_rate": 6.462645901302633e-05,
890
+ "loss": 0.3947,
891
+ "step": 126
892
+ },
893
+ {
894
+ "epoch": 1.793468667255075,
895
+ "grad_norm": 0.39821702121821073,
896
+ "learning_rate": 6.431087645416121e-05,
897
+ "loss": 0.4015,
898
+ "step": 127
899
+ },
900
+ {
901
+ "epoch": 1.8075904677846424,
902
+ "grad_norm": 0.42798393839154475,
903
+ "learning_rate": 6.399287578751656e-05,
904
+ "loss": 0.3959,
905
+ "step": 128
906
+ },
907
+ {
908
+ "epoch": 1.8217122683142102,
909
+ "grad_norm": 0.4978207058435827,
910
+ "learning_rate": 6.367248864337471e-05,
911
+ "loss": 0.3975,
912
+ "step": 129
913
+ },
914
+ {
915
+ "epoch": 1.8358340688437775,
916
+ "grad_norm": 0.4727933665511357,
917
+ "learning_rate": 6.334974688939161e-05,
918
+ "loss": 0.3961,
919
+ "step": 130
920
+ },
921
+ {
922
+ "epoch": 1.849955869373345,
923
+ "grad_norm": 0.30157997491072186,
924
+ "learning_rate": 6.302468262742695e-05,
925
+ "loss": 0.3923,
926
+ "step": 131
927
+ },
928
+ {
929
+ "epoch": 1.8640776699029127,
930
+ "grad_norm": 0.30111696128126747,
931
+ "learning_rate": 6.269732819035128e-05,
932
+ "loss": 0.3895,
933
+ "step": 132
934
+ },
935
+ {
936
+ "epoch": 1.87819947043248,
937
+ "grad_norm": 0.33866239621320493,
938
+ "learning_rate": 6.236771613882987e-05,
939
+ "loss": 0.3933,
940
+ "step": 133
941
+ },
942
+ {
943
+ "epoch": 1.8923212709620476,
944
+ "grad_norm": 0.2963866045397337,
945
+ "learning_rate": 6.20358792580841e-05,
946
+ "loss": 0.3865,
947
+ "step": 134
948
+ },
949
+ {
950
+ "epoch": 1.9064430714916152,
951
+ "grad_norm": 0.2821832464959724,
952
+ "learning_rate": 6.170185055463039e-05,
953
+ "loss": 0.3985,
954
+ "step": 135
955
+ },
956
+ {
957
+ "epoch": 1.9205648720211828,
958
+ "grad_norm": 0.26513081199542754,
959
+ "learning_rate": 6.136566325299715e-05,
960
+ "loss": 0.3972,
961
+ "step": 136
962
+ },
963
+ {
964
+ "epoch": 1.9346866725507503,
965
+ "grad_norm": 0.25040847849987535,
966
+ "learning_rate": 6.102735079242019e-05,
967
+ "loss": 0.398,
968
+ "step": 137
969
+ },
970
+ {
971
+ "epoch": 1.9488084730803177,
972
+ "grad_norm": 0.303971008854815,
973
+ "learning_rate": 6.068694682351651e-05,
974
+ "loss": 0.3957,
975
+ "step": 138
976
+ },
977
+ {
978
+ "epoch": 1.9629302736098853,
979
+ "grad_norm": 0.2610849344447032,
980
+ "learning_rate": 6.0344485204937274e-05,
981
+ "loss": 0.3953,
982
+ "step": 139
983
+ },
984
+ {
985
+ "epoch": 1.9770520741394528,
986
+ "grad_norm": 0.24540141466965165,
987
+ "learning_rate": 6.000000000000001e-05,
988
+ "loss": 0.3955,
989
+ "step": 140
990
+ },
991
+ {
992
+ "epoch": 1.9911738746690202,
993
+ "grad_norm": 0.2807585102662493,
994
+ "learning_rate": 5.965352547330046e-05,
995
+ "loss": 0.4096,
996
+ "step": 141
997
+ },
998
+ {
999
+ "epoch": 2.005295675198588,
1000
+ "grad_norm": 0.4269953277008037,
1001
+ "learning_rate": 5.930509608730444e-05,
1002
+ "loss": 0.4441,
1003
+ "step": 142
1004
+ },
1005
+ {
1006
+ "epoch": 2.0194174757281553,
1007
+ "grad_norm": 0.5672907609303462,
1008
+ "learning_rate": 5.895474649891995e-05,
1009
+ "loss": 0.3728,
1010
+ "step": 143
1011
+ },
1012
+ {
1013
+ "epoch": 2.0335392762577227,
1014
+ "grad_norm": 0.7266748405757633,
1015
+ "learning_rate": 5.860251155605003e-05,
1016
+ "loss": 0.3745,
1017
+ "step": 144
1018
+ },
1019
+ {
1020
+ "epoch": 2.0476610767872905,
1021
+ "grad_norm": 1.0069160934332146,
1022
+ "learning_rate": 5.824842629412653e-05,
1023
+ "loss": 0.3832,
1024
+ "step": 145
1025
+ },
1026
+ {
1027
+ "epoch": 2.061782877316858,
1028
+ "grad_norm": 0.9424187541004289,
1029
+ "learning_rate": 5.7892525932625305e-05,
1030
+ "loss": 0.3779,
1031
+ "step": 146
1032
+ },
1033
+ {
1034
+ "epoch": 2.0759046778464256,
1035
+ "grad_norm": 0.42574191446629944,
1036
+ "learning_rate": 5.75348458715631e-05,
1037
+ "loss": 0.3718,
1038
+ "step": 147
1039
+ },
1040
+ {
1041
+ "epoch": 2.090026478375993,
1042
+ "grad_norm": 0.7408316783846461,
1043
+ "learning_rate": 5.7175421687976374e-05,
1044
+ "loss": 0.3699,
1045
+ "step": 148
1046
+ },
1047
+ {
1048
+ "epoch": 2.1041482789055603,
1049
+ "grad_norm": 0.6750908749341442,
1050
+ "learning_rate": 5.681428913238263e-05,
1051
+ "loss": 0.367,
1052
+ "step": 149
1053
+ },
1054
+ {
1055
+ "epoch": 2.118270079435128,
1056
+ "grad_norm": 0.486610272879909,
1057
+ "learning_rate": 5.645148412522447e-05,
1058
+ "loss": 0.3752,
1059
+ "step": 150
1060
+ },
1061
+ {
1062
+ "epoch": 2.1323918799646955,
1063
+ "grad_norm": 0.5306866815139071,
1064
+ "learning_rate": 5.60870427532967e-05,
1065
+ "loss": 0.3657,
1066
+ "step": 151
1067
+ },
1068
+ {
1069
+ "epoch": 2.146513680494263,
1070
+ "grad_norm": 0.4884339447717486,
1071
+ "learning_rate": 5.572100126615695e-05,
1072
+ "loss": 0.3701,
1073
+ "step": 152
1074
+ },
1075
+ {
1076
+ "epoch": 2.1606354810238306,
1077
+ "grad_norm": 0.39396923901380754,
1078
+ "learning_rate": 5.535339607252003e-05,
1079
+ "loss": 0.364,
1080
+ "step": 153
1081
+ },
1082
+ {
1083
+ "epoch": 2.174757281553398,
1084
+ "grad_norm": 0.3784748162116266,
1085
+ "learning_rate": 5.4984263736636494e-05,
1086
+ "loss": 0.3641,
1087
+ "step": 154
1088
+ },
1089
+ {
1090
+ "epoch": 2.1888790820829658,
1091
+ "grad_norm": 0.38537106208995364,
1092
+ "learning_rate": 5.461364097465581e-05,
1093
+ "loss": 0.3634,
1094
+ "step": 155
1095
+ },
1096
+ {
1097
+ "epoch": 2.203000882612533,
1098
+ "grad_norm": 0.33639666599879814,
1099
+ "learning_rate": 5.424156465097428e-05,
1100
+ "loss": 0.3676,
1101
+ "step": 156
1102
+ },
1103
+ {
1104
+ "epoch": 2.2171226831421005,
1105
+ "grad_norm": 0.3286791724075738,
1106
+ "learning_rate": 5.38680717745683e-05,
1107
+ "loss": 0.3649,
1108
+ "step": 157
1109
+ },
1110
+ {
1111
+ "epoch": 2.2312444836716683,
1112
+ "grad_norm": 0.3129994921836922,
1113
+ "learning_rate": 5.349319949531321e-05,
1114
+ "loss": 0.3646,
1115
+ "step": 158
1116
+ },
1117
+ {
1118
+ "epoch": 2.2453662842012356,
1119
+ "grad_norm": 0.3031016329231297,
1120
+ "learning_rate": 5.3116985100288185e-05,
1121
+ "loss": 0.3682,
1122
+ "step": 159
1123
+ },
1124
+ {
1125
+ "epoch": 2.259488084730803,
1126
+ "grad_norm": 0.27541410223019297,
1127
+ "learning_rate": 5.2739466010067385e-05,
1128
+ "loss": 0.3606,
1129
+ "step": 160
1130
+ },
1131
+ {
1132
+ "epoch": 2.2736098852603708,
1133
+ "grad_norm": 0.36257443661095795,
1134
+ "learning_rate": 5.23606797749979e-05,
1135
+ "loss": 0.3638,
1136
+ "step": 161
1137
+ },
1138
+ {
1139
+ "epoch": 2.287731685789938,
1140
+ "grad_norm": 0.2567579985831816,
1141
+ "learning_rate": 5.1980664071464776e-05,
1142
+ "loss": 0.3667,
1143
+ "step": 162
1144
+ },
1145
+ {
1146
+ "epoch": 2.301853486319506,
1147
+ "grad_norm": 0.31190867351244567,
1148
+ "learning_rate": 5.159945669814345e-05,
1149
+ "loss": 0.3696,
1150
+ "step": 163
1151
+ },
1152
+ {
1153
+ "epoch": 2.3159752868490733,
1154
+ "grad_norm": 0.2937382011800516,
1155
+ "learning_rate": 5.121709557224011e-05,
1156
+ "loss": 0.3606,
1157
+ "step": 164
1158
+ },
1159
+ {
1160
+ "epoch": 2.3300970873786406,
1161
+ "grad_norm": 0.2256249267158452,
1162
+ "learning_rate": 5.0833618725720214e-05,
1163
+ "loss": 0.365,
1164
+ "step": 165
1165
+ },
1166
+ {
1167
+ "epoch": 2.3442188879082084,
1168
+ "grad_norm": 0.298331814145165,
1169
+ "learning_rate": 5.044906430152554e-05,
1170
+ "loss": 0.3667,
1171
+ "step": 166
1172
+ },
1173
+ {
1174
+ "epoch": 2.358340688437776,
1175
+ "grad_norm": 0.18895739371171252,
1176
+ "learning_rate": 5.006347054978035e-05,
1177
+ "loss": 0.3699,
1178
+ "step": 167
1179
+ },
1180
+ {
1181
+ "epoch": 2.3724624889673436,
1182
+ "grad_norm": 0.25034317840687215,
1183
+ "learning_rate": 4.967687582398671e-05,
1184
+ "loss": 0.3587,
1185
+ "step": 168
1186
+ },
1187
+ {
1188
+ "epoch": 2.386584289496911,
1189
+ "grad_norm": 0.17907966208059622,
1190
+ "learning_rate": 4.9289318577209706e-05,
1191
+ "loss": 0.3636,
1192
+ "step": 169
1193
+ },
1194
+ {
1195
+ "epoch": 2.4007060900264783,
1196
+ "grad_norm": 0.21210095036882018,
1197
+ "learning_rate": 4.890083735825258e-05,
1198
+ "loss": 0.3605,
1199
+ "step": 170
1200
+ },
1201
+ {
1202
+ "epoch": 2.414827890556046,
1203
+ "grad_norm": 0.16489305774518265,
1204
+ "learning_rate": 4.851147080782249e-05,
1205
+ "loss": 0.3648,
1206
+ "step": 171
1207
+ },
1208
+ {
1209
+ "epoch": 2.4289496910856134,
1210
+ "grad_norm": 0.19143993377462817,
1211
+ "learning_rate": 4.812125765468705e-05,
1212
+ "loss": 0.3606,
1213
+ "step": 172
1214
+ },
1215
+ {
1216
+ "epoch": 2.443071491615181,
1217
+ "grad_norm": 0.17804983590295367,
1218
+ "learning_rate": 4.773023671182213e-05,
1219
+ "loss": 0.3637,
1220
+ "step": 173
1221
+ },
1222
+ {
1223
+ "epoch": 2.4571932921447486,
1224
+ "grad_norm": 0.16707259472270428,
1225
+ "learning_rate": 4.73384468725513e-05,
1226
+ "loss": 0.3636,
1227
+ "step": 174
1228
+ },
1229
+ {
1230
+ "epoch": 2.471315092674316,
1231
+ "grad_norm": 0.17481885632199456,
1232
+ "learning_rate": 4.694592710667723e-05,
1233
+ "loss": 0.3645,
1234
+ "step": 175
1235
+ },
1236
+ {
1237
+ "epoch": 2.4854368932038833,
1238
+ "grad_norm": 0.1681053608116463,
1239
+ "learning_rate": 4.6552716456605514e-05,
1240
+ "loss": 0.3605,
1241
+ "step": 176
1242
+ },
1243
+ {
1244
+ "epoch": 2.499558693733451,
1245
+ "grad_norm": 0.14964611415536702,
1246
+ "learning_rate": 4.615885403346134e-05,
1247
+ "loss": 0.3562,
1248
+ "step": 177
1249
+ },
1250
+ {
1251
+ "epoch": 2.5136804942630184,
1252
+ "grad_norm": 0.14164675176141614,
1253
+ "learning_rate": 4.576437901319921e-05,
1254
+ "loss": 0.3636,
1255
+ "step": 178
1256
+ },
1257
+ {
1258
+ "epoch": 2.5278022947925862,
1259
+ "grad_norm": 0.16548274190466053,
1260
+ "learning_rate": 4.5369330632706223e-05,
1261
+ "loss": 0.3648,
1262
+ "step": 179
1263
+ },
1264
+ {
1265
+ "epoch": 2.5419240953221536,
1266
+ "grad_norm": 0.15269683467677936,
1267
+ "learning_rate": 4.4973748185899416e-05,
1268
+ "loss": 0.3612,
1269
+ "step": 180
1270
+ },
1271
+ {
1272
+ "epoch": 2.556045895851721,
1273
+ "grad_norm": 0.16869434151649507,
1274
+ "learning_rate": 4.457767101981728e-05,
1275
+ "loss": 0.3677,
1276
+ "step": 181
1277
+ },
1278
+ {
1279
+ "epoch": 2.5701676963812887,
1280
+ "grad_norm": 0.13337265767063033,
1281
+ "learning_rate": 4.418113853070614e-05,
1282
+ "loss": 0.3626,
1283
+ "step": 182
1284
+ },
1285
+ {
1286
+ "epoch": 2.584289496910856,
1287
+ "grad_norm": 0.14682144236789746,
1288
+ "learning_rate": 4.378419016010149e-05,
1289
+ "loss": 0.364,
1290
+ "step": 183
1291
+ },
1292
+ {
1293
+ "epoch": 2.598411297440424,
1294
+ "grad_norm": 0.150937900490833,
1295
+ "learning_rate": 4.338686539090493e-05,
1296
+ "loss": 0.3615,
1297
+ "step": 184
1298
+ },
1299
+ {
1300
+ "epoch": 2.6125330979699912,
1301
+ "grad_norm": 0.1341377364551312,
1302
+ "learning_rate": 4.298920374345698e-05,
1303
+ "loss": 0.3596,
1304
+ "step": 185
1305
+ },
1306
+ {
1307
+ "epoch": 2.6266548984995586,
1308
+ "grad_norm": 0.15572962430762588,
1309
+ "learning_rate": 4.259124477160607e-05,
1310
+ "loss": 0.3625,
1311
+ "step": 186
1312
+ },
1313
+ {
1314
+ "epoch": 2.6407766990291264,
1315
+ "grad_norm": 0.1475404012486826,
1316
+ "learning_rate": 4.219302805877441e-05,
1317
+ "loss": 0.3617,
1318
+ "step": 187
1319
+ },
1320
+ {
1321
+ "epoch": 2.6548984995586937,
1322
+ "grad_norm": 0.1781262720167099,
1323
+ "learning_rate": 4.17945932140206e-05,
1324
+ "loss": 0.3666,
1325
+ "step": 188
1326
+ },
1327
+ {
1328
+ "epoch": 2.6690203000882615,
1329
+ "grad_norm": 0.13824587532461255,
1330
+ "learning_rate": 4.139597986810005e-05,
1331
+ "loss": 0.3629,
1332
+ "step": 189
1333
+ },
1334
+ {
1335
+ "epoch": 2.683142100617829,
1336
+ "grad_norm": 0.15963593698467365,
1337
+ "learning_rate": 4.0997227669522924e-05,
1338
+ "loss": 0.3628,
1339
+ "step": 190
1340
+ },
1341
+ {
1342
+ "epoch": 2.6972639011473962,
1343
+ "grad_norm": 0.12511715922220792,
1344
+ "learning_rate": 4.059837628061055e-05,
1345
+ "loss": 0.3638,
1346
+ "step": 191
1347
+ },
1348
+ {
1349
+ "epoch": 2.7113857016769636,
1350
+ "grad_norm": 0.15752313446706914,
1351
+ "learning_rate": 4.019946537355033e-05,
1352
+ "loss": 0.3614,
1353
+ "step": 192
1354
+ },
1355
+ {
1356
+ "epoch": 2.7255075022065314,
1357
+ "grad_norm": 0.13647413322377422,
1358
+ "learning_rate": 3.9800534626449683e-05,
1359
+ "loss": 0.3634,
1360
+ "step": 193
1361
+ },
1362
+ {
1363
+ "epoch": 2.7396293027360987,
1364
+ "grad_norm": 0.13525074863232164,
1365
+ "learning_rate": 3.940162371938947e-05,
1366
+ "loss": 0.3587,
1367
+ "step": 194
1368
+ },
1369
+ {
1370
+ "epoch": 2.7537511032656665,
1371
+ "grad_norm": 0.13297285710552217,
1372
+ "learning_rate": 3.9002772330477096e-05,
1373
+ "loss": 0.3599,
1374
+ "step": 195
1375
+ },
1376
+ {
1377
+ "epoch": 2.767872903795234,
1378
+ "grad_norm": 0.14225004712058384,
1379
+ "learning_rate": 3.860402013189998e-05,
1380
+ "loss": 0.3575,
1381
+ "step": 196
1382
+ },
1383
+ {
1384
+ "epoch": 2.7819947043248012,
1385
+ "grad_norm": 0.13373630438071715,
1386
+ "learning_rate": 3.820540678597942e-05,
1387
+ "loss": 0.3648,
1388
+ "step": 197
1389
+ },
1390
+ {
1391
+ "epoch": 2.796116504854369,
1392
+ "grad_norm": 0.12615478953418785,
1393
+ "learning_rate": 3.78069719412256e-05,
1394
+ "loss": 0.3609,
1395
+ "step": 198
1396
+ },
1397
+ {
1398
+ "epoch": 2.8102383053839364,
1399
+ "grad_norm": 0.12669967225071216,
1400
+ "learning_rate": 3.740875522839393e-05,
1401
+ "loss": 0.3608,
1402
+ "step": 199
1403
+ },
1404
+ {
1405
+ "epoch": 2.824360105913504,
1406
+ "grad_norm": 0.13635382545910668,
1407
+ "learning_rate": 3.7010796256543034e-05,
1408
+ "loss": 0.3549,
1409
+ "step": 200
1410
+ },
1411
+ {
1412
+ "epoch": 2.8384819064430715,
1413
+ "grad_norm": 0.11546629160995592,
1414
+ "learning_rate": 3.661313460909507e-05,
1415
+ "loss": 0.3593,
1416
+ "step": 201
1417
+ },
1418
+ {
1419
+ "epoch": 2.852603706972639,
1420
+ "grad_norm": 0.12139128794186867,
1421
+ "learning_rate": 3.621580983989852e-05,
1422
+ "loss": 0.3608,
1423
+ "step": 202
1424
+ },
1425
+ {
1426
+ "epoch": 2.8667255075022067,
1427
+ "grad_norm": 0.12319344865206981,
1428
+ "learning_rate": 3.581886146929387e-05,
1429
+ "loss": 0.3605,
1430
+ "step": 203
1431
+ },
1432
+ {
1433
+ "epoch": 2.880847308031774,
1434
+ "grad_norm": 0.14742473593815408,
1435
+ "learning_rate": 3.542232898018273e-05,
1436
+ "loss": 0.3582,
1437
+ "step": 204
1438
+ },
1439
+ {
1440
+ "epoch": 2.894969108561342,
1441
+ "grad_norm": 0.11086460953888361,
1442
+ "learning_rate": 3.5026251814100604e-05,
1443
+ "loss": 0.359,
1444
+ "step": 205
1445
+ },
1446
+ {
1447
+ "epoch": 2.909090909090909,
1448
+ "grad_norm": 0.13533789741325936,
1449
+ "learning_rate": 3.4630669367293797e-05,
1450
+ "loss": 0.3562,
1451
+ "step": 206
1452
+ },
1453
+ {
1454
+ "epoch": 2.9232127096204765,
1455
+ "grad_norm": 0.11573276006772669,
1456
+ "learning_rate": 3.4235620986800806e-05,
1457
+ "loss": 0.3641,
1458
+ "step": 207
1459
+ },
1460
+ {
1461
+ "epoch": 2.937334510150044,
1462
+ "grad_norm": 0.12838446326005826,
1463
+ "learning_rate": 3.384114596653866e-05,
1464
+ "loss": 0.361,
1465
+ "step": 208
1466
+ },
1467
+ {
1468
+ "epoch": 2.9514563106796117,
1469
+ "grad_norm": 0.12304575149956651,
1470
+ "learning_rate": 3.344728354339449e-05,
1471
+ "loss": 0.3586,
1472
+ "step": 209
1473
+ },
1474
+ {
1475
+ "epoch": 2.965578111209179,
1476
+ "grad_norm": 0.12773291501034634,
1477
+ "learning_rate": 3.305407289332279e-05,
1478
+ "loss": 0.3559,
1479
+ "step": 210
1480
+ },
1481
+ {
1482
+ "epoch": 2.979699911738747,
1483
+ "grad_norm": 0.16335068209235123,
1484
+ "learning_rate": 3.266155312744871e-05,
1485
+ "loss": 0.3631,
1486
+ "step": 211
1487
+ },
1488
+ {
1489
+ "epoch": 2.993821712268314,
1490
+ "grad_norm": 0.1186978138033666,
1491
+ "learning_rate": 3.226976328817788e-05,
1492
+ "loss": 0.3927,
1493
+ "step": 212
1494
+ },
1495
+ {
1496
+ "epoch": 3.0079435127978815,
1497
+ "grad_norm": 0.16211984652497452,
1498
+ "learning_rate": 3.187874234531296e-05,
1499
+ "loss": 0.3822,
1500
+ "step": 213
1501
+ },
1502
+ {
1503
+ "epoch": 3.0220653133274493,
1504
+ "grad_norm": 0.14214772364476422,
1505
+ "learning_rate": 3.1488529192177526e-05,
1506
+ "loss": 0.3393,
1507
+ "step": 214
1508
+ },
1509
+ {
1510
+ "epoch": 3.0361871138570167,
1511
+ "grad_norm": 0.13255124874063956,
1512
+ "learning_rate": 3.109916264174743e-05,
1513
+ "loss": 0.3373,
1514
+ "step": 215
1515
+ },
1516
+ {
1517
+ "epoch": 3.0503089143865845,
1518
+ "grad_norm": 0.16606000923059963,
1519
+ "learning_rate": 3.071068142279031e-05,
1520
+ "loss": 0.3371,
1521
+ "step": 216
1522
+ },
1523
+ {
1524
+ "epoch": 3.064430714916152,
1525
+ "grad_norm": 0.14657630327267304,
1526
+ "learning_rate": 3.0323124176013297e-05,
1527
+ "loss": 0.3355,
1528
+ "step": 217
1529
+ },
1530
+ {
1531
+ "epoch": 3.078552515445719,
1532
+ "grad_norm": 0.1341605905929287,
1533
+ "learning_rate": 2.993652945021966e-05,
1534
+ "loss": 0.3377,
1535
+ "step": 218
1536
+ },
1537
+ {
1538
+ "epoch": 3.092674315975287,
1539
+ "grad_norm": 0.14490108611743277,
1540
+ "learning_rate": 2.955093569847447e-05,
1541
+ "loss": 0.3366,
1542
+ "step": 219
1543
+ },
1544
+ {
1545
+ "epoch": 3.1067961165048543,
1546
+ "grad_norm": 0.13919821523407064,
1547
+ "learning_rate": 2.9166381274279803e-05,
1548
+ "loss": 0.3312,
1549
+ "step": 220
1550
+ },
1551
+ {
1552
+ "epoch": 3.120917917034422,
1553
+ "grad_norm": 0.16300975058477254,
1554
+ "learning_rate": 2.8782904427759898e-05,
1555
+ "loss": 0.3311,
1556
+ "step": 221
1557
+ },
1558
+ {
1559
+ "epoch": 3.1350397175639895,
1560
+ "grad_norm": 0.1183225077661534,
1561
+ "learning_rate": 2.8400543301856553e-05,
1562
+ "loss": 0.3282,
1563
+ "step": 222
1564
+ },
1565
+ {
1566
+ "epoch": 3.149161518093557,
1567
+ "grad_norm": 0.14092204872317698,
1568
+ "learning_rate": 2.8019335928535234e-05,
1569
+ "loss": 0.3297,
1570
+ "step": 223
1571
+ },
1572
+ {
1573
+ "epoch": 3.1632833186231246,
1574
+ "grad_norm": 0.1282390396455681,
1575
+ "learning_rate": 2.7639320225002108e-05,
1576
+ "loss": 0.327,
1577
+ "step": 224
1578
+ },
1579
+ {
1580
+ "epoch": 3.177405119152692,
1581
+ "grad_norm": 0.12936573725572997,
1582
+ "learning_rate": 2.7260533989932628e-05,
1583
+ "loss": 0.3346,
1584
+ "step": 225
1585
+ },
1586
+ {
1587
+ "epoch": 3.1915269196822593,
1588
+ "grad_norm": 0.11727309920196596,
1589
+ "learning_rate": 2.688301489971183e-05,
1590
+ "loss": 0.3271,
1591
+ "step": 226
1592
+ },
1593
+ {
1594
+ "epoch": 3.205648720211827,
1595
+ "grad_norm": 0.12274146196879084,
1596
+ "learning_rate": 2.6506800504686806e-05,
1597
+ "loss": 0.328,
1598
+ "step": 227
1599
+ },
1600
+ {
1601
+ "epoch": 3.2197705207413945,
1602
+ "grad_norm": 0.11029811005681434,
1603
+ "learning_rate": 2.6131928225431713e-05,
1604
+ "loss": 0.33,
1605
+ "step": 228
1606
+ },
1607
+ {
1608
+ "epoch": 3.233892321270962,
1609
+ "grad_norm": 0.12463320131443856,
1610
+ "learning_rate": 2.575843534902573e-05,
1611
+ "loss": 0.3358,
1612
+ "step": 229
1613
+ },
1614
+ {
1615
+ "epoch": 3.2480141218005296,
1616
+ "grad_norm": 0.11256203223325899,
1617
+ "learning_rate": 2.53863590253442e-05,
1618
+ "loss": 0.3364,
1619
+ "step": 230
1620
+ },
1621
+ {
1622
+ "epoch": 3.262135922330097,
1623
+ "grad_norm": 0.10841743259905046,
1624
+ "learning_rate": 2.501573626336352e-05,
1625
+ "loss": 0.3337,
1626
+ "step": 231
1627
+ },
1628
+ {
1629
+ "epoch": 3.2762577228596648,
1630
+ "grad_norm": 0.11593566286716334,
1631
+ "learning_rate": 2.464660392747999e-05,
1632
+ "loss": 0.3301,
1633
+ "step": 232
1634
+ },
1635
+ {
1636
+ "epoch": 3.290379523389232,
1637
+ "grad_norm": 0.10969283000201786,
1638
+ "learning_rate": 2.427899873384306e-05,
1639
+ "loss": 0.332,
1640
+ "step": 233
1641
+ },
1642
+ {
1643
+ "epoch": 3.3045013239187995,
1644
+ "grad_norm": 0.12033857141829916,
1645
+ "learning_rate": 2.3912957246703305e-05,
1646
+ "loss": 0.3377,
1647
+ "step": 234
1648
+ },
1649
+ {
1650
+ "epoch": 3.3186231244483673,
1651
+ "grad_norm": 0.10210001952439796,
1652
+ "learning_rate": 2.3548515874775547e-05,
1653
+ "loss": 0.3297,
1654
+ "step": 235
1655
+ },
1656
+ {
1657
+ "epoch": 3.3327449249779346,
1658
+ "grad_norm": 0.12241287674636975,
1659
+ "learning_rate": 2.3185710867617387e-05,
1660
+ "loss": 0.3361,
1661
+ "step": 236
1662
+ },
1663
+ {
1664
+ "epoch": 3.3468667255075024,
1665
+ "grad_norm": 0.10969299118083352,
1666
+ "learning_rate": 2.2824578312023632e-05,
1667
+ "loss": 0.3322,
1668
+ "step": 237
1669
+ },
1670
+ {
1671
+ "epoch": 3.3609885260370698,
1672
+ "grad_norm": 0.12151530040465547,
1673
+ "learning_rate": 2.24651541284369e-05,
1674
+ "loss": 0.3361,
1675
+ "step": 238
1676
+ },
1677
+ {
1678
+ "epoch": 3.375110326566637,
1679
+ "grad_norm": 0.10631863902215113,
1680
+ "learning_rate": 2.210747406737469e-05,
1681
+ "loss": 0.3344,
1682
+ "step": 239
1683
+ },
1684
+ {
1685
+ "epoch": 3.389232127096205,
1686
+ "grad_norm": 0.11983276963310185,
1687
+ "learning_rate": 2.175157370587348e-05,
1688
+ "loss": 0.3324,
1689
+ "step": 240
1690
+ },
1691
+ {
1692
+ "epoch": 3.4033539276257723,
1693
+ "grad_norm": 0.10203118790788067,
1694
+ "learning_rate": 2.1397488443949985e-05,
1695
+ "loss": 0.3366,
1696
+ "step": 241
1697
+ },
1698
+ {
1699
+ "epoch": 3.4174757281553396,
1700
+ "grad_norm": 0.11460733945580791,
1701
+ "learning_rate": 2.1045253501080058e-05,
1702
+ "loss": 0.3335,
1703
+ "step": 242
1704
+ },
1705
+ {
1706
+ "epoch": 3.4315975286849074,
1707
+ "grad_norm": 0.10361959122829918,
1708
+ "learning_rate": 2.0694903912695574e-05,
1709
+ "loss": 0.3342,
1710
+ "step": 243
1711
+ },
1712
+ {
1713
+ "epoch": 3.4457193292144748,
1714
+ "grad_norm": 0.10602009006473866,
1715
+ "learning_rate": 2.0346474526699552e-05,
1716
+ "loss": 0.3343,
1717
+ "step": 244
1718
+ },
1719
+ {
1720
+ "epoch": 3.459841129744042,
1721
+ "grad_norm": 0.0981614565374733,
1722
+ "learning_rate": 2.0000000000000012e-05,
1723
+ "loss": 0.3342,
1724
+ "step": 245
1725
+ },
1726
+ {
1727
+ "epoch": 3.47396293027361,
1728
+ "grad_norm": 0.10563881070295801,
1729
+ "learning_rate": 1.9655514795062746e-05,
1730
+ "loss": 0.3317,
1731
+ "step": 246
1732
+ },
1733
+ {
1734
+ "epoch": 3.4880847308031773,
1735
+ "grad_norm": 0.0982393867459211,
1736
+ "learning_rate": 1.931305317648349e-05,
1737
+ "loss": 0.336,
1738
+ "step": 247
1739
+ },
1740
+ {
1741
+ "epoch": 3.502206531332745,
1742
+ "grad_norm": 0.10341107342114168,
1743
+ "learning_rate": 1.897264920757981e-05,
1744
+ "loss": 0.3329,
1745
+ "step": 248
1746
+ },
1747
+ {
1748
+ "epoch": 3.5163283318623124,
1749
+ "grad_norm": 0.1009205150822494,
1750
+ "learning_rate": 1.8634336747002853e-05,
1751
+ "loss": 0.3363,
1752
+ "step": 249
1753
+ },
1754
+ {
1755
+ "epoch": 3.5304501323918798,
1756
+ "grad_norm": 0.09562831286129422,
1757
+ "learning_rate": 1.829814944536963e-05,
1758
+ "loss": 0.3366,
1759
+ "step": 250
1760
+ },
1761
+ {
1762
+ "epoch": 3.5445719329214476,
1763
+ "grad_norm": 0.10055162803558056,
1764
+ "learning_rate": 1.7964120741915905e-05,
1765
+ "loss": 0.3359,
1766
+ "step": 251
1767
+ },
1768
+ {
1769
+ "epoch": 3.558693733451015,
1770
+ "grad_norm": 0.10362087580690618,
1771
+ "learning_rate": 1.7632283861170135e-05,
1772
+ "loss": 0.33,
1773
+ "step": 252
1774
+ },
1775
+ {
1776
+ "epoch": 3.5728155339805827,
1777
+ "grad_norm": 0.09578324331311534,
1778
+ "learning_rate": 1.7302671809648735e-05,
1779
+ "loss": 0.3336,
1780
+ "step": 253
1781
+ },
1782
+ {
1783
+ "epoch": 3.58693733451015,
1784
+ "grad_norm": 0.1021943484963981,
1785
+ "learning_rate": 1.6975317372573066e-05,
1786
+ "loss": 0.334,
1787
+ "step": 254
1788
+ },
1789
+ {
1790
+ "epoch": 3.6010591350397174,
1791
+ "grad_norm": 0.10104477227737499,
1792
+ "learning_rate": 1.6650253110608415e-05,
1793
+ "loss": 0.3352,
1794
+ "step": 255
1795
+ },
1796
+ {
1797
+ "epoch": 3.615180935569285,
1798
+ "grad_norm": 0.09719144111824624,
1799
+ "learning_rate": 1.6327511356625302e-05,
1800
+ "loss": 0.3339,
1801
+ "step": 256
1802
+ },
1803
+ {
1804
+ "epoch": 3.6293027360988526,
1805
+ "grad_norm": 0.10082549447043057,
1806
+ "learning_rate": 1.6007124212483453e-05,
1807
+ "loss": 0.3303,
1808
+ "step": 257
1809
+ },
1810
+ {
1811
+ "epoch": 3.6434245366284204,
1812
+ "grad_norm": 0.09855344501708733,
1813
+ "learning_rate": 1.5689123545838804e-05,
1814
+ "loss": 0.3319,
1815
+ "step": 258
1816
+ },
1817
+ {
1818
+ "epoch": 3.6575463371579877,
1819
+ "grad_norm": 0.10038693196972406,
1820
+ "learning_rate": 1.537354098697367e-05,
1821
+ "loss": 0.3285,
1822
+ "step": 259
1823
+ },
1824
+ {
1825
+ "epoch": 3.671668137687555,
1826
+ "grad_norm": 0.10993218050906065,
1827
+ "learning_rate": 1.5060407925650662e-05,
1828
+ "loss": 0.3346,
1829
+ "step": 260
1830
+ },
1831
+ {
1832
+ "epoch": 3.6857899382171224,
1833
+ "grad_norm": 0.09881058692426582,
1834
+ "learning_rate": 1.4749755507990449e-05,
1835
+ "loss": 0.3265,
1836
+ "step": 261
1837
+ },
1838
+ {
1839
+ "epoch": 3.69991173874669,
1840
+ "grad_norm": 0.11110424733317653,
1841
+ "learning_rate": 1.4441614633373773e-05,
1842
+ "loss": 0.3367,
1843
+ "step": 262
1844
+ },
1845
+ {
1846
+ "epoch": 3.7140335392762576,
1847
+ "grad_norm": 0.09507466207790345,
1848
+ "learning_rate": 1.413601595136802e-05,
1849
+ "loss": 0.335,
1850
+ "step": 263
1851
+ },
1852
+ {
1853
+ "epoch": 3.7281553398058254,
1854
+ "grad_norm": 0.10341229060389236,
1855
+ "learning_rate": 1.383298985867863e-05,
1856
+ "loss": 0.3324,
1857
+ "step": 264
1858
+ },
1859
+ {
1860
+ "epoch": 3.7422771403353927,
1861
+ "grad_norm": 0.09734360531860331,
1862
+ "learning_rate": 1.3532566496125634e-05,
1863
+ "loss": 0.3313,
1864
+ "step": 265
1865
+ },
1866
+ {
1867
+ "epoch": 3.75639894086496,
1868
+ "grad_norm": 0.09174570798780135,
1869
+ "learning_rate": 1.3234775745645684e-05,
1870
+ "loss": 0.3351,
1871
+ "step": 266
1872
+ },
1873
+ {
1874
+ "epoch": 3.770520741394528,
1875
+ "grad_norm": 0.10147835781586892,
1876
+ "learning_rate": 1.2939647227319791e-05,
1877
+ "loss": 0.3353,
1878
+ "step": 267
1879
+ },
1880
+ {
1881
+ "epoch": 3.784642541924095,
1882
+ "grad_norm": 0.09808246222031777,
1883
+ "learning_rate": 1.2647210296427197e-05,
1884
+ "loss": 0.3323,
1885
+ "step": 268
1886
+ },
1887
+ {
1888
+ "epoch": 3.798764342453663,
1889
+ "grad_norm": 0.09735163985861015,
1890
+ "learning_rate": 1.2357494040525416e-05,
1891
+ "loss": 0.3391,
1892
+ "step": 269
1893
+ },
1894
+ {
1895
+ "epoch": 3.8128861429832304,
1896
+ "grad_norm": 0.08930562493255255,
1897
+ "learning_rate": 1.2070527276557092e-05,
1898
+ "loss": 0.3327,
1899
+ "step": 270
1900
+ },
1901
+ {
1902
+ "epoch": 3.8270079435127977,
1903
+ "grad_norm": 0.09744814905553326,
1904
+ "learning_rate": 1.178633854798365e-05,
1905
+ "loss": 0.33,
1906
+ "step": 271
1907
+ },
1908
+ {
1909
+ "epoch": 3.8411297440423655,
1910
+ "grad_norm": 0.09183836496663382,
1911
+ "learning_rate": 1.1504956121946216e-05,
1912
+ "loss": 0.3317,
1913
+ "step": 272
1914
+ },
1915
+ {
1916
+ "epoch": 3.855251544571933,
1917
+ "grad_norm": 0.08801876422756064,
1918
+ "learning_rate": 1.1226407986453963e-05,
1919
+ "loss": 0.3294,
1920
+ "step": 273
1921
+ },
1922
+ {
1923
+ "epoch": 3.8693733451015007,
1924
+ "grad_norm": 0.08798928229950856,
1925
+ "learning_rate": 1.0950721847600282e-05,
1926
+ "loss": 0.3282,
1927
+ "step": 274
1928
+ },
1929
+ {
1930
+ "epoch": 3.883495145631068,
1931
+ "grad_norm": 0.09000845113363774,
1932
+ "learning_rate": 1.0677925126806956e-05,
1933
+ "loss": 0.335,
1934
+ "step": 275
1935
+ },
1936
+ {
1937
+ "epoch": 3.8976169461606354,
1938
+ "grad_norm": 0.09609952332604478,
1939
+ "learning_rate": 1.040804495809665e-05,
1940
+ "loss": 0.3352,
1941
+ "step": 276
1942
+ },
1943
+ {
1944
+ "epoch": 3.911738746690203,
1945
+ "grad_norm": 0.09426777621829556,
1946
+ "learning_rate": 1.0141108185393995e-05,
1947
+ "loss": 0.3307,
1948
+ "step": 277
1949
+ },
1950
+ {
1951
+ "epoch": 3.9258605472197705,
1952
+ "grad_norm": 0.08749576305220681,
1953
+ "learning_rate": 9.877141359855567e-06,
1954
+ "loss": 0.3316,
1955
+ "step": 278
1956
+ },
1957
+ {
1958
+ "epoch": 3.9399823477493383,
1959
+ "grad_norm": 0.08573388419725536,
1960
+ "learning_rate": 9.616170737228882e-06,
1961
+ "loss": 0.3301,
1962
+ "step": 279
1963
+ },
1964
+ {
1965
+ "epoch": 3.9541041482789057,
1966
+ "grad_norm": 0.08677743094561904,
1967
+ "learning_rate": 9.358222275240884e-06,
1968
+ "loss": 0.3309,
1969
+ "step": 280
1970
+ },
1971
+ {
1972
+ "epoch": 3.968225948808473,
1973
+ "grad_norm": 0.08456912932018501,
1974
+ "learning_rate": 9.103321631016024e-06,
1975
+ "loss": 0.3294,
1976
+ "step": 281
1977
+ },
1978
+ {
1979
+ "epoch": 3.9823477493380404,
1980
+ "grad_norm": 0.0892840459688823,
1981
+ "learning_rate": 8.851494158524242e-06,
1982
+ "loss": 0.3299,
1983
+ "step": 282
1984
+ },
1985
+ {
1986
+ "epoch": 3.996469549867608,
1987
+ "grad_norm": 0.09785834932292316,
1988
+ "learning_rate": 8.602764906059109e-06,
1989
+ "loss": 0.3734,
1990
+ "step": 283
1991
+ },
1992
+ {
1993
+ "epoch": 4.010591350397176,
1994
+ "grad_norm": 0.1159182382828669,
1995
+ "learning_rate": 8.35715861374636e-06,
1996
+ "loss": 0.3432,
1997
+ "step": 284
1998
+ },
1999
+ {
2000
+ "epoch": 4.024713150926743,
2001
+ "grad_norm": 0.11348869033645836,
2002
+ "learning_rate": 8.114699711083113e-06,
2003
+ "loss": 0.3187,
2004
+ "step": 285
2005
+ },
2006
+ {
2007
+ "epoch": 4.038834951456311,
2008
+ "grad_norm": 0.09626843456466473,
2009
+ "learning_rate": 7.875412314507942e-06,
2010
+ "loss": 0.3213,
2011
+ "step": 286
2012
+ },
2013
+ {
2014
+ "epoch": 4.052956751985878,
2015
+ "grad_norm": 0.0918806636447836,
2016
+ "learning_rate": 7.639320225002106e-06,
2017
+ "loss": 0.3169,
2018
+ "step": 287
2019
+ },
2020
+ {
2021
+ "epoch": 4.067078552515445,
2022
+ "grad_norm": 0.09514043448978982,
2023
+ "learning_rate": 7.406446925722211e-06,
2024
+ "loss": 0.3148,
2025
+ "step": 288
2026
+ },
2027
+ {
2028
+ "epoch": 4.081200353045014,
2029
+ "grad_norm": 0.10508295602012874,
2030
+ "learning_rate": 7.176815579664343e-06,
2031
+ "loss": 0.3132,
2032
+ "step": 289
2033
+ },
2034
+ {
2035
+ "epoch": 4.095322153574581,
2036
+ "grad_norm": 0.10091079365331981,
2037
+ "learning_rate": 6.950449027360213e-06,
2038
+ "loss": 0.3175,
2039
+ "step": 290
2040
+ },
2041
+ {
2042
+ "epoch": 4.109443954104148,
2043
+ "grad_norm": 0.0973346460822993,
2044
+ "learning_rate": 6.7273697846052515e-06,
2045
+ "loss": 0.3184,
2046
+ "step": 291
2047
+ },
2048
+ {
2049
+ "epoch": 4.123565754633716,
2050
+ "grad_norm": 0.09115379235697503,
2051
+ "learning_rate": 6.507600040219073e-06,
2052
+ "loss": 0.3164,
2053
+ "step": 292
2054
+ },
2055
+ {
2056
+ "epoch": 4.137687555163283,
2057
+ "grad_norm": 0.08901902718597547,
2058
+ "learning_rate": 6.291161653838434e-06,
2059
+ "loss": 0.3177,
2060
+ "step": 293
2061
+ },
2062
+ {
2063
+ "epoch": 4.151809355692851,
2064
+ "grad_norm": 0.09132299423316595,
2065
+ "learning_rate": 6.078076153742962e-06,
2066
+ "loss": 0.3131,
2067
+ "step": 294
2068
+ },
2069
+ {
2070
+ "epoch": 4.165931156222419,
2071
+ "grad_norm": 0.09543903005749907,
2072
+ "learning_rate": 5.868364734713776e-06,
2073
+ "loss": 0.3142,
2074
+ "step": 295
2075
+ },
2076
+ {
2077
+ "epoch": 4.180052956751986,
2078
+ "grad_norm": 0.09061531269851537,
2079
+ "learning_rate": 5.662048255925357e-06,
2080
+ "loss": 0.3204,
2081
+ "step": 296
2082
+ },
2083
+ {
2084
+ "epoch": 4.194174757281553,
2085
+ "grad_norm": 0.08551951038992002,
2086
+ "learning_rate": 5.459147238870768e-06,
2087
+ "loss": 0.3158,
2088
+ "step": 297
2089
+ },
2090
+ {
2091
+ "epoch": 4.208296557811121,
2092
+ "grad_norm": 0.08387425510980595,
2093
+ "learning_rate": 5.259681865320447e-06,
2094
+ "loss": 0.3194,
2095
+ "step": 298
2096
+ },
2097
+ {
2098
+ "epoch": 4.222418358340688,
2099
+ "grad_norm": 0.0901228464398898,
2100
+ "learning_rate": 5.063671975314814e-06,
2101
+ "loss": 0.3163,
2102
+ "step": 299
2103
+ },
2104
+ {
2105
+ "epoch": 4.236540158870256,
2106
+ "grad_norm": 0.08691256583540367,
2107
+ "learning_rate": 4.871137065190854e-06,
2108
+ "loss": 0.315,
2109
+ "step": 300
2110
+ },
2111
+ {
2112
+ "epoch": 4.250661959399824,
2113
+ "grad_norm": 0.0878527835574059,
2114
+ "learning_rate": 4.6820962856429205e-06,
2115
+ "loss": 0.3176,
2116
+ "step": 301
2117
+ },
2118
+ {
2119
+ "epoch": 4.264783759929391,
2120
+ "grad_norm": 0.0840437037057203,
2121
+ "learning_rate": 4.496568439817836e-06,
2122
+ "loss": 0.322,
2123
+ "step": 302
2124
+ },
2125
+ {
2126
+ "epoch": 4.278905560458958,
2127
+ "grad_norm": 0.08904988122589128,
2128
+ "learning_rate": 4.314571981444666e-06,
2129
+ "loss": 0.311,
2130
+ "step": 303
2131
+ },
2132
+ {
2133
+ "epoch": 4.293027360988526,
2134
+ "grad_norm": 0.08120215219780037,
2135
+ "learning_rate": 4.136125012999168e-06,
2136
+ "loss": 0.3203,
2137
+ "step": 304
2138
+ },
2139
+ {
2140
+ "epoch": 4.307149161518094,
2141
+ "grad_norm": 0.08522052695009742,
2142
+ "learning_rate": 3.961245283903239e-06,
2143
+ "loss": 0.3161,
2144
+ "step": 305
2145
+ },
2146
+ {
2147
+ "epoch": 4.321270962047661,
2148
+ "grad_norm": 0.08319753808748938,
2149
+ "learning_rate": 3.7899501887594102e-06,
2150
+ "loss": 0.315,
2151
+ "step": 306
2152
+ },
2153
+ {
2154
+ "epoch": 4.335392762577229,
2155
+ "grad_norm": 0.08198211403858394,
2156
+ "learning_rate": 3.622256765620713e-06,
2157
+ "loss": 0.3165,
2158
+ "step": 307
2159
+ },
2160
+ {
2161
+ "epoch": 4.349514563106796,
2162
+ "grad_norm": 0.07827444542073485,
2163
+ "learning_rate": 3.458181694295961e-06,
2164
+ "loss": 0.3114,
2165
+ "step": 308
2166
+ },
2167
+ {
2168
+ "epoch": 4.363636363636363,
2169
+ "grad_norm": 0.07827005931051699,
2170
+ "learning_rate": 3.297741294690644e-06,
2171
+ "loss": 0.3125,
2172
+ "step": 309
2173
+ },
2174
+ {
2175
+ "epoch": 4.3777581641659316,
2176
+ "grad_norm": 0.07833274350751808,
2177
+ "learning_rate": 3.140951525183691e-06,
2178
+ "loss": 0.3156,
2179
+ "step": 310
2180
+ },
2181
+ {
2182
+ "epoch": 4.391879964695499,
2183
+ "grad_norm": 0.08055700180528477,
2184
+ "learning_rate": 2.987827981040132e-06,
2185
+ "loss": 0.3144,
2186
+ "step": 311
2187
+ },
2188
+ {
2189
+ "epoch": 4.406001765225066,
2190
+ "grad_norm": 0.0799614180245514,
2191
+ "learning_rate": 2.8383858928598963e-06,
2192
+ "loss": 0.3157,
2193
+ "step": 312
2194
+ },
2195
+ {
2196
+ "epoch": 4.420123565754634,
2197
+ "grad_norm": 0.0722165779006397,
2198
+ "learning_rate": 2.692640125062895e-06,
2199
+ "loss": 0.3116,
2200
+ "step": 313
2201
+ },
2202
+ {
2203
+ "epoch": 4.434245366284201,
2204
+ "grad_norm": 0.07776220076295337,
2205
+ "learning_rate": 2.550605174410512e-06,
2206
+ "loss": 0.3206,
2207
+ "step": 314
2208
+ },
2209
+ {
2210
+ "epoch": 4.448367166813769,
2211
+ "grad_norm": 0.07577160557474086,
2212
+ "learning_rate": 2.4122951685636674e-06,
2213
+ "loss": 0.3119,
2214
+ "step": 315
2215
+ },
2216
+ {
2217
+ "epoch": 4.4624889673433366,
2218
+ "grad_norm": 0.07292199486310709,
2219
+ "learning_rate": 2.2777238646775768e-06,
2220
+ "loss": 0.314,
2221
+ "step": 316
2222
+ },
2223
+ {
2224
+ "epoch": 4.476610767872904,
2225
+ "grad_norm": 0.07321270589774292,
2226
+ "learning_rate": 2.14690464803343e-06,
2227
+ "loss": 0.3116,
2228
+ "step": 317
2229
+ },
2230
+ {
2231
+ "epoch": 4.490732568402471,
2232
+ "grad_norm": 0.07971761444372055,
2233
+ "learning_rate": 2.0198505307069462e-06,
2234
+ "loss": 0.3162,
2235
+ "step": 318
2236
+ },
2237
+ {
2238
+ "epoch": 4.504854368932039,
2239
+ "grad_norm": 0.0823725656624792,
2240
+ "learning_rate": 1.896574150274151e-06,
2241
+ "loss": 0.318,
2242
+ "step": 319
2243
+ },
2244
+ {
2245
+ "epoch": 4.518976169461606,
2246
+ "grad_norm": 0.07311612247681858,
2247
+ "learning_rate": 1.7770877685543687e-06,
2248
+ "loss": 0.3146,
2249
+ "step": 320
2250
+ },
2251
+ {
2252
+ "epoch": 4.533097969991174,
2253
+ "grad_norm": 0.0754285797360244,
2254
+ "learning_rate": 1.6614032703905714e-06,
2255
+ "loss": 0.3188,
2256
+ "step": 321
2257
+ },
2258
+ {
2259
+ "epoch": 4.5472197705207416,
2260
+ "grad_norm": 0.07192329712907819,
2261
+ "learning_rate": 1.5495321624672443e-06,
2262
+ "loss": 0.3117,
2263
+ "step": 322
2264
+ },
2265
+ {
2266
+ "epoch": 4.561341571050309,
2267
+ "grad_norm": 0.07683729191513318,
2268
+ "learning_rate": 1.4414855721658705e-06,
2269
+ "loss": 0.3179,
2270
+ "step": 323
2271
+ },
2272
+ {
2273
+ "epoch": 4.575463371579876,
2274
+ "grad_norm": 0.07466087193345237,
2275
+ "learning_rate": 1.3372742464581134e-06,
2276
+ "loss": 0.3169,
2277
+ "step": 324
2278
+ },
2279
+ {
2280
+ "epoch": 4.589585172109444,
2281
+ "grad_norm": 0.07472750780066512,
2282
+ "learning_rate": 1.2369085508368862e-06,
2283
+ "loss": 0.313,
2284
+ "step": 325
2285
+ },
2286
+ {
2287
+ "epoch": 4.603706972639012,
2288
+ "grad_norm": 0.07567268942020543,
2289
+ "learning_rate": 1.1403984682852998e-06,
2290
+ "loss": 0.3162,
2291
+ "step": 326
2292
+ },
2293
+ {
2294
+ "epoch": 4.617828773168579,
2295
+ "grad_norm": 0.07193466653913613,
2296
+ "learning_rate": 1.0477535982837473e-06,
2297
+ "loss": 0.3169,
2298
+ "step": 327
2299
+ },
2300
+ {
2301
+ "epoch": 4.631950573698147,
2302
+ "grad_norm": 0.07310364397796111,
2303
+ "learning_rate": 9.589831558550222e-07,
2304
+ "loss": 0.3147,
2305
+ "step": 328
2306
+ },
2307
+ {
2308
+ "epoch": 4.646072374227714,
2309
+ "grad_norm": 0.07226831665121733,
2310
+ "learning_rate": 8.740959706477725e-07,
2311
+ "loss": 0.3155,
2312
+ "step": 329
2313
+ },
2314
+ {
2315
+ "epoch": 4.660194174757281,
2316
+ "grad_norm": 0.07380784680617208,
2317
+ "learning_rate": 7.93100486058247e-07,
2318
+ "loss": 0.3172,
2319
+ "step": 330
2320
+ },
2321
+ {
2322
+ "epoch": 4.674315975286849,
2323
+ "grad_norm": 0.07265097137199653,
2324
+ "learning_rate": 7.160047583904473e-07,
2325
+ "loss": 0.3123,
2326
+ "step": 331
2327
+ },
2328
+ {
2329
+ "epoch": 4.688437775816417,
2330
+ "grad_norm": 0.07526606061681983,
2331
+ "learning_rate": 6.428164560548134e-07,
2332
+ "loss": 0.3126,
2333
+ "step": 332
2334
+ },
2335
+ {
2336
+ "epoch": 4.702559576345984,
2337
+ "grad_norm": 0.07096951660387449,
2338
+ "learning_rate": 5.735428588054825e-07,
2339
+ "loss": 0.3091,
2340
+ "step": 333
2341
+ },
2342
+ {
2343
+ "epoch": 4.716681376875552,
2344
+ "grad_norm": 0.07491929428893927,
2345
+ "learning_rate": 5.081908570161753e-07,
2346
+ "loss": 0.3168,
2347
+ "step": 334
2348
+ },
2349
+ {
2350
+ "epoch": 4.730803177405119,
2351
+ "grad_norm": 0.07068035565889964,
2352
+ "learning_rate": 4.467669509948591e-07,
2353
+ "loss": 0.3168,
2354
+ "step": 335
2355
+ },
2356
+ {
2357
+ "epoch": 4.744924977934687,
2358
+ "grad_norm": 0.07006153238881019,
2359
+ "learning_rate": 3.8927725033718553e-07,
2360
+ "loss": 0.3096,
2361
+ "step": 336
2362
+ },
2363
+ {
2364
+ "epoch": 4.7590467784642545,
2365
+ "grad_norm": 0.07031296479074185,
2366
+ "learning_rate": 3.3572747331878984e-07,
2367
+ "loss": 0.3127,
2368
+ "step": 337
2369
+ },
2370
+ {
2371
+ "epoch": 4.773168578993822,
2372
+ "grad_norm": 0.07086156685048181,
2373
+ "learning_rate": 2.8612294632650586e-07,
2374
+ "loss": 0.3165,
2375
+ "step": 338
2376
+ },
2377
+ {
2378
+ "epoch": 4.787290379523389,
2379
+ "grad_norm": 0.07041702874195928,
2380
+ "learning_rate": 2.404686033285897e-07,
2381
+ "loss": 0.3211,
2382
+ "step": 339
2383
+ },
2384
+ {
2385
+ "epoch": 4.801412180052957,
2386
+ "grad_norm": 0.07111545002538634,
2387
+ "learning_rate": 1.9876898538394362e-07,
2388
+ "loss": 0.3139,
2389
+ "step": 340
2390
+ },
2391
+ {
2392
+ "epoch": 4.815533980582524,
2393
+ "grad_norm": 0.06964445264833816,
2394
+ "learning_rate": 1.6102824019043728e-07,
2395
+ "loss": 0.3119,
2396
+ "step": 341
2397
+ },
2398
+ {
2399
+ "epoch": 4.829655781112092,
2400
+ "grad_norm": 0.07185826317569316,
2401
+ "learning_rate": 1.2725012167236207e-07,
2402
+ "loss": 0.3189,
2403
+ "step": 342
2404
+ },
2405
+ {
2406
+ "epoch": 4.8437775816416595,
2407
+ "grad_norm": 0.07175971991165786,
2408
+ "learning_rate": 9.74379896070321e-08,
2409
+ "loss": 0.3144,
2410
+ "step": 343
2411
+ },
2412
+ {
2413
+ "epoch": 4.857899382171227,
2414
+ "grad_norm": 0.07027377563502572,
2415
+ "learning_rate": 7.159480929059381e-08,
2416
+ "loss": 0.3208,
2417
+ "step": 344
2418
+ },
2419
+ {
2420
+ "epoch": 4.872021182700794,
2421
+ "grad_norm": 0.07130198834034268,
2422
+ "learning_rate": 4.9723151243106225e-08,
2423
+ "loss": 0.3164,
2424
+ "step": 345
2425
+ },
2426
+ {
2427
+ "epoch": 4.886142983230362,
2428
+ "grad_norm": 0.07512577557190175,
2429
+ "learning_rate": 3.1825190952829986e-08,
2430
+ "loss": 0.3183,
2431
+ "step": 346
2432
+ },
2433
+ {
2434
+ "epoch": 4.90026478375993,
2435
+ "grad_norm": 0.0718819094759202,
2436
+ "learning_rate": 1.7902708659867096e-08,
2437
+ "loss": 0.3185,
2438
+ "step": 347
2439
+ },
2440
+ {
2441
+ "epoch": 4.914386584289497,
2442
+ "grad_norm": 0.0706893833001464,
2443
+ "learning_rate": 7.957089179058131e-09,
2444
+ "loss": 0.3142,
2445
+ "step": 348
2446
+ },
2447
+ {
2448
+ "epoch": 4.9285083848190645,
2449
+ "grad_norm": 0.07170028442056126,
2450
+ "learning_rate": 1.9893217622790616e-09,
2451
+ "loss": 0.3181,
2452
+ "step": 349
2453
+ },
2454
+ {
2455
+ "epoch": 4.942630185348632,
2456
+ "grad_norm": 0.07142066838497432,
2457
+ "learning_rate": 0.0,
2458
+ "loss": 0.313,
2459
+ "step": 350
2460
+ },
2461
+ {
2462
+ "epoch": 4.942630185348632,
2463
+ "step": 350,
2464
+ "total_flos": 9.306564393200255e+18,
2465
+ "train_loss": 0.0,
2466
+ "train_runtime": 1.9909,
2467
+ "train_samples_per_second": 91040.986,
2468
+ "train_steps_per_second": 175.798
2469
+ }
2470
+ ],
2471
+ "logging_steps": 1,
2472
+ "max_steps": 350,
2473
+ "num_input_tokens_seen": 0,
2474
+ "num_train_epochs": 5,
2475
+ "save_steps": 500,
2476
+ "stateful_callbacks": {
2477
+ "TrainerControl": {
2478
+ "args": {
2479
+ "should_epoch_stop": false,
2480
+ "should_evaluate": false,
2481
+ "should_log": false,
2482
+ "should_save": true,
2483
+ "should_training_stop": true
2484
+ },
2485
+ "attributes": {}
2486
+ }
2487
+ },
2488
+ "total_flos": 9.306564393200255e+18,
2489
+ "train_batch_size": 1,
2490
+ "trial_name": null,
2491
+ "trial_params": null
2492
+ }
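
Editor's note (not part of the commit): the JSON above is the trainer state log, where each `log_history` entry records `epoch`, `grad_norm`, `learning_rate`, `loss`, and `step` for one optimizer step, and the trailing object holds run-level totals such as `total_flos` and `max_steps`. The sketch below shows one way this log could be inspected and re-plotted; it presumably reproduces what `training_loss.png` (added later in this commit) shows. It assumes the JSON has been downloaded locally as `trainer_state.json` and that matplotlib is installed; the output filename is illustrative only.

```python
# Minimal sketch, not part of this repository: plot training loss per logged step
# from a Hugging Face Trainer state file.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:  # assumed local filename
    state = json.load(f)

# Keep only the per-step entries that actually carry a loss value
# (the final summary entry has train_loss/train_runtime instead).
logs = [e for e in state["log_history"] if "loss" in e and "step" in e]
steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title("openthoughts_math fine-tune: loss per logged step")
plt.savefig("training_loss_replot.png")  # hypothetical output name
```
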
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:039e32e706bff12cff024410b904be7a320b0461bffe60ae4b85e290ebfee2ea
3
+ size 7416
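
Editor's note (not part of the commit): the three lines above are a Git LFS pointer rather than the binary itself — `version` names the pointer spec, `oid` is the SHA-256 of the stored object, and `size` is its byte count. As a hedged illustration, the pointer text can be parsed as simple `key value` lines; loading the actual `training_args.bin` after it has been fetched via LFS is conventionally done with `torch.load`, though the exact call depends on the PyTorch/Transformers versions in use and is not shown here.

```python
# Illustrative only: parse the Git LFS pointer fields shown above.
# Assumes the pointer text has been saved locally as "training_args.bin.pointer".
def parse_lfs_pointer(text: str) -> dict:
    """Split the 'key value' lines of a git-lfs pointer into a dict."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

with open("training_args.bin.pointer") as f:
    pointer = parse_lfs_pointer(f.read())

print(pointer["oid"])   # e.g. "sha256:039e32e7..."
print(pointer["size"])  # e.g. "7416" (bytes)
```
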
training_loss.png ADDED
vocab.json ADDED