BoboboChen committed on
Commit 6fd7924 · verified · 1 Parent(s): eae2441

Add files using upload-large-folder tool

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
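
added_tokens.json pins Qwen2.5's extra special tokens to fixed ids in the 151643-151664 range. As a quick sanity check — a minimal sketch, assuming the repository has been downloaded locally and loads with transformers' `AutoTokenizer`; the `./checkpoint` path is a placeholder:

```python
from transformers import AutoTokenizer

# Placeholder path to a local checkout of this repository.
tokenizer = AutoTokenizer.from_pretrained("./checkpoint")

# Spot-check a few ids declared in added_tokens.json.
assert tokenizer.convert_tokens_to_ids("<|endoftext|>") == 151643
assert tokenizer.convert_tokens_to_ids("<|im_end|>") == 151645
assert tokenizer.convert_tokens_to_ids("<tool_call>") == 151657
```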
config.json ADDED
@@ -0,0 +1,29 @@
+ {
+ "_name_or_path": "Qwen/Qwen2.5-7B-Instruct",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 3584,
+ "initializer_range": 0.02,
+ "intermediate_size": 18944,
+ "max_position_embeddings": 32768,
+ "max_window_layers": 28,
+ "model_type": "qwen2",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.48.2",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
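
The config describes a stock Qwen2 architecture (28 layers, hidden size 3584, GQA with 4 key/value heads, bfloat16 weights) initialized from Qwen/Qwen2.5-7B-Instruct. A minimal loading sketch, assuming a local checkout at the placeholder path `./checkpoint`:

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

model_path = "./checkpoint"  # placeholder path to a local checkout

config = AutoConfig.from_pretrained(model_path)
print(config.model_type, config.num_hidden_layers, config.hidden_size)  # qwen2 28 3584

# config.json sets torch_dtype to bfloat16, so load the shards in that dtype.
model = AutoModelForCausalLM.from_pretrained(model_path, torch_dtype=torch.bfloat16)
```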
generation_config.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": [
+ 151645,
+ 151643
+ ],
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.05,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.48.2"
+ }
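
These are the sampling defaults that ship with the checkpoint: temperature 0.7, top-p 0.8, top-k 20, repetition penalty 1.05, and both <|im_end|> and <|endoftext|> treated as end-of-sequence. A minimal sketch of picking them up explicitly, again assuming the placeholder `./checkpoint` path:

```python
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("./checkpoint")  # placeholder path
print(gen_config.do_sample, gen_config.temperature, gen_config.top_p, gen_config.top_k)

# model.generate() would then pick up these defaults, e.g.:
# outputs = model.generate(**inputs, generation_config=gen_config, max_new_tokens=256)
```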
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d84bb669c93fa3d80fdd52e140f3376b330fa5fd7accb091bde321de9ac04831
+ size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f76c9620a174ceb0540e5f2dbb22f89fbbe976e78f5fde97501f6e08f5e61c8
+ size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bad210af36ec349a5dba8aaf346e3243b3f42adb703cf8e7c7003c1b5317b47b
+ size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51da41519a9d613d590d683c6c1ff5f906b0d60b55cbd877f8a5805d7e1bf8dc
+ size 1089994880
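
The four weight shards are stored through Git LFS, so the diff only records a pointer for each: the LFS spec version, a sha256 oid, and the byte size. A minimal verification sketch, assuming the real blobs have been pulled into a local checkout:

```python
import hashlib
import os

# One shard from this repository; repeat for the other shards.
path = "model-00004-of-00004.safetensors"

print(os.path.getsize(path))  # should match the "size" line of the pointer (1089994880)

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
print(sha.hexdigest())  # should match the "oid sha256:..." line of the pointer
```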
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
+ {
+ "metadata": {
+ "total_size": 15231233024
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00004-of-00004.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.norm.weight": "model-00003-of-00004.safetensors"
+ }
+ }
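
The index records the total parameter size (about 15.2 GB in bfloat16) and a weight_map from every tensor name to one of the four shards. A minimal sketch of resolving a single tensor to its shard, assuming the index and shards sit together in a local checkout and the `safetensors` package is installed:

```python
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

# weight_map tells you which of the four shards holds a given tensor.
name = "model.layers.0.self_attn.q_proj.weight"
shard = index["weight_map"][name]  # e.g. "model-00001-of-00004.safetensors"

with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape))
```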
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,209 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 10000,
+ "pad_token": "<|endoftext|>",
+ "padding_side": "right",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
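
Besides the token table, this file carries the ChatML-style chat_template, sets <|im_end|> as the EOS token, <|endoftext|> as padding, right-side padding, and a model_max_length of 10000. A minimal sketch of rendering a conversation with that template, again assuming the placeholder `./checkpoint` path:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./checkpoint")  # placeholder path

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hello!"},
]

# Renders <|im_start|>/<|im_end|> turns and appends "<|im_start|>assistant\n".
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
```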
trainer_state.json ADDED
@@ -0,0 +1,3127 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.0,
+ "eval_steps": 500,
+ "global_step": 442,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.004524886877828055,
+ "grad_norm": 3.2510592937469482,
+ "learning_rate": 4.524886877828055e-08,
+ "loss": 0.8401,
+ "step": 1
+ },
+ {
+ "epoch": 0.00904977375565611,
+ "grad_norm": 2.890446424484253,
+ "learning_rate": 9.04977375565611e-08,
+ "loss": 0.7699,
+ "step": 2
+ },
+ {
+ "epoch": 0.013574660633484163,
+ "grad_norm": 3.4535956382751465,
+ "learning_rate": 1.3574660633484163e-07,
+ "loss": 0.8325,
+ "step": 3
+ },
+ {
+ "epoch": 0.01809954751131222,
+ "grad_norm": 2.992128610610962,
+ "learning_rate": 1.809954751131222e-07,
+ "loss": 0.7607,
+ "step": 4
+ },
+ {
+ "epoch": 0.02262443438914027,
+ "grad_norm": 2.929943561553955,
+ "learning_rate": 2.2624434389140273e-07,
+ "loss": 0.7356,
+ "step": 5
+ },
+ {
+ "epoch": 0.027149321266968326,
+ "grad_norm": 3.091803550720215,
+ "learning_rate": 2.7149321266968326e-07,
+ "loss": 0.7924,
+ "step": 6
+ },
+ {
+ "epoch": 0.03167420814479638,
+ "grad_norm": 2.760162591934204,
+ "learning_rate": 3.167420814479638e-07,
+ "loss": 0.7152,
+ "step": 7
+ },
+ {
+ "epoch": 0.03619909502262444,
+ "grad_norm": 2.924128770828247,
+ "learning_rate": 3.619909502262444e-07,
+ "loss": 0.7361,
+ "step": 8
+ },
+ {
+ "epoch": 0.04072398190045249,
+ "grad_norm": 3.2693638801574707,
+ "learning_rate": 4.072398190045249e-07,
+ "loss": 0.8098,
+ "step": 9
+ },
+ {
+ "epoch": 0.04524886877828054,
+ "grad_norm": 2.807117462158203,
+ "learning_rate": 4.5248868778280546e-07,
+ "loss": 0.7414,
+ "step": 10
+ },
+ {
+ "epoch": 0.049773755656108594,
+ "grad_norm": 2.837460994720459,
+ "learning_rate": 4.977375565610859e-07,
+ "loss": 0.7736,
+ "step": 11
+ },
+ {
+ "epoch": 0.05429864253393665,
+ "grad_norm": 2.843400239944458,
+ "learning_rate": 5.429864253393665e-07,
+ "loss": 0.7254,
+ "step": 12
+ },
+ {
+ "epoch": 0.058823529411764705,
+ "grad_norm": 2.7627649307250977,
+ "learning_rate": 5.882352941176471e-07,
+ "loss": 0.7314,
+ "step": 13
+ },
+ {
+ "epoch": 0.06334841628959276,
+ "grad_norm": 2.5128955841064453,
+ "learning_rate": 6.334841628959276e-07,
+ "loss": 0.6816,
+ "step": 14
+ },
+ {
+ "epoch": 0.06787330316742081,
+ "grad_norm": 2.2178218364715576,
+ "learning_rate": 6.787330316742082e-07,
+ "loss": 0.687,
+ "step": 15
+ },
+ {
+ "epoch": 0.07239819004524888,
+ "grad_norm": 2.223222017288208,
+ "learning_rate": 7.239819004524888e-07,
+ "loss": 0.6834,
+ "step": 16
+ },
+ {
+ "epoch": 0.07692307692307693,
+ "grad_norm": 2.177546501159668,
+ "learning_rate": 7.692307692307694e-07,
+ "loss": 0.7176,
+ "step": 17
+ },
+ {
+ "epoch": 0.08144796380090498,
+ "grad_norm": 2.0788660049438477,
+ "learning_rate": 8.144796380090498e-07,
+ "loss": 0.6928,
+ "step": 18
+ },
+ {
+ "epoch": 0.08597285067873303,
+ "grad_norm": 2.013267993927002,
+ "learning_rate": 8.597285067873304e-07,
+ "loss": 0.7033,
+ "step": 19
+ },
+ {
+ "epoch": 0.09049773755656108,
+ "grad_norm": 1.640211820602417,
+ "learning_rate": 9.049773755656109e-07,
+ "loss": 0.7006,
+ "step": 20
+ },
+ {
+ "epoch": 0.09502262443438914,
+ "grad_norm": 1.2833340167999268,
+ "learning_rate": 9.502262443438914e-07,
+ "loss": 0.6464,
+ "step": 21
+ },
+ {
+ "epoch": 0.09954751131221719,
+ "grad_norm": 1.2619421482086182,
+ "learning_rate": 9.954751131221719e-07,
+ "loss": 0.6643,
+ "step": 22
+ },
+ {
+ "epoch": 0.10407239819004525,
+ "grad_norm": 1.2324565649032593,
+ "learning_rate": 1.0407239819004527e-06,
+ "loss": 0.6724,
+ "step": 23
+ },
+ {
+ "epoch": 0.1085972850678733,
+ "grad_norm": 1.1131088733673096,
+ "learning_rate": 1.085972850678733e-06,
+ "loss": 0.6059,
+ "step": 24
+ },
+ {
+ "epoch": 0.11312217194570136,
+ "grad_norm": 1.1473792791366577,
+ "learning_rate": 1.1312217194570136e-06,
+ "loss": 0.6494,
+ "step": 25
+ },
+ {
+ "epoch": 0.11764705882352941,
+ "grad_norm": 1.1130750179290771,
+ "learning_rate": 1.1764705882352942e-06,
+ "loss": 0.6693,
+ "step": 26
+ },
+ {
+ "epoch": 0.12217194570135746,
+ "grad_norm": 1.1199308633804321,
+ "learning_rate": 1.2217194570135748e-06,
+ "loss": 0.7353,
+ "step": 27
+ },
+ {
+ "epoch": 0.12669683257918551,
+ "grad_norm": 1.094309687614441,
+ "learning_rate": 1.2669683257918552e-06,
+ "loss": 0.6729,
+ "step": 28
+ },
+ {
+ "epoch": 0.13122171945701358,
+ "grad_norm": 1.2093147039413452,
+ "learning_rate": 1.312217194570136e-06,
+ "loss": 0.6549,
+ "step": 29
+ },
+ {
+ "epoch": 0.13574660633484162,
+ "grad_norm": 1.2095019817352295,
+ "learning_rate": 1.3574660633484164e-06,
+ "loss": 0.6317,
+ "step": 30
+ },
+ {
+ "epoch": 0.14027149321266968,
+ "grad_norm": 1.43827223777771,
+ "learning_rate": 1.402714932126697e-06,
+ "loss": 0.6496,
+ "step": 31
+ },
+ {
+ "epoch": 0.14479638009049775,
+ "grad_norm": 1.4549624919891357,
+ "learning_rate": 1.4479638009049775e-06,
+ "loss": 0.6697,
+ "step": 32
+ },
+ {
+ "epoch": 0.1493212669683258,
+ "grad_norm": 1.165290117263794,
+ "learning_rate": 1.493212669683258e-06,
+ "loss": 0.6249,
+ "step": 33
+ },
+ {
+ "epoch": 0.15384615384615385,
+ "grad_norm": 1.257883906364441,
+ "learning_rate": 1.5384615384615387e-06,
+ "loss": 0.6564,
+ "step": 34
+ },
+ {
+ "epoch": 0.1583710407239819,
+ "grad_norm": 1.027738094329834,
+ "learning_rate": 1.583710407239819e-06,
+ "loss": 0.5368,
+ "step": 35
+ },
+ {
+ "epoch": 0.16289592760180996,
+ "grad_norm": 0.9741559624671936,
+ "learning_rate": 1.6289592760180997e-06,
+ "loss": 0.6242,
+ "step": 36
+ },
+ {
+ "epoch": 0.167420814479638,
+ "grad_norm": 0.8538191318511963,
+ "learning_rate": 1.67420814479638e-06,
+ "loss": 0.6142,
+ "step": 37
+ },
+ {
+ "epoch": 0.17194570135746606,
+ "grad_norm": 0.6769871115684509,
+ "learning_rate": 1.7194570135746609e-06,
+ "loss": 0.5496,
+ "step": 38
+ },
+ {
+ "epoch": 0.17647058823529413,
+ "grad_norm": 0.6637622117996216,
+ "learning_rate": 1.7647058823529414e-06,
+ "loss": 0.5751,
+ "step": 39
+ },
+ {
+ "epoch": 0.18099547511312217,
+ "grad_norm": 0.6483653783798218,
+ "learning_rate": 1.8099547511312218e-06,
+ "loss": 0.6127,
+ "step": 40
+ },
+ {
+ "epoch": 0.18552036199095023,
+ "grad_norm": 0.7997961044311523,
+ "learning_rate": 1.8552036199095024e-06,
+ "loss": 0.576,
+ "step": 41
+ },
+ {
+ "epoch": 0.19004524886877827,
+ "grad_norm": 0.7297148704528809,
+ "learning_rate": 1.9004524886877828e-06,
+ "loss": 0.6254,
+ "step": 42
+ },
+ {
+ "epoch": 0.19457013574660634,
+ "grad_norm": 0.6087789535522461,
+ "learning_rate": 1.9457013574660634e-06,
+ "loss": 0.5344,
+ "step": 43
+ },
+ {
+ "epoch": 0.19909502262443438,
+ "grad_norm": 0.7219811677932739,
+ "learning_rate": 1.9909502262443437e-06,
+ "loss": 0.5776,
+ "step": 44
+ },
+ {
+ "epoch": 0.20361990950226244,
+ "grad_norm": 0.7508623003959656,
+ "learning_rate": 2.0361990950226245e-06,
+ "loss": 0.5435,
+ "step": 45
+ },
+ {
+ "epoch": 0.2081447963800905,
+ "grad_norm": 0.6041704416275024,
+ "learning_rate": 2.0814479638009053e-06,
+ "loss": 0.5094,
+ "step": 46
+ },
+ {
+ "epoch": 0.21266968325791855,
+ "grad_norm": 0.6673488020896912,
+ "learning_rate": 2.1266968325791857e-06,
+ "loss": 0.556,
+ "step": 47
+ },
+ {
+ "epoch": 0.2171945701357466,
+ "grad_norm": 0.6235842704772949,
+ "learning_rate": 2.171945701357466e-06,
+ "loss": 0.5766,
+ "step": 48
+ },
+ {
+ "epoch": 0.22171945701357465,
+ "grad_norm": 0.5541844367980957,
+ "learning_rate": 2.2171945701357465e-06,
+ "loss": 0.5422,
+ "step": 49
+ },
+ {
+ "epoch": 0.22624434389140272,
+ "grad_norm": 0.6511553525924683,
+ "learning_rate": 2.2624434389140273e-06,
+ "loss": 0.6076,
+ "step": 50
+ },
+ {
+ "epoch": 0.23076923076923078,
+ "grad_norm": 0.5737389922142029,
+ "learning_rate": 2.307692307692308e-06,
+ "loss": 0.5804,
+ "step": 51
+ },
+ {
+ "epoch": 0.23529411764705882,
+ "grad_norm": 0.45331427454948425,
+ "learning_rate": 2.3529411764705885e-06,
+ "loss": 0.4903,
+ "step": 52
+ },
+ {
+ "epoch": 0.2398190045248869,
+ "grad_norm": 0.5451053977012634,
+ "learning_rate": 2.3981900452488693e-06,
+ "loss": 0.5716,
+ "step": 53
+ },
+ {
+ "epoch": 0.24434389140271492,
+ "grad_norm": 0.5204625725746155,
+ "learning_rate": 2.4434389140271496e-06,
+ "loss": 0.5129,
+ "step": 54
+ },
+ {
+ "epoch": 0.248868778280543,
+ "grad_norm": 0.4971032440662384,
+ "learning_rate": 2.48868778280543e-06,
+ "loss": 0.5389,
+ "step": 55
+ },
+ {
+ "epoch": 0.25339366515837103,
+ "grad_norm": 0.5272262096405029,
+ "learning_rate": 2.5339366515837104e-06,
+ "loss": 0.5235,
+ "step": 56
+ },
+ {
+ "epoch": 0.2579185520361991,
+ "grad_norm": 0.5696743130683899,
+ "learning_rate": 2.5791855203619916e-06,
+ "loss": 0.508,
+ "step": 57
+ },
+ {
+ "epoch": 0.26244343891402716,
+ "grad_norm": 0.5246968269348145,
+ "learning_rate": 2.624434389140272e-06,
+ "loss": 0.5625,
+ "step": 58
+ },
+ {
+ "epoch": 0.2669683257918552,
+ "grad_norm": 0.5081693530082703,
+ "learning_rate": 2.6696832579185524e-06,
+ "loss": 0.501,
+ "step": 59
+ },
+ {
+ "epoch": 0.27149321266968324,
+ "grad_norm": 0.48727846145629883,
+ "learning_rate": 2.7149321266968327e-06,
+ "loss": 0.5259,
+ "step": 60
+ },
+ {
+ "epoch": 0.27601809954751133,
+ "grad_norm": 0.4420638084411621,
+ "learning_rate": 2.7601809954751135e-06,
+ "loss": 0.5165,
+ "step": 61
+ },
+ {
+ "epoch": 0.28054298642533937,
+ "grad_norm": 0.4388761818408966,
+ "learning_rate": 2.805429864253394e-06,
+ "loss": 0.4902,
+ "step": 62
+ },
+ {
+ "epoch": 0.2850678733031674,
+ "grad_norm": 0.42472442984580994,
+ "learning_rate": 2.8506787330316743e-06,
+ "loss": 0.4561,
+ "step": 63
+ },
+ {
+ "epoch": 0.2895927601809955,
+ "grad_norm": 0.4837457239627838,
+ "learning_rate": 2.895927601809955e-06,
+ "loss": 0.5295,
+ "step": 64
+ },
+ {
+ "epoch": 0.29411764705882354,
+ "grad_norm": 0.4987254738807678,
+ "learning_rate": 2.9411764705882355e-06,
+ "loss": 0.515,
+ "step": 65
+ },
+ {
+ "epoch": 0.2986425339366516,
+ "grad_norm": 0.4454256594181061,
+ "learning_rate": 2.986425339366516e-06,
+ "loss": 0.5238,
+ "step": 66
+ },
+ {
+ "epoch": 0.3031674208144796,
+ "grad_norm": 0.5037821531295776,
+ "learning_rate": 3.0316742081447962e-06,
+ "loss": 0.5527,
+ "step": 67
+ },
+ {
+ "epoch": 0.3076923076923077,
+ "grad_norm": 0.4538140296936035,
+ "learning_rate": 3.0769230769230774e-06,
+ "loss": 0.4905,
+ "step": 68
+ },
+ {
+ "epoch": 0.31221719457013575,
+ "grad_norm": 0.5122717022895813,
+ "learning_rate": 3.122171945701358e-06,
+ "loss": 0.5493,
+ "step": 69
+ },
+ {
+ "epoch": 0.3167420814479638,
+ "grad_norm": 0.44596371054649353,
+ "learning_rate": 3.167420814479638e-06,
+ "loss": 0.5484,
+ "step": 70
+ },
+ {
+ "epoch": 0.3212669683257919,
+ "grad_norm": 0.4473898410797119,
+ "learning_rate": 3.212669683257919e-06,
+ "loss": 0.5218,
+ "step": 71
+ },
+ {
+ "epoch": 0.3257918552036199,
+ "grad_norm": 0.3825899064540863,
+ "learning_rate": 3.2579185520361994e-06,
+ "loss": 0.4883,
+ "step": 72
+ },
+ {
+ "epoch": 0.33031674208144796,
+ "grad_norm": 0.40941205620765686,
518
+ "learning_rate": 3.3031674208144797e-06,
519
+ "loss": 0.4127,
520
+ "step": 73
521
+ },
522
+ {
523
+ "epoch": 0.334841628959276,
524
+ "grad_norm": 0.42915788292884827,
525
+ "learning_rate": 3.34841628959276e-06,
526
+ "loss": 0.5194,
527
+ "step": 74
528
+ },
529
+ {
530
+ "epoch": 0.3393665158371041,
531
+ "grad_norm": 0.44509854912757874,
532
+ "learning_rate": 3.3936651583710413e-06,
533
+ "loss": 0.4664,
534
+ "step": 75
535
+ },
536
+ {
537
+ "epoch": 0.3438914027149321,
538
+ "grad_norm": 0.41393935680389404,
539
+ "learning_rate": 3.4389140271493217e-06,
540
+ "loss": 0.5213,
541
+ "step": 76
542
+ },
543
+ {
544
+ "epoch": 0.34841628959276016,
545
+ "grad_norm": 0.4840753674507141,
546
+ "learning_rate": 3.484162895927602e-06,
547
+ "loss": 0.5456,
548
+ "step": 77
549
+ },
550
+ {
551
+ "epoch": 0.35294117647058826,
552
+ "grad_norm": 0.39024800062179565,
553
+ "learning_rate": 3.529411764705883e-06,
554
+ "loss": 0.446,
555
+ "step": 78
556
+ },
557
+ {
558
+ "epoch": 0.3574660633484163,
559
+ "grad_norm": 0.4665883481502533,
560
+ "learning_rate": 3.5746606334841633e-06,
561
+ "loss": 0.504,
562
+ "step": 79
563
+ },
564
+ {
565
+ "epoch": 0.36199095022624433,
566
+ "grad_norm": 0.38244664669036865,
567
+ "learning_rate": 3.6199095022624436e-06,
568
+ "loss": 0.4656,
569
+ "step": 80
570
+ },
571
+ {
572
+ "epoch": 0.3665158371040724,
573
+ "grad_norm": 0.39441755414009094,
574
+ "learning_rate": 3.665158371040724e-06,
575
+ "loss": 0.4861,
576
+ "step": 81
577
+ },
578
+ {
579
+ "epoch": 0.37104072398190047,
580
+ "grad_norm": 0.41697946190834045,
581
+ "learning_rate": 3.710407239819005e-06,
582
+ "loss": 0.4678,
583
+ "step": 82
584
+ },
585
+ {
586
+ "epoch": 0.3755656108597285,
587
+ "grad_norm": 0.4145350456237793,
588
+ "learning_rate": 3.755656108597285e-06,
589
+ "loss": 0.4501,
590
+ "step": 83
591
+ },
592
+ {
593
+ "epoch": 0.38009049773755654,
594
+ "grad_norm": 0.42326846718788147,
595
+ "learning_rate": 3.8009049773755656e-06,
596
+ "loss": 0.4617,
597
+ "step": 84
598
+ },
599
+ {
600
+ "epoch": 0.38461538461538464,
601
+ "grad_norm": 0.44864463806152344,
602
+ "learning_rate": 3.846153846153847e-06,
603
+ "loss": 0.5503,
604
+ "step": 85
605
+ },
606
+ {
607
+ "epoch": 0.3891402714932127,
608
+ "grad_norm": 0.4511263072490692,
609
+ "learning_rate": 3.891402714932127e-06,
610
+ "loss": 0.4581,
611
+ "step": 86
612
+ },
613
+ {
614
+ "epoch": 0.3936651583710407,
615
+ "grad_norm": 0.5121440291404724,
616
+ "learning_rate": 3.9366515837104075e-06,
617
+ "loss": 0.4622,
618
+ "step": 87
619
+ },
620
+ {
621
+ "epoch": 0.39819004524886875,
622
+ "grad_norm": 0.45248016715049744,
623
+ "learning_rate": 3.9819004524886875e-06,
624
+ "loss": 0.5035,
625
+ "step": 88
626
+ },
627
+ {
628
+ "epoch": 0.40271493212669685,
629
+ "grad_norm": 0.4109788239002228,
630
+ "learning_rate": 4.027149321266969e-06,
631
+ "loss": 0.4743,
632
+ "step": 89
633
+ },
634
+ {
635
+ "epoch": 0.4072398190045249,
636
+ "grad_norm": 0.4233633577823639,
637
+ "learning_rate": 4.072398190045249e-06,
638
+ "loss": 0.4829,
639
+ "step": 90
640
+ },
641
+ {
642
+ "epoch": 0.4117647058823529,
643
+ "grad_norm": 0.47834792733192444,
644
+ "learning_rate": 4.11764705882353e-06,
645
+ "loss": 0.5588,
646
+ "step": 91
647
+ },
648
+ {
649
+ "epoch": 0.416289592760181,
650
+ "grad_norm": 0.4849421977996826,
651
+ "learning_rate": 4.162895927601811e-06,
652
+ "loss": 0.4738,
653
+ "step": 92
654
+ },
655
+ {
656
+ "epoch": 0.42081447963800905,
657
+ "grad_norm": 0.4980224370956421,
658
+ "learning_rate": 4.208144796380091e-06,
659
+ "loss": 0.5288,
660
+ "step": 93
661
+ },
662
+ {
663
+ "epoch": 0.4253393665158371,
664
+ "grad_norm": 0.42526569962501526,
665
+ "learning_rate": 4.2533936651583714e-06,
666
+ "loss": 0.4128,
667
+ "step": 94
668
+ },
669
+ {
670
+ "epoch": 0.4298642533936652,
671
+ "grad_norm": 0.483390748500824,
672
+ "learning_rate": 4.298642533936652e-06,
673
+ "loss": 0.4445,
674
+ "step": 95
675
+ },
676
+ {
677
+ "epoch": 0.4343891402714932,
678
+ "grad_norm": 0.4766150116920471,
679
+ "learning_rate": 4.343891402714932e-06,
680
+ "loss": 0.5001,
681
+ "step": 96
682
+ },
683
+ {
684
+ "epoch": 0.43891402714932126,
685
+ "grad_norm": 0.5314707159996033,
686
+ "learning_rate": 4.389140271493213e-06,
687
+ "loss": 0.4891,
688
+ "step": 97
689
+ },
690
+ {
691
+ "epoch": 0.4434389140271493,
692
+ "grad_norm": 0.44213107228279114,
693
+ "learning_rate": 4.434389140271493e-06,
694
+ "loss": 0.5129,
695
+ "step": 98
696
+ },
697
+ {
698
+ "epoch": 0.4479638009049774,
699
+ "grad_norm": 0.43551620841026306,
700
+ "learning_rate": 4.479638009049775e-06,
701
+ "loss": 0.4557,
702
+ "step": 99
703
+ },
704
+ {
705
+ "epoch": 0.45248868778280543,
706
+ "grad_norm": 0.4333869516849518,
707
+ "learning_rate": 4.5248868778280546e-06,
708
+ "loss": 0.4746,
709
+ "step": 100
710
+ },
711
+ {
712
+ "epoch": 0.45701357466063347,
713
+ "grad_norm": 0.42583024501800537,
714
+ "learning_rate": 4.570135746606335e-06,
715
+ "loss": 0.45,
716
+ "step": 101
717
+ },
718
+ {
719
+ "epoch": 0.46153846153846156,
720
+ "grad_norm": 0.48476096987724304,
721
+ "learning_rate": 4.615384615384616e-06,
722
+ "loss": 0.5087,
723
+ "step": 102
724
+ },
725
+ {
726
+ "epoch": 0.4660633484162896,
727
+ "grad_norm": 0.4503658413887024,
728
+ "learning_rate": 4.660633484162896e-06,
729
+ "loss": 0.4574,
730
+ "step": 103
731
+ },
732
+ {
733
+ "epoch": 0.47058823529411764,
734
+ "grad_norm": 0.43352094292640686,
735
+ "learning_rate": 4.705882352941177e-06,
736
+ "loss": 0.4494,
737
+ "step": 104
738
+ },
739
+ {
740
+ "epoch": 0.4751131221719457,
741
+ "grad_norm": 0.4933190643787384,
742
+ "learning_rate": 4.751131221719457e-06,
743
+ "loss": 0.4441,
744
+ "step": 105
745
+ },
746
+ {
747
+ "epoch": 0.4796380090497738,
748
+ "grad_norm": 0.4639478325843811,
749
+ "learning_rate": 4.7963800904977385e-06,
750
+ "loss": 0.4721,
751
+ "step": 106
752
+ },
753
+ {
754
+ "epoch": 0.4841628959276018,
755
+ "grad_norm": 0.4035486876964569,
756
+ "learning_rate": 4.8416289592760185e-06,
757
+ "loss": 0.4574,
758
+ "step": 107
759
+ },
760
+ {
761
+ "epoch": 0.48868778280542985,
762
+ "grad_norm": 0.47403860092163086,
763
+ "learning_rate": 4.886877828054299e-06,
764
+ "loss": 0.4516,
765
+ "step": 108
766
+ },
767
+ {
768
+ "epoch": 0.49321266968325794,
769
+ "grad_norm": 0.48216933012008667,
770
+ "learning_rate": 4.93212669683258e-06,
771
+ "loss": 0.4521,
772
+ "step": 109
773
+ },
774
+ {
775
+ "epoch": 0.497737556561086,
776
+ "grad_norm": 0.43759751319885254,
777
+ "learning_rate": 4.97737556561086e-06,
778
+ "loss": 0.4409,
779
+ "step": 110
780
+ },
781
+ {
782
+ "epoch": 0.502262443438914,
783
+ "grad_norm": 0.4080127775669098,
784
+ "learning_rate": 5.022624434389141e-06,
785
+ "loss": 0.4762,
786
+ "step": 111
787
+ },
788
+ {
789
+ "epoch": 0.5067873303167421,
790
+ "grad_norm": 0.5794436931610107,
791
+ "learning_rate": 5.067873303167421e-06,
792
+ "loss": 0.4954,
793
+ "step": 112
794
+ },
795
+ {
796
+ "epoch": 0.5113122171945701,
797
+ "grad_norm": 0.5085976123809814,
798
+ "learning_rate": 5.1131221719457016e-06,
799
+ "loss": 0.4635,
800
+ "step": 113
801
+ },
802
+ {
803
+ "epoch": 0.5158371040723982,
804
+ "grad_norm": 0.41183438897132874,
805
+ "learning_rate": 5.158371040723983e-06,
806
+ "loss": 0.46,
807
+ "step": 114
808
+ },
809
+ {
810
+ "epoch": 0.5203619909502263,
811
+ "grad_norm": 0.4061497747898102,
812
+ "learning_rate": 5.203619909502263e-06,
813
+ "loss": 0.4559,
814
+ "step": 115
815
+ },
816
+ {
817
+ "epoch": 0.5248868778280543,
818
+ "grad_norm": 0.4753948748111725,
819
+ "learning_rate": 5.248868778280544e-06,
820
+ "loss": 0.4398,
821
+ "step": 116
822
+ },
823
+ {
824
+ "epoch": 0.5294117647058824,
825
+ "grad_norm": 0.47879111766815186,
826
+ "learning_rate": 5.294117647058824e-06,
827
+ "loss": 0.4709,
828
+ "step": 117
829
+ },
830
+ {
831
+ "epoch": 0.5339366515837104,
832
+ "grad_norm": 0.4345899820327759,
833
+ "learning_rate": 5.339366515837105e-06,
834
+ "loss": 0.4576,
835
+ "step": 118
836
+ },
837
+ {
838
+ "epoch": 0.5384615384615384,
839
+ "grad_norm": 0.4419268071651459,
840
+ "learning_rate": 5.384615384615385e-06,
841
+ "loss": 0.4489,
842
+ "step": 119
843
+ },
844
+ {
845
+ "epoch": 0.5429864253393665,
846
+ "grad_norm": 0.46015602350234985,
847
+ "learning_rate": 5.4298642533936655e-06,
848
+ "loss": 0.4463,
849
+ "step": 120
850
+ },
851
+ {
852
+ "epoch": 0.5475113122171946,
853
+ "grad_norm": 0.4941313862800598,
854
+ "learning_rate": 5.475113122171946e-06,
855
+ "loss": 0.5041,
856
+ "step": 121
857
+ },
858
+ {
859
+ "epoch": 0.5520361990950227,
860
+ "grad_norm": 0.4165358245372772,
861
+ "learning_rate": 5.520361990950227e-06,
862
+ "loss": 0.4749,
863
+ "step": 122
864
+ },
865
+ {
866
+ "epoch": 0.5565610859728507,
867
+ "grad_norm": 0.4693758189678192,
868
+ "learning_rate": 5.565610859728508e-06,
869
+ "loss": 0.4536,
870
+ "step": 123
871
+ },
872
+ {
873
+ "epoch": 0.5610859728506787,
874
+ "grad_norm": 0.4350532293319702,
875
+ "learning_rate": 5.610859728506788e-06,
876
+ "loss": 0.5089,
877
+ "step": 124
878
+ },
879
+ {
880
+ "epoch": 0.5656108597285068,
881
+ "grad_norm": 0.45343929529190063,
882
+ "learning_rate": 5.656108597285069e-06,
883
+ "loss": 0.4014,
884
+ "step": 125
885
+ },
886
+ {
887
+ "epoch": 0.5701357466063348,
888
+ "grad_norm": 0.507248044013977,
889
+ "learning_rate": 5.7013574660633486e-06,
890
+ "loss": 0.4932,
891
+ "step": 126
892
+ },
893
+ {
894
+ "epoch": 0.5746606334841629,
895
+ "grad_norm": 0.48399487137794495,
896
+ "learning_rate": 5.746606334841629e-06,
897
+ "loss": 0.5221,
898
+ "step": 127
899
+ },
900
+ {
901
+ "epoch": 0.579185520361991,
902
+ "grad_norm": 0.39801326394081116,
903
+ "learning_rate": 5.79185520361991e-06,
904
+ "loss": 0.4325,
905
+ "step": 128
906
+ },
907
+ {
908
+ "epoch": 0.583710407239819,
909
+ "grad_norm": 0.4183308780193329,
910
+ "learning_rate": 5.837104072398191e-06,
911
+ "loss": 0.4293,
912
+ "step": 129
913
+ },
914
+ {
915
+ "epoch": 0.5882352941176471,
916
+ "grad_norm": 0.4360859990119934,
917
+ "learning_rate": 5.882352941176471e-06,
918
+ "loss": 0.4135,
919
+ "step": 130
920
+ },
921
+ {
922
+ "epoch": 0.5927601809954751,
923
+ "grad_norm": 0.4908457398414612,
924
+ "learning_rate": 5.927601809954752e-06,
925
+ "loss": 0.4765,
926
+ "step": 131
927
+ },
928
+ {
929
+ "epoch": 0.5972850678733032,
930
+ "grad_norm": 0.4215766489505768,
931
+ "learning_rate": 5.972850678733032e-06,
932
+ "loss": 0.4622,
933
+ "step": 132
934
+ },
935
+ {
936
+ "epoch": 0.6018099547511312,
937
+ "grad_norm": 0.4052114188671112,
938
+ "learning_rate": 6.0180995475113125e-06,
939
+ "loss": 0.4317,
940
+ "step": 133
941
+ },
942
+ {
943
+ "epoch": 0.6063348416289592,
944
+ "grad_norm": 0.46360260248184204,
945
+ "learning_rate": 6.0633484162895924e-06,
946
+ "loss": 0.4827,
947
+ "step": 134
948
+ },
949
+ {
950
+ "epoch": 0.6108597285067874,
951
+ "grad_norm": 0.40575292706489563,
952
+ "learning_rate": 6.108597285067874e-06,
953
+ "loss": 0.4281,
954
+ "step": 135
955
+ },
956
+ {
957
+ "epoch": 0.6153846153846154,
958
+ "grad_norm": 0.5005738139152527,
959
+ "learning_rate": 6.153846153846155e-06,
960
+ "loss": 0.5005,
961
+ "step": 136
962
+ },
963
+ {
964
+ "epoch": 0.6199095022624435,
965
+ "grad_norm": 0.451043963432312,
966
+ "learning_rate": 6.199095022624435e-06,
967
+ "loss": 0.4006,
968
+ "step": 137
969
+ },
970
+ {
971
+ "epoch": 0.6244343891402715,
972
+ "grad_norm": 0.4629691541194916,
973
+ "learning_rate": 6.244343891402716e-06,
974
+ "loss": 0.4941,
975
+ "step": 138
976
+ },
977
+ {
978
+ "epoch": 0.6289592760180995,
979
+ "grad_norm": 0.4519931375980377,
980
+ "learning_rate": 6.2895927601809956e-06,
981
+ "loss": 0.4438,
982
+ "step": 139
983
+ },
984
+ {
985
+ "epoch": 0.6334841628959276,
986
+ "grad_norm": 0.4885016679763794,
987
+ "learning_rate": 6.334841628959276e-06,
988
+ "loss": 0.4642,
989
+ "step": 140
990
+ },
991
+ {
992
+ "epoch": 0.6380090497737556,
993
+ "grad_norm": 0.4142250418663025,
994
+ "learning_rate": 6.380090497737556e-06,
995
+ "loss": 0.4164,
996
+ "step": 141
997
+ },
998
+ {
999
+ "epoch": 0.6425339366515838,
1000
+ "grad_norm": 0.4803358316421509,
1001
+ "learning_rate": 6.425339366515838e-06,
1002
+ "loss": 0.4564,
1003
+ "step": 142
1004
+ },
1005
+ {
1006
+ "epoch": 0.6470588235294118,
1007
+ "grad_norm": 0.47766202688217163,
1008
+ "learning_rate": 6.470588235294119e-06,
1009
+ "loss": 0.4106,
1010
+ "step": 143
1011
+ },
1012
+ {
1013
+ "epoch": 0.6515837104072398,
1014
+ "grad_norm": 0.449381947517395,
1015
+ "learning_rate": 6.515837104072399e-06,
1016
+ "loss": 0.4301,
1017
+ "step": 144
1018
+ },
1019
+ {
1020
+ "epoch": 0.6561085972850679,
1021
+ "grad_norm": 0.3938814103603363,
1022
+ "learning_rate": 6.5610859728506795e-06,
1023
+ "loss": 0.4916,
1024
+ "step": 145
1025
+ },
1026
+ {
1027
+ "epoch": 0.6606334841628959,
1028
+ "grad_norm": 0.39036545157432556,
1029
+ "learning_rate": 6.6063348416289595e-06,
1030
+ "loss": 0.4538,
1031
+ "step": 146
1032
+ },
1033
+ {
1034
+ "epoch": 0.665158371040724,
1035
+ "grad_norm": 0.4129018485546112,
1036
+ "learning_rate": 6.65158371040724e-06,
1037
+ "loss": 0.4414,
1038
+ "step": 147
1039
+ },
1040
+ {
1041
+ "epoch": 0.669683257918552,
1042
+ "grad_norm": 0.4925534725189209,
1043
+ "learning_rate": 6.69683257918552e-06,
1044
+ "loss": 0.4393,
1045
+ "step": 148
1046
+ },
1047
+ {
1048
+ "epoch": 0.6742081447963801,
1049
+ "grad_norm": 0.4131447374820709,
1050
+ "learning_rate": 6.742081447963802e-06,
1051
+ "loss": 0.4944,
1052
+ "step": 149
1053
+ },
1054
+ {
1055
+ "epoch": 0.6787330316742082,
1056
+ "grad_norm": 0.4236401319503784,
1057
+ "learning_rate": 6.787330316742083e-06,
1058
+ "loss": 0.4603,
1059
+ "step": 150
1060
+ },
1061
+ {
1062
+ "epoch": 0.6832579185520362,
1063
+ "grad_norm": 0.4191535413265228,
1064
+ "learning_rate": 6.832579185520363e-06,
1065
+ "loss": 0.4195,
1066
+ "step": 151
1067
+ },
1068
+ {
1069
+ "epoch": 0.6877828054298643,
1070
+ "grad_norm": 0.36520934104919434,
1071
+ "learning_rate": 6.8778280542986434e-06,
1072
+ "loss": 0.4602,
1073
+ "step": 152
1074
+ },
1075
+ {
1076
+ "epoch": 0.6923076923076923,
1077
+ "grad_norm": 0.5145202875137329,
1078
+ "learning_rate": 6.923076923076923e-06,
1079
+ "loss": 0.508,
1080
+ "step": 153
1081
+ },
1082
+ {
1083
+ "epoch": 0.6968325791855203,
1084
+ "grad_norm": 0.5549051761627197,
1085
+ "learning_rate": 6.968325791855204e-06,
1086
+ "loss": 0.4974,
1087
+ "step": 154
1088
+ },
1089
+ {
1090
+ "epoch": 0.7013574660633484,
1091
+ "grad_norm": 0.42621299624443054,
1092
+ "learning_rate": 7.013574660633484e-06,
1093
+ "loss": 0.4783,
1094
+ "step": 155
1095
+ },
1096
+ {
1097
+ "epoch": 0.7058823529411765,
1098
+ "grad_norm": 0.562082052230835,
1099
+ "learning_rate": 7.058823529411766e-06,
1100
+ "loss": 0.519,
1101
+ "step": 156
1102
+ },
1103
+ {
1104
+ "epoch": 0.7104072398190046,
1105
+ "grad_norm": 0.4220001697540283,
1106
+ "learning_rate": 7.104072398190046e-06,
1107
+ "loss": 0.4909,
1108
+ "step": 157
1109
+ },
1110
+ {
1111
+ "epoch": 0.7149321266968326,
1112
+ "grad_norm": 0.450371652841568,
1113
+ "learning_rate": 7.1493212669683265e-06,
1114
+ "loss": 0.441,
1115
+ "step": 158
1116
+ },
1117
+ {
1118
+ "epoch": 0.7194570135746606,
1119
+ "grad_norm": 0.4640924036502838,
1120
+ "learning_rate": 7.1945701357466065e-06,
1121
+ "loss": 0.4315,
1122
+ "step": 159
1123
+ },
1124
+ {
1125
+ "epoch": 0.7239819004524887,
1126
+ "grad_norm": 0.417269229888916,
1127
+ "learning_rate": 7.239819004524887e-06,
1128
+ "loss": 0.4071,
1129
+ "step": 160
1130
+ },
1131
+ {
1132
+ "epoch": 0.7285067873303167,
1133
+ "grad_norm": 0.5066007971763611,
1134
+ "learning_rate": 7.285067873303168e-06,
1135
+ "loss": 0.4618,
1136
+ "step": 161
1137
+ },
1138
+ {
1139
+ "epoch": 0.7330316742081447,
1140
+ "grad_norm": 0.412210613489151,
1141
+ "learning_rate": 7.330316742081448e-06,
1142
+ "loss": 0.4871,
1143
+ "step": 162
1144
+ },
1145
+ {
1146
+ "epoch": 0.7375565610859729,
1147
+ "grad_norm": 0.41878026723861694,
1148
+ "learning_rate": 7.37556561085973e-06,
1149
+ "loss": 0.437,
1150
+ "step": 163
1151
+ },
1152
+ {
1153
+ "epoch": 0.7420814479638009,
1154
+ "grad_norm": 0.48414674401283264,
1155
+ "learning_rate": 7.42081447963801e-06,
1156
+ "loss": 0.4682,
1157
+ "step": 164
1158
+ },
1159
+ {
1160
+ "epoch": 0.746606334841629,
1161
+ "grad_norm": 0.45116230845451355,
1162
+ "learning_rate": 7.4660633484162904e-06,
1163
+ "loss": 0.4137,
1164
+ "step": 165
1165
+ },
1166
+ {
1167
+ "epoch": 0.751131221719457,
1168
+ "grad_norm": 0.4211224615573883,
1169
+ "learning_rate": 7.51131221719457e-06,
1170
+ "loss": 0.4063,
1171
+ "step": 166
1172
+ },
1173
+ {
1174
+ "epoch": 0.755656108597285,
1175
+ "grad_norm": 0.46173834800720215,
1176
+ "learning_rate": 7.556561085972851e-06,
1177
+ "loss": 0.4924,
1178
+ "step": 167
1179
+ },
1180
+ {
1181
+ "epoch": 0.7601809954751131,
1182
+ "grad_norm": 0.4191412925720215,
1183
+ "learning_rate": 7.601809954751131e-06,
1184
+ "loss": 0.4572,
1185
+ "step": 168
1186
+ },
1187
+ {
1188
+ "epoch": 0.7647058823529411,
1189
+ "grad_norm": 0.4505658745765686,
1190
+ "learning_rate": 7.647058823529411e-06,
1191
+ "loss": 0.4481,
1192
+ "step": 169
1193
+ },
1194
+ {
1195
+ "epoch": 0.7692307692307693,
1196
+ "grad_norm": 0.5168994069099426,
1197
+ "learning_rate": 7.692307692307694e-06,
1198
+ "loss": 0.4449,
1199
+ "step": 170
1200
+ },
1201
+ {
1202
+ "epoch": 0.7737556561085973,
1203
+ "grad_norm": 0.5589344501495361,
1204
+ "learning_rate": 7.737556561085974e-06,
1205
+ "loss": 0.4761,
1206
+ "step": 171
1207
+ },
1208
+ {
1209
+ "epoch": 0.7782805429864253,
1210
+ "grad_norm": 0.4831865429878235,
1211
+ "learning_rate": 7.782805429864253e-06,
1212
+ "loss": 0.4805,
1213
+ "step": 172
1214
+ },
1215
+ {
1216
+ "epoch": 0.7828054298642534,
1217
+ "grad_norm": 0.46204084157943726,
1218
+ "learning_rate": 7.828054298642534e-06,
1219
+ "loss": 0.4535,
1220
+ "step": 173
1221
+ },
1222
+ {
1223
+ "epoch": 0.7873303167420814,
1224
+ "grad_norm": 0.44106197357177734,
1225
+ "learning_rate": 7.873303167420815e-06,
1226
+ "loss": 0.4072,
1227
+ "step": 174
1228
+ },
1229
+ {
1230
+ "epoch": 0.7918552036199095,
1231
+ "grad_norm": 0.5106877088546753,
1232
+ "learning_rate": 7.918552036199096e-06,
1233
+ "loss": 0.4125,
1234
+ "step": 175
1235
+ },
1236
+ {
1237
+ "epoch": 0.7963800904977375,
1238
+ "grad_norm": 0.5074198842048645,
1239
+ "learning_rate": 7.963800904977375e-06,
1240
+ "loss": 0.434,
1241
+ "step": 176
1242
+ },
1243
+ {
1244
+ "epoch": 0.8009049773755657,
1245
+ "grad_norm": 0.40606701374053955,
1246
+ "learning_rate": 8.009049773755657e-06,
1247
+ "loss": 0.4624,
1248
+ "step": 177
1249
+ },
1250
+ {
1251
+ "epoch": 0.8054298642533937,
1252
+ "grad_norm": 0.5007057189941406,
1253
+ "learning_rate": 8.054298642533938e-06,
1254
+ "loss": 0.4476,
1255
+ "step": 178
1256
+ },
1257
+ {
1258
+ "epoch": 0.8099547511312217,
1259
+ "grad_norm": 0.5165268778800964,
1260
+ "learning_rate": 8.099547511312217e-06,
1261
+ "loss": 0.4746,
1262
+ "step": 179
1263
+ },
1264
+ {
1265
+ "epoch": 0.8144796380090498,
1266
+ "grad_norm": 0.46993181109428406,
1267
+ "learning_rate": 8.144796380090498e-06,
1268
+ "loss": 0.4432,
1269
+ "step": 180
1270
+ },
1271
+ {
1272
+ "epoch": 0.8190045248868778,
1273
+ "grad_norm": 0.5677555799484253,
1274
+ "learning_rate": 8.190045248868779e-06,
1275
+ "loss": 0.4422,
1276
+ "step": 181
1277
+ },
1278
+ {
1279
+ "epoch": 0.8235294117647058,
1280
+ "grad_norm": 0.5643273591995239,
1281
+ "learning_rate": 8.23529411764706e-06,
1282
+ "loss": 0.5143,
1283
+ "step": 182
1284
+ },
1285
+ {
1286
+ "epoch": 0.8280542986425339,
1287
+ "grad_norm": 0.5022807717323303,
1288
+ "learning_rate": 8.280542986425339e-06,
1289
+ "loss": 0.4544,
1290
+ "step": 183
1291
+ },
1292
+ {
1293
+ "epoch": 0.832579185520362,
1294
+ "grad_norm": 0.49226853251457214,
1295
+ "learning_rate": 8.325791855203621e-06,
1296
+ "loss": 0.4637,
1297
+ "step": 184
1298
+ },
1299
+ {
1300
+ "epoch": 0.8371040723981901,
1301
+ "grad_norm": 0.48023420572280884,
1302
+ "learning_rate": 8.371040723981902e-06,
1303
+ "loss": 0.4264,
1304
+ "step": 185
1305
+ },
1306
+ {
1307
+ "epoch": 0.8416289592760181,
1308
+ "grad_norm": 0.5742875337600708,
1309
+ "learning_rate": 8.416289592760181e-06,
1310
+ "loss": 0.4761,
1311
+ "step": 186
1312
+ },
1313
+ {
1314
+ "epoch": 0.8461538461538461,
1315
+ "grad_norm": 0.5855184197425842,
1316
+ "learning_rate": 8.461538461538462e-06,
1317
+ "loss": 0.4668,
1318
+ "step": 187
1319
+ },
1320
+ {
1321
+ "epoch": 0.8506787330316742,
1322
+ "grad_norm": 0.5004088282585144,
1323
+ "learning_rate": 8.506787330316743e-06,
1324
+ "loss": 0.4354,
1325
+ "step": 188
1326
+ },
1327
+ {
1328
+ "epoch": 0.8552036199095022,
1329
+ "grad_norm": 0.5111898183822632,
1330
+ "learning_rate": 8.552036199095024e-06,
1331
+ "loss": 0.4494,
1332
+ "step": 189
1333
+ },
1334
+ {
1335
+ "epoch": 0.8597285067873304,
1336
+ "grad_norm": 0.5159589052200317,
1337
+ "learning_rate": 8.597285067873304e-06,
1338
+ "loss": 0.493,
1339
+ "step": 190
1340
+ },
1341
+ {
1342
+ "epoch": 0.8642533936651584,
1343
+ "grad_norm": 0.5572559833526611,
1344
+ "learning_rate": 8.642533936651585e-06,
1345
+ "loss": 0.4845,
1346
+ "step": 191
1347
+ },
1348
+ {
1349
+ "epoch": 0.8687782805429864,
1350
+ "grad_norm": 0.47993189096450806,
1351
+ "learning_rate": 8.687782805429864e-06,
1352
+ "loss": 0.4709,
1353
+ "step": 192
1354
+ },
1355
+ {
1356
+ "epoch": 0.8733031674208145,
1357
+ "grad_norm": 0.4479634463787079,
1358
+ "learning_rate": 8.733031674208145e-06,
1359
+ "loss": 0.4192,
1360
+ "step": 193
1361
+ },
1362
+ {
1363
+ "epoch": 0.8778280542986425,
1364
+ "grad_norm": 0.48463767766952515,
1365
+ "learning_rate": 8.778280542986426e-06,
1366
+ "loss": 0.4468,
1367
+ "step": 194
1368
+ },
1369
+ {
1370
+ "epoch": 0.8823529411764706,
1371
+ "grad_norm": 0.4300975203514099,
1372
+ "learning_rate": 8.823529411764707e-06,
1373
+ "loss": 0.4546,
1374
+ "step": 195
1375
+ },
1376
+ {
1377
+ "epoch": 0.8868778280542986,
1378
+ "grad_norm": 0.43377694487571716,
1379
+ "learning_rate": 8.868778280542986e-06,
1380
+ "loss": 0.4517,
1381
+ "step": 196
1382
+ },
1383
+ {
1384
+ "epoch": 0.8914027149321267,
1385
+ "grad_norm": 0.4832284152507782,
1386
+ "learning_rate": 8.914027149321268e-06,
1387
+ "loss": 0.4296,
1388
+ "step": 197
1389
+ },
1390
+ {
1391
+ "epoch": 0.8959276018099548,
1392
+ "grad_norm": 0.47211429476737976,
1393
+ "learning_rate": 8.95927601809955e-06,
1394
+ "loss": 0.4487,
1395
+ "step": 198
1396
+ },
1397
+ {
1398
+ "epoch": 0.9004524886877828,
1399
+ "grad_norm": 0.4101850688457489,
1400
+ "learning_rate": 9.004524886877828e-06,
1401
+ "loss": 0.4392,
1402
+ "step": 199
1403
+ },
1404
+ {
1405
+ "epoch": 0.9049773755656109,
1406
+ "grad_norm": 0.5208500623703003,
1407
+ "learning_rate": 9.049773755656109e-06,
1408
+ "loss": 0.4276,
1409
+ "step": 200
1410
+ },
1411
+ {
1412
+ "epoch": 0.9095022624434389,
1413
+ "grad_norm": 0.49378854036331177,
1414
+ "learning_rate": 9.09502262443439e-06,
1415
+ "loss": 0.454,
1416
+ "step": 201
1417
+ },
1418
+ {
1419
+ "epoch": 0.9140271493212669,
1420
+ "grad_norm": 0.4764745831489563,
1421
+ "learning_rate": 9.14027149321267e-06,
1422
+ "loss": 0.4176,
1423
+ "step": 202
1424
+ },
1425
+ {
1426
+ "epoch": 0.918552036199095,
1427
+ "grad_norm": 0.45192357897758484,
1428
+ "learning_rate": 9.18552036199095e-06,
1429
+ "loss": 0.4762,
1430
+ "step": 203
1431
+ },
1432
+ {
1433
+ "epoch": 0.9230769230769231,
1434
+ "grad_norm": 0.4330678880214691,
1435
+ "learning_rate": 9.230769230769232e-06,
1436
+ "loss": 0.4158,
1437
+ "step": 204
1438
+ },
1439
+ {
1440
+ "epoch": 0.9276018099547512,
1441
+ "grad_norm": 0.5406427383422852,
1442
+ "learning_rate": 9.276018099547513e-06,
1443
+ "loss": 0.4351,
1444
+ "step": 205
1445
+ },
1446
+ {
1447
+ "epoch": 0.9321266968325792,
1448
+ "grad_norm": 0.4686480760574341,
1449
+ "learning_rate": 9.321266968325792e-06,
1450
+ "loss": 0.4683,
1451
+ "step": 206
1452
+ },
1453
+ {
1454
+ "epoch": 0.9366515837104072,
1455
+ "grad_norm": 0.430961549282074,
1456
+ "learning_rate": 9.366515837104073e-06,
1457
+ "loss": 0.4077,
1458
+ "step": 207
1459
+ },
1460
+ {
1461
+ "epoch": 0.9411764705882353,
1462
+ "grad_norm": 0.4596535563468933,
1463
+ "learning_rate": 9.411764705882354e-06,
1464
+ "loss": 0.4357,
1465
+ "step": 208
1466
+ },
1467
+ {
1468
+ "epoch": 0.9457013574660633,
1469
+ "grad_norm": 0.49825915694236755,
1470
+ "learning_rate": 9.457013574660635e-06,
1471
+ "loss": 0.4438,
1472
+ "step": 209
1473
+ },
1474
+ {
1475
+ "epoch": 0.9502262443438914,
1476
+ "grad_norm": 0.4968724846839905,
1477
+ "learning_rate": 9.502262443438914e-06,
1478
+ "loss": 0.4391,
1479
+ "step": 210
1480
+ },
1481
+ {
1482
+ "epoch": 0.9547511312217195,
1483
+ "grad_norm": 0.5227912664413452,
1484
+ "learning_rate": 9.547511312217196e-06,
1485
+ "loss": 0.4682,
1486
+ "step": 211
1487
+ },
1488
+ {
1489
+ "epoch": 0.9592760180995475,
1490
+ "grad_norm": 0.593227744102478,
1491
+ "learning_rate": 9.592760180995477e-06,
1492
+ "loss": 0.4375,
1493
+ "step": 212
1494
+ },
1495
+ {
1496
+ "epoch": 0.9638009049773756,
1497
+ "grad_norm": 0.46191665530204773,
1498
+ "learning_rate": 9.638009049773756e-06,
1499
+ "loss": 0.4469,
1500
+ "step": 213
1501
+ },
1502
+ {
1503
+ "epoch": 0.9683257918552036,
1504
+ "grad_norm": 0.49439120292663574,
1505
+ "learning_rate": 9.683257918552037e-06,
1506
+ "loss": 0.4167,
1507
+ "step": 214
1508
+ },
1509
+ {
1510
+ "epoch": 0.9728506787330317,
1511
+ "grad_norm": 0.5323225855827332,
1512
+ "learning_rate": 9.728506787330318e-06,
1513
+ "loss": 0.462,
1514
+ "step": 215
1515
+ },
1516
+ {
1517
+ "epoch": 0.9773755656108597,
1518
+ "grad_norm": 0.40613311529159546,
1519
+ "learning_rate": 9.773755656108599e-06,
1520
+ "loss": 0.3551,
1521
+ "step": 216
1522
+ },
1523
+ {
1524
+ "epoch": 0.9819004524886877,
1525
+ "grad_norm": 0.523820161819458,
1526
+ "learning_rate": 9.819004524886878e-06,
1527
+ "loss": 0.4303,
1528
+ "step": 217
1529
+ },
1530
+ {
1531
+ "epoch": 0.9864253393665159,
1532
+ "grad_norm": 0.4367105960845947,
1533
+ "learning_rate": 9.86425339366516e-06,
1534
+ "loss": 0.4617,
1535
+ "step": 218
1536
+ },
1537
+ {
1538
+ "epoch": 0.9909502262443439,
1539
+ "grad_norm": 0.474623441696167,
1540
+ "learning_rate": 9.90950226244344e-06,
1541
+ "loss": 0.4098,
1542
+ "step": 219
1543
+ },
1544
+ {
1545
+ "epoch": 0.995475113122172,
1546
+ "grad_norm": 0.5020592212677002,
1547
+ "learning_rate": 9.95475113122172e-06,
1548
+ "loss": 0.3816,
1549
+ "step": 220
1550
+ },
1551
+ {
1552
+ "epoch": 1.0,
1553
+ "grad_norm": 0.4186856150627136,
1554
+ "learning_rate": 1e-05,
1555
+ "loss": 0.4303,
1556
+ "step": 221
1557
+ },
1558
+ {
1559
+ "epoch": 1.004524886877828,
1560
+ "grad_norm": 0.5000538229942322,
1561
+ "learning_rate": 9.999993763081091e-06,
1562
+ "loss": 0.4021,
1563
+ "step": 222
1564
+ },
1565
+ {
1566
+ "epoch": 1.009049773755656,
1567
+ "grad_norm": 0.4233403205871582,
1568
+ "learning_rate": 9.999975052339923e-06,
1569
+ "loss": 0.3917,
1570
+ "step": 223
1571
+ },
1572
+ {
1573
+ "epoch": 1.0135746606334841,
1574
+ "grad_norm": 0.44955193996429443,
1575
+ "learning_rate": 9.999943867823174e-06,
1576
+ "loss": 0.4112,
1577
+ "step": 224
1578
+ },
1579
+ {
1580
+ "epoch": 1.0180995475113122,
1581
+ "grad_norm": 0.5060259699821472,
1582
+ "learning_rate": 9.999900209608642e-06,
1583
+ "loss": 0.3585,
1584
+ "step": 225
1585
+ },
1586
+ {
1587
+ "epoch": 1.0226244343891402,
1588
+ "grad_norm": 0.46285244822502136,
1589
+ "learning_rate": 9.999844077805245e-06,
1590
+ "loss": 0.3972,
1591
+ "step": 226
1592
+ },
1593
+ {
1594
+ "epoch": 1.0271493212669682,
1595
+ "grad_norm": 0.519055962562561,
1596
+ "learning_rate": 9.999775472553019e-06,
1597
+ "loss": 0.3439,
1598
+ "step": 227
1599
+ },
1600
+ {
1601
+ "epoch": 1.0316742081447963,
1602
+ "grad_norm": 0.49448850750923157,
1603
+ "learning_rate": 9.999694394023119e-06,
1604
+ "loss": 0.3777,
1605
+ "step": 228
1606
+ },
1607
+ {
1608
+ "epoch": 1.0361990950226245,
1609
+ "grad_norm": 0.502755343914032,
1610
+ "learning_rate": 9.999600842417815e-06,
1611
+ "loss": 0.3685,
1612
+ "step": 229
1613
+ },
1614
+ {
1615
+ "epoch": 1.0407239819004526,
1616
+ "grad_norm": 0.5343412756919861,
1617
+ "learning_rate": 9.999494817970498e-06,
1618
+ "loss": 0.3402,
1619
+ "step": 230
1620
+ },
1621
+ {
1622
+ "epoch": 1.0452488687782806,
1623
+ "grad_norm": 0.5237849354743958,
1624
+ "learning_rate": 9.999376320945673e-06,
1625
+ "loss": 0.3453,
1626
+ "step": 231
1627
+ },
1628
+ {
1629
+ "epoch": 1.0497737556561086,
1630
+ "grad_norm": 0.5596418380737305,
1631
+ "learning_rate": 9.999245351638964e-06,
1632
+ "loss": 0.3945,
1633
+ "step": 232
1634
+ },
1635
+ {
1636
+ "epoch": 1.0542986425339367,
1637
+ "grad_norm": 0.3989311456680298,
1638
+ "learning_rate": 9.999101910377107e-06,
1639
+ "loss": 0.3383,
1640
+ "step": 233
1641
+ },
1642
+ {
1643
+ "epoch": 1.0588235294117647,
1644
+ "grad_norm": 0.4172278642654419,
1645
+ "learning_rate": 9.998945997517957e-06,
1646
+ "loss": 0.392,
1647
+ "step": 234
1648
+ },
1649
+ {
1650
+ "epoch": 1.0633484162895928,
1651
+ "grad_norm": 0.5389004349708557,
1652
+ "learning_rate": 9.998777613450478e-06,
1653
+ "loss": 0.3883,
1654
+ "step": 235
1655
+ },
1656
+ {
1657
+ "epoch": 1.0678733031674208,
1658
+ "grad_norm": 0.5382748246192932,
1659
+ "learning_rate": 9.998596758594752e-06,
1660
+ "loss": 0.3582,
1661
+ "step": 236
1662
+ },
1663
+ {
1664
+ "epoch": 1.0723981900452488,
1665
+ "grad_norm": 0.5459226965904236,
1666
+ "learning_rate": 9.998403433401969e-06,
1667
+ "loss": 0.4137,
1668
+ "step": 237
1669
+ },
1670
+ {
1671
+ "epoch": 1.0769230769230769,
1672
+ "grad_norm": 0.48526811599731445,
1673
+ "learning_rate": 9.998197638354428e-06,
1674
+ "loss": 0.3618,
1675
+ "step": 238
1676
+ },
1677
+ {
1678
+ "epoch": 1.081447963800905,
1679
+ "grad_norm": 0.47047311067581177,
1680
+ "learning_rate": 9.997979373965542e-06,
1681
+ "loss": 0.3097,
1682
+ "step": 239
1683
+ },
1684
+ {
1685
+ "epoch": 1.085972850678733,
1686
+ "grad_norm": 0.5708703398704529,
1687
+ "learning_rate": 9.997748640779829e-06,
1688
+ "loss": 0.4371,
1689
+ "step": 240
1690
+ },
1691
+ {
1692
+ "epoch": 1.090497737556561,
1693
+ "grad_norm": 0.5245316028594971,
1694
+ "learning_rate": 9.997505439372914e-06,
1695
+ "loss": 0.3988,
1696
+ "step": 241
1697
+ },
1698
+ {
1699
+ "epoch": 1.0950226244343892,
1700
+ "grad_norm": 0.5281980633735657,
1701
+ "learning_rate": 9.997249770351531e-06,
1702
+ "loss": 0.414,
1703
+ "step": 242
1704
+ },
1705
+ {
1706
+ "epoch": 1.0995475113122173,
1707
+ "grad_norm": 0.6034480333328247,
1708
+ "learning_rate": 9.99698163435351e-06,
1709
+ "loss": 0.4268,
1710
+ "step": 243
1711
+ },
1712
+ {
1713
+ "epoch": 1.1040723981900453,
1714
+ "grad_norm": 0.479973703622818,
1715
+ "learning_rate": 9.996701032047795e-06,
1716
+ "loss": 0.4261,
1717
+ "step": 244
1718
+ },
1719
+ {
1720
+ "epoch": 1.1085972850678734,
1721
+ "grad_norm": 0.4137464165687561,
1722
+ "learning_rate": 9.996407964134416e-06,
1723
+ "loss": 0.369,
1724
+ "step": 245
1725
+ },
1726
+ {
1727
+ "epoch": 1.1131221719457014,
1728
+ "grad_norm": 0.7938151359558105,
1729
+ "learning_rate": 9.996102431344514e-06,
1730
+ "loss": 0.4152,
1731
+ "step": 246
1732
+ },
1733
+ {
1734
+ "epoch": 1.1176470588235294,
1735
+ "grad_norm": 0.5222673416137695,
1736
+ "learning_rate": 9.99578443444032e-06,
1737
+ "loss": 0.3956,
1738
+ "step": 247
1739
+ },
1740
+ {
1741
+ "epoch": 1.1221719457013575,
1742
+ "grad_norm": 0.41782158613204956,
1743
+ "learning_rate": 9.995453974215164e-06,
1744
+ "loss": 0.3423,
1745
+ "step": 248
1746
+ },
1747
+ {
1748
+ "epoch": 1.1266968325791855,
1749
+ "grad_norm": 0.6398916244506836,
1750
+ "learning_rate": 9.995111051493468e-06,
1751
+ "loss": 0.3975,
1752
+ "step": 249
1753
+ },
1754
+ {
1755
+ "epoch": 1.1312217194570136,
1756
+ "grad_norm": 0.5231488943099976,
1757
+ "learning_rate": 9.99475566713074e-06,
1758
+ "loss": 0.4051,
1759
+ "step": 250
1760
+ },
1761
+ {
1762
+ "epoch": 1.1357466063348416,
1763
+ "grad_norm": 0.48015347123146057,
1764
+ "learning_rate": 9.994387822013586e-06,
1765
+ "loss": 0.4183,
1766
+ "step": 251
1767
+ },
1768
+ {
1769
+ "epoch": 1.1402714932126696,
1770
+ "grad_norm": 0.44875526428222656,
1771
+ "learning_rate": 9.994007517059693e-06,
1772
+ "loss": 0.37,
1773
+ "step": 252
1774
+ },
1775
+ {
1776
+ "epoch": 1.1447963800904977,
1777
+ "grad_norm": 0.6783897876739502,
1778
+ "learning_rate": 9.993614753217833e-06,
1779
+ "loss": 0.4063,
1780
+ "step": 253
1781
+ },
1782
+ {
1783
+ "epoch": 1.1493212669683257,
1784
+ "grad_norm": 0.5309958457946777,
1785
+ "learning_rate": 9.99320953146786e-06,
1786
+ "loss": 0.3896,
1787
+ "step": 254
1788
+ },
1789
+ {
1790
+ "epoch": 1.1538461538461537,
1791
+ "grad_norm": 0.5052987933158875,
1792
+ "learning_rate": 9.992791852820709e-06,
1793
+ "loss": 0.4243,
1794
+ "step": 255
1795
+ },
1796
+ {
1797
+ "epoch": 1.1583710407239818,
1798
+ "grad_norm": 0.47427037358283997,
1799
+ "learning_rate": 9.992361718318393e-06,
1800
+ "loss": 0.3601,
1801
+ "step": 256
1802
+ },
1803
+ {
1804
+ "epoch": 1.16289592760181,
1805
+ "grad_norm": 0.4682779312133789,
1806
+ "learning_rate": 9.991919129033994e-06,
1807
+ "loss": 0.3564,
1808
+ "step": 257
1809
+ },
1810
+ {
1811
+ "epoch": 1.167420814479638,
1812
+ "grad_norm": 0.5049699544906616,
1813
+ "learning_rate": 9.991464086071669e-06,
1814
+ "loss": 0.3362,
1815
+ "step": 258
1816
+ },
1817
+ {
1818
+ "epoch": 1.1719457013574661,
1819
+ "grad_norm": 0.43032440543174744,
1820
+ "learning_rate": 9.990996590566648e-06,
1821
+ "loss": 0.3884,
1822
+ "step": 259
1823
+ },
1824
+ {
1825
+ "epoch": 1.1764705882352942,
1826
+ "grad_norm": 0.48647770285606384,
1827
+ "learning_rate": 9.990516643685222e-06,
1828
+ "loss": 0.3797,
1829
+ "step": 260
1830
+ },
1831
+ {
1832
+ "epoch": 1.1809954751131222,
1833
+ "grad_norm": 0.6087546944618225,
1834
+ "learning_rate": 9.990024246624745e-06,
1835
+ "loss": 0.4037,
1836
+ "step": 261
1837
+ },
1838
+ {
1839
+ "epoch": 1.1855203619909502,
1840
+ "grad_norm": 0.4716806709766388,
1841
+ "learning_rate": 9.989519400613636e-06,
1842
+ "loss": 0.4145,
1843
+ "step": 262
1844
+ },
1845
+ {
1846
+ "epoch": 1.1900452488687783,
1847
+ "grad_norm": 0.47347491979599,
1848
+ "learning_rate": 9.989002106911368e-06,
1849
+ "loss": 0.3781,
1850
+ "step": 263
1851
+ },
1852
+ {
1853
+ "epoch": 1.1945701357466063,
1854
+ "grad_norm": 0.6511465907096863,
1855
+ "learning_rate": 9.988472366808468e-06,
1856
+ "loss": 0.4168,
1857
+ "step": 264
1858
+ },
1859
+ {
1860
+ "epoch": 1.1990950226244343,
1861
+ "grad_norm": 0.4355635941028595,
1862
+ "learning_rate": 9.987930181626515e-06,
1863
+ "loss": 0.3703,
1864
+ "step": 265
1865
+ },
1866
+ {
1867
+ "epoch": 1.2036199095022624,
1868
+ "grad_norm": 0.4431900382041931,
1869
+ "learning_rate": 9.987375552718133e-06,
1870
+ "loss": 0.4355,
1871
+ "step": 266
1872
+ },
1873
+ {
1874
+ "epoch": 1.2081447963800904,
1875
+ "grad_norm": 0.4137621223926544,
1876
+ "learning_rate": 9.986808481466994e-06,
1877
+ "loss": 0.3837,
1878
+ "step": 267
1879
+ },
1880
+ {
1881
+ "epoch": 1.2126696832579185,
1882
+ "grad_norm": 0.49528124928474426,
1883
+ "learning_rate": 9.98622896928781e-06,
1884
+ "loss": 0.3732,
1885
+ "step": 268
1886
+ },
1887
+ {
1888
+ "epoch": 1.2171945701357467,
1889
+ "grad_norm": 0.45777904987335205,
1890
+ "learning_rate": 9.985637017626326e-06,
1891
+ "loss": 0.4144,
1892
+ "step": 269
1893
+ },
1894
+ {
1895
+ "epoch": 1.2217194570135748,
1896
+ "grad_norm": 0.5112358331680298,
1897
+ "learning_rate": 9.985032627959325e-06,
1898
+ "loss": 0.4616,
1899
+ "step": 270
1900
+ },
1901
+ {
1902
+ "epoch": 1.2262443438914028,
1903
+ "grad_norm": 0.5006369948387146,
1904
+ "learning_rate": 9.984415801794622e-06,
1905
+ "loss": 0.4317,
1906
+ "step": 271
1907
+ },
1908
+ {
1909
+ "epoch": 1.2307692307692308,
1910
+ "grad_norm": 0.48134738206863403,
1911
+ "learning_rate": 9.983786540671052e-06,
1912
+ "loss": 0.3928,
1913
+ "step": 272
1914
+ },
1915
+ {
1916
+ "epoch": 1.2352941176470589,
1917
+ "grad_norm": 0.4563564956188202,
1918
+ "learning_rate": 9.983144846158472e-06,
1919
+ "loss": 0.3907,
1920
+ "step": 273
1921
+ },
1922
+ {
1923
+ "epoch": 1.239819004524887,
1924
+ "grad_norm": 0.43982940912246704,
1925
+ "learning_rate": 9.982490719857766e-06,
1926
+ "loss": 0.41,
1927
+ "step": 274
1928
+ },
1929
+ {
1930
+ "epoch": 1.244343891402715,
1931
+ "grad_norm": 0.4124949872493744,
1932
+ "learning_rate": 9.981824163400827e-06,
1933
+ "loss": 0.3775,
1934
+ "step": 275
1935
+ },
1936
+ {
1937
+ "epoch": 1.248868778280543,
1938
+ "grad_norm": 0.44366535544395447,
1939
+ "learning_rate": 9.981145178450555e-06,
1940
+ "loss": 0.3958,
1941
+ "step": 276
1942
+ },
1943
+ {
1944
+ "epoch": 1.253393665158371,
1945
+ "grad_norm": 0.4540594220161438,
1946
+ "learning_rate": 9.980453766700861e-06,
1947
+ "loss": 0.4276,
1948
+ "step": 277
1949
+ },
1950
+ {
1951
+ "epoch": 1.257918552036199,
1952
+ "grad_norm": 0.4591408967971802,
1953
+ "learning_rate": 9.979749929876658e-06,
1954
+ "loss": 0.3899,
1955
+ "step": 278
1956
+ },
1957
+ {
1958
+ "epoch": 1.262443438914027,
1959
+ "grad_norm": 0.4709743857383728,
1960
+ "learning_rate": 9.979033669733855e-06,
1961
+ "loss": 0.4155,
1962
+ "step": 279
1963
+ },
1964
+ {
1965
+ "epoch": 1.2669683257918551,
1966
+ "grad_norm": 0.5153703689575195,
1967
+ "learning_rate": 9.978304988059352e-06,
1968
+ "loss": 0.355,
1969
+ "step": 280
1970
+ },
1971
+ {
1972
+ "epoch": 1.2714932126696832,
1973
+ "grad_norm": 0.5748324990272522,
1974
+ "learning_rate": 9.977563886671043e-06,
1975
+ "loss": 0.3903,
1976
+ "step": 281
1977
+ },
1978
+ {
1979
+ "epoch": 1.2760180995475112,
1980
+ "grad_norm": 0.42208483815193176,
1981
+ "learning_rate": 9.976810367417801e-06,
1982
+ "loss": 0.3711,
1983
+ "step": 282
1984
+ },
1985
+ {
1986
+ "epoch": 1.2805429864253393,
1987
+ "grad_norm": 0.4175834059715271,
1988
+ "learning_rate": 9.976044432179486e-06,
1989
+ "loss": 0.3714,
1990
+ "step": 283
1991
+ },
1992
+ {
1993
+ "epoch": 1.2850678733031673,
1994
+ "grad_norm": 0.48363685607910156,
1995
+ "learning_rate": 9.975266082866923e-06,
1996
+ "loss": 0.359,
1997
+ "step": 284
1998
+ },
1999
+ {
2000
+ "epoch": 1.2895927601809956,
2001
+ "grad_norm": 0.4426751732826233,
2002
+ "learning_rate": 9.974475321421918e-06,
2003
+ "loss": 0.3497,
2004
+ "step": 285
2005
+ },
2006
+ {
2007
+ "epoch": 1.2941176470588236,
2008
+ "grad_norm": 0.49176037311553955,
2009
+ "learning_rate": 9.973672149817232e-06,
2010
+ "loss": 0.4077,
2011
+ "step": 286
2012
+ },
2013
+ {
2014
+ "epoch": 1.2986425339366516,
2015
+ "grad_norm": 0.47568652033805847,
2016
+ "learning_rate": 9.972856570056594e-06,
2017
+ "loss": 0.3892,
2018
+ "step": 287
2019
+ },
2020
+ {
2021
+ "epoch": 1.3031674208144797,
2022
+ "grad_norm": 0.484466552734375,
2023
+ "learning_rate": 9.972028584174687e-06,
2024
+ "loss": 0.4417,
2025
+ "step": 288
2026
+ },
2027
+ {
2028
+ "epoch": 1.3076923076923077,
2029
+ "grad_norm": 0.41258934140205383,
2030
+ "learning_rate": 9.971188194237141e-06,
2031
+ "loss": 0.3803,
2032
+ "step": 289
2033
+ },
2034
+ {
2035
+ "epoch": 1.3122171945701357,
2036
+ "grad_norm": 0.463830828666687,
2037
+ "learning_rate": 9.970335402340534e-06,
2038
+ "loss": 0.4389,
2039
+ "step": 290
2040
+ },
2041
+ {
2042
+ "epoch": 1.3167420814479638,
2043
+ "grad_norm": 0.425300270318985,
2044
+ "learning_rate": 9.969470210612384e-06,
2045
+ "loss": 0.4108,
2046
+ "step": 291
2047
+ },
2048
+ {
2049
+ "epoch": 1.3212669683257918,
2050
+ "grad_norm": 0.5346444249153137,
2051
+ "learning_rate": 9.968592621211146e-06,
2052
+ "loss": 0.3969,
2053
+ "step": 292
2054
+ },
2055
+ {
2056
+ "epoch": 1.3257918552036199,
2057
+ "grad_norm": 0.4658094644546509,
2058
+ "learning_rate": 9.967702636326195e-06,
2059
+ "loss": 0.4434,
2060
+ "step": 293
2061
+ },
2062
+ {
2063
+ "epoch": 1.330316742081448,
2064
+ "grad_norm": 0.48898160457611084,
2065
+ "learning_rate": 9.966800258177842e-06,
2066
+ "loss": 0.4164,
2067
+ "step": 294
2068
+ },
2069
+ {
2070
+ "epoch": 1.334841628959276,
2071
+ "grad_norm": 0.5616970062255859,
2072
+ "learning_rate": 9.96588548901731e-06,
2073
+ "loss": 0.4472,
2074
+ "step": 295
2075
+ },
2076
+ {
2077
+ "epoch": 1.3393665158371042,
2078
+ "grad_norm": 0.43943265080451965,
2079
+ "learning_rate": 9.964958331126735e-06,
2080
+ "loss": 0.4284,
2081
+ "step": 296
2082
+ },
2083
+ {
2084
+ "epoch": 1.3438914027149322,
2085
+ "grad_norm": 0.5051960945129395,
2086
+ "learning_rate": 9.964018786819158e-06,
2087
+ "loss": 0.3999,
2088
+ "step": 297
2089
+ },
2090
+ {
2091
+ "epoch": 1.3484162895927603,
2092
+ "grad_norm": 0.5062891840934753,
2093
+ "learning_rate": 9.963066858438525e-06,
2094
+ "loss": 0.4063,
2095
+ "step": 298
2096
+ },
2097
+ {
2098
+ "epoch": 1.3529411764705883,
2099
+ "grad_norm": 0.5116050243377686,
2100
+ "learning_rate": 9.96210254835968e-06,
2101
+ "loss": 0.3752,
2102
+ "step": 299
2103
+ },
2104
+ {
2105
+ "epoch": 1.3574660633484164,
2106
+ "grad_norm": 0.4431830048561096,
2107
+ "learning_rate": 9.961125858988348e-06,
2108
+ "loss": 0.3763,
2109
+ "step": 300
2110
+ },
2111
+ {
2112
+ "epoch": 1.3619909502262444,
2113
+ "grad_norm": 0.4586993455886841,
2114
+ "learning_rate": 9.96013679276114e-06,
2115
+ "loss": 0.3983,
2116
+ "step": 301
2117
+ },
2118
+ {
2119
+ "epoch": 1.3665158371040724,
2120
+ "grad_norm": 0.4054921567440033,
2121
+ "learning_rate": 9.959135352145552e-06,
2122
+ "loss": 0.3679,
2123
+ "step": 302
2124
+ },
2125
+ {
2126
+ "epoch": 1.3710407239819005,
2127
+ "grad_norm": 0.4635688066482544,
2128
+ "learning_rate": 9.958121539639945e-06,
2129
+ "loss": 0.3835,
2130
+ "step": 303
2131
+ },
2132
+ {
2133
+ "epoch": 1.3755656108597285,
2134
+ "grad_norm": 0.4249536395072937,
2135
+ "learning_rate": 9.95709535777354e-06,
2136
+ "loss": 0.3947,
2137
+ "step": 304
2138
+ },
2139
+ {
2140
+ "epoch": 1.3800904977375565,
2141
+ "grad_norm": 0.43373027443885803,
2142
+ "learning_rate": 9.956056809106426e-06,
2143
+ "loss": 0.3603,
2144
+ "step": 305
2145
+ },
2146
+ {
2147
+ "epoch": 1.3846153846153846,
2148
+ "grad_norm": 0.45955413579940796,
2149
+ "learning_rate": 9.955005896229543e-06,
2150
+ "loss": 0.3909,
2151
+ "step": 306
2152
+ },
2153
+ {
2154
+ "epoch": 1.3891402714932126,
2155
+ "grad_norm": 0.44556981325149536,
2156
+ "learning_rate": 9.95394262176467e-06,
2157
+ "loss": 0.3668,
2158
+ "step": 307
2159
+ },
2160
+ {
2161
+ "epoch": 1.3936651583710407,
2162
+ "grad_norm": 0.4461539685726166,
2163
+ "learning_rate": 9.952866988364431e-06,
2164
+ "loss": 0.3902,
2165
+ "step": 308
2166
+ },
2167
+ {
2168
+ "epoch": 1.3981900452488687,
2169
+ "grad_norm": 0.4445544481277466,
2170
+ "learning_rate": 9.951778998712282e-06,
2171
+ "loss": 0.407,
2172
+ "step": 309
2173
+ },
2174
+ {
2175
+ "epoch": 1.4027149321266967,
2176
+ "grad_norm": 0.40608200430870056,
2177
+ "learning_rate": 9.950678655522505e-06,
2178
+ "loss": 0.3472,
2179
+ "step": 310
2180
+ },
2181
+ {
2182
+ "epoch": 1.4072398190045248,
2183
+ "grad_norm": 0.4478733241558075,
2184
+ "learning_rate": 9.9495659615402e-06,
2185
+ "loss": 0.4088,
2186
+ "step": 311
2187
+ },
2188
+ {
2189
+ "epoch": 1.4117647058823528,
2190
+ "grad_norm": 0.5241243243217468,
2191
+ "learning_rate": 9.948440919541277e-06,
2192
+ "loss": 0.3937,
2193
+ "step": 312
2194
+ },
2195
+ {
2196
+ "epoch": 1.416289592760181,
2197
+ "grad_norm": 0.44417575001716614,
2198
+ "learning_rate": 9.947303532332457e-06,
2199
+ "loss": 0.3508,
2200
+ "step": 313
2201
+ },
2202
+ {
2203
+ "epoch": 1.420814479638009,
2204
+ "grad_norm": 0.46424350142478943,
2205
+ "learning_rate": 9.946153802751257e-06,
2206
+ "loss": 0.4189,
2207
+ "step": 314
2208
+ },
2209
+ {
2210
+ "epoch": 1.4253393665158371,
2211
+ "grad_norm": 0.41135942935943604,
2212
+ "learning_rate": 9.944991733665986e-06,
2213
+ "loss": 0.3924,
2214
+ "step": 315
2215
+ },
2216
+ {
2217
+ "epoch": 1.4298642533936652,
2218
+ "grad_norm": 0.4415293037891388,
2219
+ "learning_rate": 9.943817327975732e-06,
2220
+ "loss": 0.3657,
2221
+ "step": 316
2222
+ },
2223
+ {
2224
+ "epoch": 1.4343891402714932,
2225
+ "grad_norm": 0.40911784768104553,
2226
+ "learning_rate": 9.942630588610368e-06,
2227
+ "loss": 0.383,
2228
+ "step": 317
2229
+ },
2230
+ {
2231
+ "epoch": 1.4389140271493213,
2232
+ "grad_norm": 0.43340831995010376,
2233
+ "learning_rate": 9.94143151853053e-06,
2234
+ "loss": 0.3657,
2235
+ "step": 318
2236
+ },
2237
+ {
2238
+ "epoch": 1.4434389140271493,
2239
+ "grad_norm": 0.46907198429107666,
2240
+ "learning_rate": 9.940220120727624e-06,
2241
+ "loss": 0.4239,
2242
+ "step": 319
2243
+ },
2244
+ {
2245
+ "epoch": 1.4479638009049773,
2246
+ "grad_norm": 0.4872211813926697,
2247
+ "learning_rate": 9.938996398223802e-06,
2248
+ "loss": 0.4008,
2249
+ "step": 320
2250
+ },
2251
+ {
2252
+ "epoch": 1.4524886877828054,
2253
+ "grad_norm": 0.46110352873802185,
2254
+ "learning_rate": 9.937760354071966e-06,
2255
+ "loss": 0.3792,
2256
+ "step": 321
2257
+ },
2258
+ {
2259
+ "epoch": 1.4570135746606334,
2260
+ "grad_norm": 0.46933513879776,
2261
+ "learning_rate": 9.936511991355764e-06,
2262
+ "loss": 0.3626,
2263
+ "step": 322
2264
+ },
2265
+ {
2266
+ "epoch": 1.4615384615384617,
2267
+ "grad_norm": 0.47610121965408325,
2268
+ "learning_rate": 9.935251313189564e-06,
2269
+ "loss": 0.4133,
2270
+ "step": 323
2271
+ },
2272
+ {
2273
+ "epoch": 1.4660633484162897,
2274
+ "grad_norm": 0.4467121660709381,
2275
+ "learning_rate": 9.933978322718472e-06,
2276
+ "loss": 0.4247,
2277
+ "step": 324
2278
+ },
2279
+ {
2280
+ "epoch": 1.4705882352941178,
2281
+ "grad_norm": 0.4571016728878021,
2282
+ "learning_rate": 9.932693023118299e-06,
2283
+ "loss": 0.4107,
2284
+ "step": 325
2285
+ },
2286
+ {
2287
+ "epoch": 1.4751131221719458,
2288
+ "grad_norm": 0.533100962638855,
2289
+ "learning_rate": 9.931395417595568e-06,
2290
+ "loss": 0.4091,
2291
+ "step": 326
2292
+ },
2293
+ {
2294
+ "epoch": 1.4796380090497738,
2295
+ "grad_norm": 0.4366355538368225,
2296
+ "learning_rate": 9.930085509387509e-06,
2297
+ "loss": 0.3467,
2298
+ "step": 327
2299
+ },
2300
+ {
2301
+ "epoch": 1.4841628959276019,
2302
+ "grad_norm": 0.41149038076400757,
2303
+ "learning_rate": 9.92876330176203e-06,
2304
+ "loss": 0.387,
2305
+ "step": 328
2306
+ },
2307
+ {
2308
+ "epoch": 1.48868778280543,
2309
+ "grad_norm": 0.4341404438018799,
2310
+ "learning_rate": 9.927428798017738e-06,
2311
+ "loss": 0.4285,
2312
+ "step": 329
2313
+ },
2314
+ {
2315
+ "epoch": 1.493212669683258,
2316
+ "grad_norm": 0.46218249201774597,
2317
+ "learning_rate": 9.926082001483909e-06,
2318
+ "loss": 0.4224,
2319
+ "step": 330
2320
+ },
2321
+ {
2322
+ "epoch": 1.497737556561086,
2323
+ "grad_norm": 0.42733314633369446,
2324
+ "learning_rate": 9.924722915520484e-06,
2325
+ "loss": 0.375,
2326
+ "step": 331
2327
+ },
2328
+ {
2329
+ "epoch": 1.502262443438914,
2330
+ "grad_norm": 0.4854833781719208,
2331
+ "learning_rate": 9.92335154351807e-06,
2332
+ "loss": 0.4287,
2333
+ "step": 332
2334
+ },
2335
+ {
2336
+ "epoch": 1.506787330316742,
2337
+ "grad_norm": 0.4013662040233612,
2338
+ "learning_rate": 9.92196788889792e-06,
2339
+ "loss": 0.3262,
2340
+ "step": 333
2341
+ },
2342
+ {
2343
+ "epoch": 1.51131221719457,
2344
+ "grad_norm": 0.4636831283569336,
2345
+ "learning_rate": 9.92057195511193e-06,
2346
+ "loss": 0.4486,
2347
+ "step": 334
2348
+ },
2349
+ {
2350
+ "epoch": 1.5158371040723981,
2351
+ "grad_norm": 0.3883086144924164,
2352
+ "learning_rate": 9.919163745642633e-06,
2353
+ "loss": 0.3536,
2354
+ "step": 335
2355
+ },
2356
+ {
2357
+ "epoch": 1.5203619909502262,
2358
+ "grad_norm": 0.47283634543418884,
2359
+ "learning_rate": 9.91774326400318e-06,
2360
+ "loss": 0.3475,
2361
+ "step": 336
2362
+ },
2363
+ {
2364
+ "epoch": 1.5248868778280542,
2365
+ "grad_norm": 0.5383743047714233,
2366
+ "learning_rate": 9.916310513737345e-06,
2367
+ "loss": 0.5013,
2368
+ "step": 337
2369
+ },
2370
+ {
2371
+ "epoch": 1.5294117647058822,
2372
+ "grad_norm": 0.4297215938568115,
2373
+ "learning_rate": 9.91486549841951e-06,
2374
+ "loss": 0.3726,
2375
+ "step": 338
2376
+ },
2377
+ {
2378
+ "epoch": 1.5339366515837103,
2379
+ "grad_norm": 0.5066801905632019,
2380
+ "learning_rate": 9.913408221654647e-06,
2381
+ "loss": 0.3823,
2382
+ "step": 339
2383
+ },
2384
+ {
2385
+ "epoch": 1.5384615384615383,
2386
+ "grad_norm": 0.4163861870765686,
2387
+ "learning_rate": 9.911938687078324e-06,
2388
+ "loss": 0.3728,
2389
+ "step": 340
2390
+ },
2391
+ {
2392
+ "epoch": 1.5429864253393664,
2393
+ "grad_norm": 0.46085265278816223,
2394
+ "learning_rate": 9.91045689835669e-06,
2395
+ "loss": 0.3822,
2396
+ "step": 341
2397
+ },
2398
+ {
2399
+ "epoch": 1.5475113122171946,
2400
+ "grad_norm": 0.4126754105091095,
2401
+ "learning_rate": 9.908962859186465e-06,
2402
+ "loss": 0.3634,
2403
+ "step": 342
2404
+ },
2405
+ {
2406
+ "epoch": 1.5520361990950227,
2407
+ "grad_norm": 0.4993842542171478,
2408
+ "learning_rate": 9.907456573294924e-06,
2409
+ "loss": 0.3835,
2410
+ "step": 343
2411
+ },
2412
+ {
2413
+ "epoch": 1.5565610859728507,
2414
+ "grad_norm": 0.4884220063686371,
2415
+ "learning_rate": 9.905938044439904e-06,
2416
+ "loss": 0.436,
2417
+ "step": 344
2418
+ },
2419
+ {
2420
+ "epoch": 1.5610859728506787,
2421
+ "grad_norm": 0.3982529044151306,
2422
+ "learning_rate": 9.90440727640978e-06,
2423
+ "loss": 0.389,
2424
+ "step": 345
2425
+ },
2426
+ {
2427
+ "epoch": 1.5656108597285068,
2428
+ "grad_norm": 0.4887678921222687,
2429
+ "learning_rate": 9.902864273023465e-06,
2430
+ "loss": 0.344,
2431
+ "step": 346
2432
+ },
2433
+ {
2434
+ "epoch": 1.5701357466063348,
2435
+ "grad_norm": 0.4793776869773865,
2436
+ "learning_rate": 9.901309038130392e-06,
2437
+ "loss": 0.4078,
2438
+ "step": 347
2439
+ },
2440
+ {
2441
+ "epoch": 1.5746606334841629,
2442
+ "grad_norm": 0.44910818338394165,
2443
+ "learning_rate": 9.89974157561051e-06,
2444
+ "loss": 0.3894,
2445
+ "step": 348
2446
+ },
2447
+ {
2448
+ "epoch": 1.5791855203619911,
2449
+ "grad_norm": 0.45025259256362915,
2450
+ "learning_rate": 9.898161889374272e-06,
2451
+ "loss": 0.4182,
2452
+ "step": 349
2453
+ },
2454
+ {
2455
+ "epoch": 1.5837104072398192,
2456
+ "grad_norm": 0.45233941078186035,
2457
+ "learning_rate": 9.896569983362632e-06,
2458
+ "loss": 0.3846,
2459
+ "step": 350
2460
+ },
2461
+ {
2462
+ "epoch": 1.5882352941176472,
2463
+ "grad_norm": 0.4646618664264679,
2464
+ "learning_rate": 9.894965861547023e-06,
2465
+ "loss": 0.364,
2466
+ "step": 351
2467
+ },
2468
+ {
2469
+ "epoch": 1.5927601809954752,
2470
+ "grad_norm": 0.44090190529823303,
2471
+ "learning_rate": 9.893349527929357e-06,
2472
+ "loss": 0.3949,
2473
+ "step": 352
2474
+ },
2475
+ {
2476
+ "epoch": 1.5972850678733033,
2477
+ "grad_norm": 0.5286484360694885,
2478
+ "learning_rate": 9.891720986542011e-06,
2479
+ "loss": 0.4311,
2480
+ "step": 353
2481
+ },
2482
+ {
2483
+ "epoch": 1.6018099547511313,
2484
+ "grad_norm": 0.44429534673690796,
2485
+ "learning_rate": 9.890080241447816e-06,
2486
+ "loss": 0.3906,
2487
+ "step": 354
2488
+ },
2489
+ {
2490
+ "epoch": 1.6063348416289593,
2491
+ "grad_norm": 0.5068367123603821,
2492
+ "learning_rate": 9.888427296740054e-06,
2493
+ "loss": 0.4052,
2494
+ "step": 355
2495
+ },
2496
+ {
2497
+ "epoch": 1.6108597285067874,
2498
+ "grad_norm": 0.507785975933075,
2499
+ "learning_rate": 9.886762156542428e-06,
2500
+ "loss": 0.4063,
2501
+ "step": 356
2502
+ },
2503
+ {
2504
+ "epoch": 1.6153846153846154,
2505
+ "grad_norm": 0.49715733528137207,
2506
+ "learning_rate": 9.885084825009085e-06,
2507
+ "loss": 0.3965,
2508
+ "step": 357
2509
+ },
2510
+ {
2511
+ "epoch": 1.6199095022624435,
2512
+ "grad_norm": 0.4482433497905731,
2513
+ "learning_rate": 9.883395306324575e-06,
2514
+ "loss": 0.4066,
2515
+ "step": 358
2516
+ },
2517
+ {
2518
+ "epoch": 1.6244343891402715,
2519
+ "grad_norm": 0.41335636377334595,
2520
+ "learning_rate": 9.881693604703853e-06,
2521
+ "loss": 0.3674,
2522
+ "step": 359
2523
+ },
2524
+ {
2525
+ "epoch": 1.6289592760180995,
2526
+ "grad_norm": 0.5023950934410095,
2527
+ "learning_rate": 9.87997972439227e-06,
2528
+ "loss": 0.3444,
2529
+ "step": 360
2530
+ },
2531
+ {
+ "epoch": 1.6334841628959276,
+ "grad_norm": 0.5373045802116394,
+ "learning_rate": 9.878253669665557e-06,
+ "loss": 0.3853,
+ "step": 361
+ },
+ {
+ "epoch": 1.6380090497737556,
+ "grad_norm": 0.4574486315250397,
+ "learning_rate": 9.876515444829822e-06,
+ "loss": 0.4254,
+ "step": 362
+ },
+ {
+ "epoch": 1.6425339366515836,
+ "grad_norm": 0.5090571045875549,
+ "learning_rate": 9.874765054221532e-06,
+ "loss": 0.391,
+ "step": 363
+ },
+ {
+ "epoch": 1.6470588235294117,
+ "grad_norm": 0.4528499245643616,
+ "learning_rate": 9.873002502207502e-06,
+ "loss": 0.4292,
+ "step": 364
+ },
+ {
+ "epoch": 1.6515837104072397,
+ "grad_norm": 0.4229438304901123,
+ "learning_rate": 9.871227793184893e-06,
+ "loss": 0.3829,
+ "step": 365
+ },
+ {
+ "epoch": 1.6561085972850678,
+ "grad_norm": 0.4995672404766083,
+ "learning_rate": 9.869440931581186e-06,
+ "loss": 0.3534,
+ "step": 366
+ },
+ {
+ "epoch": 1.6606334841628958,
+ "grad_norm": 0.527503490447998,
+ "learning_rate": 9.867641921854192e-06,
+ "loss": 0.4057,
+ "step": 367
+ },
+ {
+ "epoch": 1.6651583710407238,
+ "grad_norm": 0.4658501446247101,
+ "learning_rate": 9.865830768492019e-06,
+ "loss": 0.4088,
+ "step": 368
+ },
+ {
+ "epoch": 1.6696832579185519,
+ "grad_norm": 0.5441451668739319,
+ "learning_rate": 9.864007476013075e-06,
+ "loss": 0.4149,
+ "step": 369
+ },
+ {
+ "epoch": 1.6742081447963801,
+ "grad_norm": 0.48692771792411804,
+ "learning_rate": 9.862172048966048e-06,
+ "loss": 0.3818,
+ "step": 370
+ },
+ {
+ "epoch": 1.6787330316742082,
+ "grad_norm": 0.40074610710144043,
+ "learning_rate": 9.860324491929905e-06,
+ "loss": 0.3657,
+ "step": 371
+ },
+ {
+ "epoch": 1.6832579185520362,
+ "grad_norm": 0.4921663999557495,
+ "learning_rate": 9.858464809513867e-06,
+ "loss": 0.4022,
+ "step": 372
+ },
+ {
+ "epoch": 1.6877828054298643,
+ "grad_norm": 0.46872252225875854,
+ "learning_rate": 9.856593006357415e-06,
+ "loss": 0.4099,
+ "step": 373
+ },
+ {
+ "epoch": 1.6923076923076923,
+ "grad_norm": 0.5141183137893677,
+ "learning_rate": 9.854709087130261e-06,
+ "loss": 0.3961,
+ "step": 374
+ },
+ {
+ "epoch": 1.6968325791855203,
+ "grad_norm": 0.46619221568107605,
+ "learning_rate": 9.852813056532345e-06,
+ "loss": 0.3172,
+ "step": 375
+ },
+ {
+ "epoch": 1.7013574660633484,
+ "grad_norm": 0.5484828352928162,
+ "learning_rate": 9.85090491929382e-06,
+ "loss": 0.4182,
+ "step": 376
+ },
+ {
+ "epoch": 1.7058823529411766,
+ "grad_norm": 0.5394891500473022,
+ "learning_rate": 9.848984680175049e-06,
+ "loss": 0.3898,
+ "step": 377
+ },
+ {
+ "epoch": 1.7104072398190047,
+ "grad_norm": 0.48792046308517456,
+ "learning_rate": 9.84705234396658e-06,
+ "loss": 0.3373,
+ "step": 378
+ },
+ {
+ "epoch": 1.7149321266968327,
+ "grad_norm": 0.3987761437892914,
+ "learning_rate": 9.845107915489143e-06,
+ "loss": 0.334,
+ "step": 379
+ },
+ {
+ "epoch": 1.7194570135746607,
+ "grad_norm": 0.525193452835083,
+ "learning_rate": 9.843151399593636e-06,
+ "loss": 0.3718,
+ "step": 380
+ },
+ {
+ "epoch": 1.7239819004524888,
+ "grad_norm": 0.48990345001220703,
+ "learning_rate": 9.84118280116111e-06,
+ "loss": 0.3763,
+ "step": 381
+ },
+ {
+ "epoch": 1.7285067873303168,
+ "grad_norm": 0.4889325797557831,
+ "learning_rate": 9.839202125102761e-06,
+ "loss": 0.3785,
+ "step": 382
+ },
+ {
+ "epoch": 1.7330316742081449,
+ "grad_norm": 0.5154342651367188,
+ "learning_rate": 9.837209376359918e-06,
+ "loss": 0.3883,
+ "step": 383
+ },
+ {
+ "epoch": 1.737556561085973,
+ "grad_norm": 0.5224171280860901,
+ "learning_rate": 9.835204559904021e-06,
+ "loss": 0.3666,
+ "step": 384
+ },
+ {
+ "epoch": 1.742081447963801,
+ "grad_norm": 0.5012906193733215,
+ "learning_rate": 9.833187680736624e-06,
+ "loss": 0.4093,
+ "step": 385
+ },
+ {
+ "epoch": 1.746606334841629,
+ "grad_norm": 0.5155039429664612,
+ "learning_rate": 9.831158743889373e-06,
+ "loss": 0.3768,
+ "step": 386
+ },
+ {
+ "epoch": 1.751131221719457,
+ "grad_norm": 0.44377192854881287,
+ "learning_rate": 9.829117754423991e-06,
+ "loss": 0.4136,
+ "step": 387
+ },
+ {
+ "epoch": 1.755656108597285,
+ "grad_norm": 0.6181144714355469,
+ "learning_rate": 9.827064717432272e-06,
+ "loss": 0.4698,
+ "step": 388
+ },
+ {
+ "epoch": 1.760180995475113,
+ "grad_norm": 0.5261528491973877,
+ "learning_rate": 9.82499963803607e-06,
+ "loss": 0.3924,
+ "step": 389
+ },
+ {
+ "epoch": 1.7647058823529411,
+ "grad_norm": 0.43123725056648254,
+ "learning_rate": 9.822922521387277e-06,
+ "loss": 0.4207,
+ "step": 390
+ },
+ {
+ "epoch": 1.7692307692307692,
+ "grad_norm": 0.4998953342437744,
+ "learning_rate": 9.820833372667813e-06,
+ "loss": 0.3933,
+ "step": 391
+ },
+ {
+ "epoch": 1.7737556561085972,
+ "grad_norm": 0.5654891133308411,
+ "learning_rate": 9.81873219708962e-06,
+ "loss": 0.3732,
+ "step": 392
+ },
+ {
+ "epoch": 1.7782805429864252,
+ "grad_norm": 0.4899405539035797,
+ "learning_rate": 9.816618999894645e-06,
+ "loss": 0.3752,
+ "step": 393
+ },
+ {
+ "epoch": 1.7828054298642533,
+ "grad_norm": 0.5240674614906311,
+ "learning_rate": 9.81449378635482e-06,
+ "loss": 0.4596,
+ "step": 394
+ },
+ {
+ "epoch": 1.7873303167420813,
+ "grad_norm": 0.46099403500556946,
+ "learning_rate": 9.81235656177206e-06,
+ "loss": 0.3748,
+ "step": 395
+ },
+ {
+ "epoch": 1.7918552036199094,
+ "grad_norm": 0.46495547890663147,
+ "learning_rate": 9.810207331478247e-06,
+ "loss": 0.4091,
+ "step": 396
+ },
+ {
+ "epoch": 1.7963800904977374,
+ "grad_norm": 0.5413225293159485,
+ "learning_rate": 9.808046100835207e-06,
+ "loss": 0.3903,
+ "step": 397
+ },
+ {
+ "epoch": 1.8009049773755657,
+ "grad_norm": 0.5968965888023376,
+ "learning_rate": 9.80587287523471e-06,
+ "loss": 0.3925,
+ "step": 398
+ },
+ {
+ "epoch": 1.8054298642533937,
+ "grad_norm": 0.46063658595085144,
+ "learning_rate": 9.803687660098446e-06,
+ "loss": 0.4055,
+ "step": 399
+ },
+ {
+ "epoch": 1.8099547511312217,
+ "grad_norm": 0.4856792390346527,
+ "learning_rate": 9.801490460878023e-06,
+ "loss": 0.3594,
+ "step": 400
+ },
+ {
+ "epoch": 1.8144796380090498,
+ "grad_norm": 0.4093053638935089,
+ "learning_rate": 9.79928128305494e-06,
+ "loss": 0.353,
+ "step": 401
+ },
+ {
+ "epoch": 1.8190045248868778,
+ "grad_norm": 0.5286359786987305,
+ "learning_rate": 9.797060132140584e-06,
+ "loss": 0.3798,
+ "step": 402
+ },
+ {
+ "epoch": 1.8235294117647058,
+ "grad_norm": 0.49250075221061707,
+ "learning_rate": 9.794827013676206e-06,
+ "loss": 0.412,
+ "step": 403
+ },
+ {
+ "epoch": 1.8280542986425339,
+ "grad_norm": 0.43814513087272644,
+ "learning_rate": 9.792581933232924e-06,
+ "loss": 0.386,
+ "step": 404
+ },
+ {
+ "epoch": 1.8325791855203621,
+ "grad_norm": 0.4019566774368286,
+ "learning_rate": 9.790324896411684e-06,
+ "loss": 0.3668,
+ "step": 405
+ },
+ {
+ "epoch": 1.8371040723981902,
+ "grad_norm": 0.44960689544677734,
+ "learning_rate": 9.788055908843275e-06,
+ "loss": 0.3761,
+ "step": 406
+ },
+ {
+ "epoch": 1.8416289592760182,
+ "grad_norm": 0.40887507796287537,
+ "learning_rate": 9.78577497618829e-06,
+ "loss": 0.3824,
+ "step": 407
+ },
+ {
+ "epoch": 1.8461538461538463,
+ "grad_norm": 0.5183314085006714,
+ "learning_rate": 9.783482104137127e-06,
+ "loss": 0.3466,
+ "step": 408
+ },
+ {
+ "epoch": 1.8506787330316743,
+ "grad_norm": 0.4178057312965393,
+ "learning_rate": 9.781177298409969e-06,
+ "loss": 0.3428,
+ "step": 409
+ },
+ {
+ "epoch": 1.8552036199095023,
+ "grad_norm": 0.45295459032058716,
+ "learning_rate": 9.778860564756769e-06,
+ "loss": 0.3886,
+ "step": 410
+ },
+ {
+ "epoch": 1.8597285067873304,
+ "grad_norm": 0.5001879334449768,
+ "learning_rate": 9.776531908957241e-06,
+ "loss": 0.4219,
+ "step": 411
+ },
+ {
+ "epoch": 1.8642533936651584,
+ "grad_norm": 0.47920331358909607,
+ "learning_rate": 9.77419133682084e-06,
+ "loss": 0.4203,
+ "step": 412
+ },
+ {
+ "epoch": 1.8687782805429864,
+ "grad_norm": 0.48827502131462097,
+ "learning_rate": 9.771838854186748e-06,
+ "loss": 0.4053,
+ "step": 413
+ },
+ {
+ "epoch": 1.8733031674208145,
+ "grad_norm": 0.42105311155319214,
+ "learning_rate": 9.769474466923864e-06,
+ "loss": 0.3749,
+ "step": 414
+ },
+ {
+ "epoch": 1.8778280542986425,
+ "grad_norm": 0.41811907291412354,
+ "learning_rate": 9.76709818093078e-06,
+ "loss": 0.3489,
+ "step": 415
+ },
+ {
+ "epoch": 1.8823529411764706,
+ "grad_norm": 0.42248275876045227,
+ "learning_rate": 9.764710002135784e-06,
+ "loss": 0.4341,
+ "step": 416
+ },
+ {
+ "epoch": 1.8868778280542986,
+ "grad_norm": 0.4599759578704834,
+ "learning_rate": 9.762309936496824e-06,
+ "loss": 0.3931,
+ "step": 417
+ },
+ {
+ "epoch": 1.8914027149321266,
+ "grad_norm": 0.47709447145462036,
+ "learning_rate": 9.759897990001504e-06,
+ "loss": 0.3697,
+ "step": 418
+ },
+ {
+ "epoch": 1.8959276018099547,
+ "grad_norm": 0.44299864768981934,
+ "learning_rate": 9.757474168667072e-06,
+ "loss": 0.4235,
+ "step": 419
+ },
+ {
+ "epoch": 1.9004524886877827,
+ "grad_norm": 0.45147445797920227,
+ "learning_rate": 9.7550384785404e-06,
+ "loss": 0.3915,
+ "step": 420
+ },
+ {
+ "epoch": 1.9049773755656108,
+ "grad_norm": 0.4743190109729767,
+ "learning_rate": 9.752590925697964e-06,
+ "loss": 0.4003,
+ "step": 421
+ },
+ {
+ "epoch": 1.9095022624434388,
+ "grad_norm": 0.515183687210083,
+ "learning_rate": 9.750131516245844e-06,
+ "loss": 0.4147,
+ "step": 422
+ },
+ {
+ "epoch": 1.9140271493212668,
+ "grad_norm": 0.4393065273761749,
+ "learning_rate": 9.747660256319695e-06,
+ "loss": 0.4065,
+ "step": 423
+ },
+ {
+ "epoch": 1.9185520361990949,
+ "grad_norm": 0.4030460715293884,
+ "learning_rate": 9.745177152084733e-06,
+ "loss": 0.3824,
+ "step": 424
+ },
+ {
+ "epoch": 1.9230769230769231,
+ "grad_norm": 0.47524887323379517,
+ "learning_rate": 9.742682209735727e-06,
+ "loss": 0.4089,
+ "step": 425
+ },
+ {
+ "epoch": 1.9276018099547512,
+ "grad_norm": 0.4686073660850525,
+ "learning_rate": 9.74017543549698e-06,
+ "loss": 0.3935,
+ "step": 426
+ },
+ {
+ "epoch": 1.9321266968325792,
+ "grad_norm": 0.3981013894081116,
+ "learning_rate": 9.737656835622312e-06,
+ "loss": 0.3755,
+ "step": 427
+ },
+ {
+ "epoch": 1.9366515837104072,
+ "grad_norm": 0.46402475237846375,
+ "learning_rate": 9.73512641639504e-06,
+ "loss": 0.3673,
+ "step": 428
+ },
+ {
+ "epoch": 1.9411764705882353,
+ "grad_norm": 0.502984881401062,
+ "learning_rate": 9.732584184127973e-06,
+ "loss": 0.3981,
+ "step": 429
+ },
+ {
+ "epoch": 1.9457013574660633,
+ "grad_norm": 0.42585644125938416,
+ "learning_rate": 9.730030145163392e-06,
+ "loss": 0.4047,
+ "step": 430
+ },
+ {
+ "epoch": 1.9502262443438914,
+ "grad_norm": 0.39187467098236084,
+ "learning_rate": 9.72746430587303e-06,
+ "loss": 0.391,
+ "step": 431
+ },
+ {
+ "epoch": 1.9547511312217196,
+ "grad_norm": 0.3843924105167389,
+ "learning_rate": 9.724886672658056e-06,
+ "loss": 0.4038,
+ "step": 432
+ },
+ {
+ "epoch": 1.9592760180995477,
+ "grad_norm": 0.47524020075798035,
+ "learning_rate": 9.72229725194907e-06,
+ "loss": 0.4075,
+ "step": 433
+ },
+ {
+ "epoch": 1.9638009049773757,
+ "grad_norm": 0.46401557326316833,
+ "learning_rate": 9.719696050206072e-06,
+ "loss": 0.3564,
+ "step": 434
+ },
+ {
+ "epoch": 1.9683257918552037,
+ "grad_norm": 0.4504929482936859,
+ "learning_rate": 9.717083073918459e-06,
+ "loss": 0.401,
+ "step": 435
+ },
+ {
+ "epoch": 1.9728506787330318,
+ "grad_norm": 0.4272751212120056,
+ "learning_rate": 9.714458329604997e-06,
+ "loss": 0.4096,
+ "step": 436
+ },
+ {
+ "epoch": 1.9773755656108598,
+ "grad_norm": 0.4029974341392517,
+ "learning_rate": 9.711821823813812e-06,
+ "loss": 0.4362,
+ "step": 437
+ },
+ {
+ "epoch": 1.9819004524886878,
+ "grad_norm": 0.4730750322341919,
+ "learning_rate": 9.709173563122377e-06,
+ "loss": 0.3892,
+ "step": 438
+ },
+ {
+ "epoch": 1.9864253393665159,
+ "grad_norm": 0.4282892644405365,
+ "learning_rate": 9.706513554137482e-06,
+ "loss": 0.4198,
+ "step": 439
+ },
+ {
+ "epoch": 1.990950226244344,
+ "grad_norm": 0.4743562340736389,
+ "learning_rate": 9.703841803495234e-06,
+ "loss": 0.4141,
+ "step": 440
+ },
+ {
+ "epoch": 1.995475113122172,
+ "grad_norm": 0.4112869203090668,
+ "learning_rate": 9.701158317861031e-06,
+ "loss": 0.3836,
+ "step": 441
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.41886061429977417,
+ "learning_rate": 9.698463103929542e-06,
+ "loss": 0.3317,
+ "step": 442
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 2210,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 10,
+ "save_steps": 442,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 90814059528192.0,
+ "train_batch_size": 1,
+ "trial_name": null,
+ "trial_params": null
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3043921de5ed0b9eee1ff8eb6ce6ddc9116b2cac0d219cd3f349af26a1ec10ae
+ size 7672
vocab.json ADDED
The diff for this file is too large to render. See raw diff