rica40325 committed
Commit 56fbe27 · verified · 1 Parent(s): dc28823

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json ADDED
@@ -0,0 +1,26 @@
+{
+  "architectures": [
+    "MistralForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 1024000,
+  "model_type": "mistral",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 40,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-05,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.51.3",
+  "use_cache": false,
+  "vocab_size": 131072
+}
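The config above describes a 40-layer Mistral-style decoder (hidden size 5120, GQA with 8 KV heads, bf16 weights totalling ~24.5 GB across the five shards below). A minimal sketch of inspecting and loading it with transformers, assuming the uploaded folder has been downloaded locally (the `./model` path is a placeholder, not part of this commit):

```python
from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder path: point this at a local clone/download of the uploaded folder.
config = AutoConfig.from_pretrained("./model")
print(config.model_type, config.num_hidden_layers, config.hidden_size)  # mistral 40 5120

# Loading the weights needs roughly 25 GB of memory in bfloat16;
# device_map="auto" (requires the accelerate package) spreads the five shards
# across whatever devices are available.
model = AutoModelForCausalLM.from_pretrained(
    "./model",
    torch_dtype="bfloat16",
    device_map="auto",
)
```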
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.51.3"
+}
model-00001-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:719b52515e18e4187666f5e5cfe6297ca699308fbdb6fd8fdd795ef11bb0cb75
+size 4865522496
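Each `.safetensors` entry in this commit is stored as a Git LFS pointer: a three-line stub recording the spec version, the SHA-256 of the real blob (`oid`), and its byte size, while the blob itself lives in LFS storage. A small sketch for checking a downloaded shard against its pointer (the `verify_lfs_object` helper is hypothetical, not part of the repo):

```python
import hashlib

def verify_lfs_object(path, expected_sha256, expected_size):
    """Hash a downloaded file and compare it to the oid/size from its LFS pointer."""
    h = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest() == expected_sha256 and size == expected_size

ok = verify_lfs_object(
    "model-00001-of-00005.safetensors",
    "719b52515e18e4187666f5e5cfe6297ca699308fbdb6fd8fdd795ef11bb0cb75",
    4865522496,
)
print(ok)
```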
model-00002-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d7470034a5cb1db5a69012715b2dc2953fe926875ef8e4a94d306c02af850c88
+size 4907529424
model-00003-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:68fc611e2046025a65f31a8e8491965438aa0671ec66448375a8dbbc67999575
+size 4907529456
model-00004-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b807419c2174c381f1368277be7fa2f3337b3e83946cd5402dea841c70dcd2d
+size 4907529456
model-00005-of-00005.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:03527b3513f4aee24d1bb7969ba1748db8d1139e5c2c33c11f2d4a9a3d550ee2
+size 4907496272
model.safetensors.index.json ADDED
@@ -0,0 +1,370 @@
+{
+  "metadata": {
+    "total_size": 24495564800
+  },
+  "weight_map": {
+    "lm_head.weight": "model-00005-of-00005.safetensors",
+    "model.embed_tokens.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.15.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.16.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.16.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.16.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.20.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.20.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.20.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.24.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
+    "model.layers.25.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.25.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.25.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.30.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.30.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.30.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.30.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.31.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.input_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.33.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
+    "model.layers.34.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.34.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.34.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.input_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.39.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
+    "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.6.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
+    "model.layers.7.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.7.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.7.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.input_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
+    "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
+    "model.norm.weight": "model-00005-of-00005.safetensors"
+  }
+}
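The index's `weight_map` tells loaders which of the five shards holds each tensor, and `metadata.total_size` (24,495,564,800 bytes) is the summed size of all weights. A sketch, assuming the shards sit next to the index in a local folder, of resolving one tensor by hand with the `safetensors` library (transformers does the equivalent internally when it finds this index):

```python
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.15.mlp.gate_proj.weight"
shard = index["weight_map"][name]  # -> "model-00002-of-00005.safetensors"

# Open only the shard that actually contains the tensor and read it lazily.
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape), tensor.dtype)
```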
special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "</s>",
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
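The map reuses `</s>` as the padding token, which is common for Mistral-style checkpoints that ship without a dedicated pad token. A quick sketch (again assuming a local download at a placeholder `./model` path) of confirming the tokenizer's special tokens line up with the `bos_token_id`/`eos_token_id` in config.json:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./model")  # placeholder local path
print(tok.bos_token, tok.bos_token_id)  # expected: <s> 1
print(tok.eos_token, tok.eos_token_id)  # expected: </s> 2
print(tok.pad_token, tok.pad_token_id)  # expected: </s> 2, per special_tokens_map.json
```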
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b0240ce510f08e6c2041724e9043e33be9d251d1e4a4d94eb68cd47b954b61d2
+size 17078292
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
trainer_state.json ADDED
@@ -0,0 +1,2134 @@
1
+ {
2
+ "best_global_step": null,
3
+ "best_metric": null,
4
+ "best_model_checkpoint": null,
5
+ "epoch": 0.6954505940307157,
6
+ "eval_steps": 500,
7
+ "global_step": 300,
8
+ "is_hyper_param_search": false,
9
+ "is_local_process_zero": true,
10
+ "is_world_process_zero": true,
11
+ "log_history": [
12
+ {
13
+ "epoch": 0.0023181686467690526,
14
+ "grad_norm": 28.501070022583008,
15
+ "learning_rate": 2e-05,
16
+ "loss": 2.6862,
17
+ "step": 1
18
+ },
19
+ {
20
+ "epoch": 0.004636337293538105,
21
+ "grad_norm": 2.4246978759765625,
22
+ "learning_rate": 1.998453209590101e-05,
23
+ "loss": 1.2503,
24
+ "step": 2
25
+ },
26
+ {
27
+ "epoch": 0.006954505940307157,
28
+ "grad_norm": 8.621129989624023,
29
+ "learning_rate": 1.996906419180201e-05,
30
+ "loss": 1.9701,
31
+ "step": 3
32
+ },
33
+ {
34
+ "epoch": 0.00927267458707621,
35
+ "grad_norm": 2.628929853439331,
36
+ "learning_rate": 1.995359628770302e-05,
37
+ "loss": 1.4985,
38
+ "step": 4
39
+ },
40
+ {
41
+ "epoch": 0.011590843233845263,
42
+ "grad_norm": 5.25876522064209,
43
+ "learning_rate": 1.993812838360402e-05,
44
+ "loss": 1.3525,
45
+ "step": 5
46
+ },
47
+ {
48
+ "epoch": 0.013909011880614315,
49
+ "grad_norm": 1.959221363067627,
50
+ "learning_rate": 1.992266047950503e-05,
51
+ "loss": 1.2634,
52
+ "step": 6
53
+ },
54
+ {
55
+ "epoch": 0.016227180527383367,
56
+ "grad_norm": 4.436636447906494,
57
+ "learning_rate": 1.9907192575406035e-05,
58
+ "loss": 1.3113,
59
+ "step": 7
60
+ },
61
+ {
62
+ "epoch": 0.01854534917415242,
63
+ "grad_norm": 2.8270750045776367,
64
+ "learning_rate": 1.989172467130704e-05,
65
+ "loss": 1.2802,
66
+ "step": 8
67
+ },
68
+ {
69
+ "epoch": 0.02086351782092147,
70
+ "grad_norm": 1.8319437503814697,
71
+ "learning_rate": 1.9876256767208045e-05,
72
+ "loss": 1.2364,
73
+ "step": 9
74
+ },
75
+ {
76
+ "epoch": 0.023181686467690525,
77
+ "grad_norm": 1.2635818719863892,
78
+ "learning_rate": 1.986078886310905e-05,
79
+ "loss": 1.2281,
80
+ "step": 10
81
+ },
82
+ {
83
+ "epoch": 0.025499855114459576,
84
+ "grad_norm": 1.3797353506088257,
85
+ "learning_rate": 1.9845320959010055e-05,
86
+ "loss": 1.2345,
87
+ "step": 11
88
+ },
89
+ {
90
+ "epoch": 0.02781802376122863,
91
+ "grad_norm": 1.020139455795288,
92
+ "learning_rate": 1.982985305491106e-05,
93
+ "loss": 1.2106,
94
+ "step": 12
95
+ },
96
+ {
97
+ "epoch": 0.030136192407997683,
98
+ "grad_norm": 1.2911367416381836,
99
+ "learning_rate": 1.9814385150812065e-05,
100
+ "loss": 1.2277,
101
+ "step": 13
102
+ },
103
+ {
104
+ "epoch": 0.032454361054766734,
105
+ "grad_norm": 1.878492832183838,
106
+ "learning_rate": 1.979891724671307e-05,
107
+ "loss": 1.1971,
108
+ "step": 14
109
+ },
110
+ {
111
+ "epoch": 0.034772529701535784,
112
+ "grad_norm": 1.455005407333374,
113
+ "learning_rate": 1.978344934261408e-05,
114
+ "loss": 1.1909,
115
+ "step": 15
116
+ },
117
+ {
118
+ "epoch": 0.03709069834830484,
119
+ "grad_norm": 1.5538443326950073,
120
+ "learning_rate": 1.976798143851508e-05,
121
+ "loss": 1.227,
122
+ "step": 16
123
+ },
124
+ {
125
+ "epoch": 0.03940886699507389,
126
+ "grad_norm": 1.4034634828567505,
127
+ "learning_rate": 1.975251353441609e-05,
128
+ "loss": 1.1373,
129
+ "step": 17
130
+ },
131
+ {
132
+ "epoch": 0.04172703564184294,
133
+ "grad_norm": 1.3893576860427856,
134
+ "learning_rate": 1.973704563031709e-05,
135
+ "loss": 1.178,
136
+ "step": 18
137
+ },
138
+ {
139
+ "epoch": 0.044045204288612,
140
+ "grad_norm": 1.352433204650879,
141
+ "learning_rate": 1.97215777262181e-05,
142
+ "loss": 1.163,
143
+ "step": 19
144
+ },
145
+ {
146
+ "epoch": 0.04636337293538105,
147
+ "grad_norm": 1.179675579071045,
148
+ "learning_rate": 1.9706109822119105e-05,
149
+ "loss": 1.1223,
150
+ "step": 20
151
+ },
152
+ {
153
+ "epoch": 0.0486815415821501,
154
+ "grad_norm": 1.208991289138794,
155
+ "learning_rate": 1.9690641918020112e-05,
156
+ "loss": 1.1867,
157
+ "step": 21
158
+ },
159
+ {
160
+ "epoch": 0.05099971022891915,
161
+ "grad_norm": 0.8430147171020508,
162
+ "learning_rate": 1.9675174013921115e-05,
163
+ "loss": 1.1488,
164
+ "step": 22
165
+ },
166
+ {
167
+ "epoch": 0.05331787887568821,
168
+ "grad_norm": 0.7868801355361938,
169
+ "learning_rate": 1.9659706109822122e-05,
170
+ "loss": 1.1035,
171
+ "step": 23
172
+ },
173
+ {
174
+ "epoch": 0.05563604752245726,
175
+ "grad_norm": 0.9440180063247681,
176
+ "learning_rate": 1.9644238205723125e-05,
177
+ "loss": 1.1582,
178
+ "step": 24
179
+ },
180
+ {
181
+ "epoch": 0.05795421616922631,
182
+ "grad_norm": 1.0409374237060547,
183
+ "learning_rate": 1.9628770301624132e-05,
184
+ "loss": 1.1304,
185
+ "step": 25
186
+ },
187
+ {
188
+ "epoch": 0.06027238481599537,
189
+ "grad_norm": 1.4376304149627686,
190
+ "learning_rate": 1.961330239752514e-05,
191
+ "loss": 1.1535,
192
+ "step": 26
193
+ },
194
+ {
195
+ "epoch": 0.06259055346276442,
196
+ "grad_norm": 0.7775300741195679,
197
+ "learning_rate": 1.9597834493426142e-05,
198
+ "loss": 1.0941,
199
+ "step": 27
200
+ },
201
+ {
202
+ "epoch": 0.06490872210953347,
203
+ "grad_norm": 1.294155478477478,
204
+ "learning_rate": 1.958236658932715e-05,
205
+ "loss": 1.1859,
206
+ "step": 28
207
+ },
208
+ {
209
+ "epoch": 0.06722689075630252,
210
+ "grad_norm": 1.0435543060302734,
211
+ "learning_rate": 1.9566898685228152e-05,
212
+ "loss": 1.147,
213
+ "step": 29
214
+ },
215
+ {
216
+ "epoch": 0.06954505940307157,
217
+ "grad_norm": 1.1211307048797607,
218
+ "learning_rate": 1.955143078112916e-05,
219
+ "loss": 1.1148,
220
+ "step": 30
221
+ },
222
+ {
223
+ "epoch": 0.07186322804984063,
224
+ "grad_norm": 0.9777933955192566,
225
+ "learning_rate": 1.9535962877030165e-05,
226
+ "loss": 1.1266,
227
+ "step": 31
228
+ },
229
+ {
230
+ "epoch": 0.07418139669660968,
231
+ "grad_norm": 0.9920445680618286,
232
+ "learning_rate": 1.952049497293117e-05,
233
+ "loss": 1.1535,
234
+ "step": 32
235
+ },
236
+ {
237
+ "epoch": 0.07649956534337873,
238
+ "grad_norm": 0.7202315330505371,
239
+ "learning_rate": 1.9505027068832175e-05,
240
+ "loss": 1.139,
241
+ "step": 33
242
+ },
243
+ {
244
+ "epoch": 0.07881773399014778,
245
+ "grad_norm": 0.6207343339920044,
246
+ "learning_rate": 1.9489559164733182e-05,
247
+ "loss": 1.1105,
248
+ "step": 34
249
+ },
250
+ {
251
+ "epoch": 0.08113590263691683,
252
+ "grad_norm": 1.078873634338379,
253
+ "learning_rate": 1.9474091260634185e-05,
254
+ "loss": 1.1407,
255
+ "step": 35
256
+ },
257
+ {
258
+ "epoch": 0.08345407128368589,
259
+ "grad_norm": 0.9423937201499939,
260
+ "learning_rate": 1.9458623356535192e-05,
261
+ "loss": 1.145,
262
+ "step": 36
263
+ },
264
+ {
265
+ "epoch": 0.08577223993045494,
266
+ "grad_norm": 0.8272204399108887,
267
+ "learning_rate": 1.9443155452436195e-05,
268
+ "loss": 1.1141,
269
+ "step": 37
270
+ },
271
+ {
272
+ "epoch": 0.088090408577224,
273
+ "grad_norm": 0.9147409796714783,
274
+ "learning_rate": 1.9427687548337202e-05,
275
+ "loss": 1.1113,
276
+ "step": 38
277
+ },
278
+ {
279
+ "epoch": 0.09040857722399305,
280
+ "grad_norm": 1.4252681732177734,
281
+ "learning_rate": 1.941221964423821e-05,
282
+ "loss": 1.1633,
283
+ "step": 39
284
+ },
285
+ {
286
+ "epoch": 0.0927267458707621,
287
+ "grad_norm": 0.8701033592224121,
288
+ "learning_rate": 1.9396751740139212e-05,
289
+ "loss": 1.1243,
290
+ "step": 40
291
+ },
292
+ {
293
+ "epoch": 0.09504491451753115,
294
+ "grad_norm": 0.9681833386421204,
295
+ "learning_rate": 1.938128383604022e-05,
296
+ "loss": 1.1518,
297
+ "step": 41
298
+ },
299
+ {
300
+ "epoch": 0.0973630831643002,
301
+ "grad_norm": 1.1157395839691162,
302
+ "learning_rate": 1.9365815931941222e-05,
303
+ "loss": 1.1178,
304
+ "step": 42
305
+ },
306
+ {
307
+ "epoch": 0.09968125181106925,
308
+ "grad_norm": 0.7797672152519226,
309
+ "learning_rate": 1.935034802784223e-05,
310
+ "loss": 1.0887,
311
+ "step": 43
312
+ },
313
+ {
314
+ "epoch": 0.1019994204578383,
315
+ "grad_norm": 1.3890780210494995,
316
+ "learning_rate": 1.9334880123743235e-05,
317
+ "loss": 1.1307,
318
+ "step": 44
319
+ },
320
+ {
321
+ "epoch": 0.10431758910460737,
322
+ "grad_norm": 0.8613622784614563,
323
+ "learning_rate": 1.9319412219644242e-05,
324
+ "loss": 1.1662,
325
+ "step": 45
326
+ },
327
+ {
328
+ "epoch": 0.10663575775137642,
329
+ "grad_norm": 1.0830810070037842,
330
+ "learning_rate": 1.9303944315545245e-05,
331
+ "loss": 1.1796,
332
+ "step": 46
333
+ },
334
+ {
335
+ "epoch": 0.10895392639814547,
336
+ "grad_norm": 0.8859057426452637,
337
+ "learning_rate": 1.9288476411446252e-05,
338
+ "loss": 1.1584,
339
+ "step": 47
340
+ },
341
+ {
342
+ "epoch": 0.11127209504491452,
343
+ "grad_norm": 0.6870171427726746,
344
+ "learning_rate": 1.9273008507347255e-05,
345
+ "loss": 1.1035,
346
+ "step": 48
347
+ },
348
+ {
349
+ "epoch": 0.11359026369168357,
350
+ "grad_norm": 1.181731104850769,
351
+ "learning_rate": 1.9257540603248262e-05,
352
+ "loss": 1.1241,
353
+ "step": 49
354
+ },
355
+ {
356
+ "epoch": 0.11590843233845262,
357
+ "grad_norm": 1.8238871097564697,
358
+ "learning_rate": 1.924207269914927e-05,
359
+ "loss": 1.2359,
360
+ "step": 50
361
+ },
362
+ {
363
+ "epoch": 0.11822660098522167,
364
+ "grad_norm": 0.9966158270835876,
365
+ "learning_rate": 1.9226604795050272e-05,
366
+ "loss": 1.1387,
367
+ "step": 51
368
+ },
369
+ {
370
+ "epoch": 0.12054476963199073,
371
+ "grad_norm": 0.9722117185592651,
372
+ "learning_rate": 1.921113689095128e-05,
373
+ "loss": 1.1725,
374
+ "step": 52
375
+ },
376
+ {
377
+ "epoch": 0.12286293827875978,
378
+ "grad_norm": 1.1024895906448364,
379
+ "learning_rate": 1.9195668986852282e-05,
380
+ "loss": 1.1424,
381
+ "step": 53
382
+ },
383
+ {
384
+ "epoch": 0.12518110692552883,
385
+ "grad_norm": 1.157772421836853,
386
+ "learning_rate": 1.918020108275329e-05,
387
+ "loss": 1.1627,
388
+ "step": 54
389
+ },
390
+ {
391
+ "epoch": 0.12749927557229787,
392
+ "grad_norm": 1.0060945749282837,
393
+ "learning_rate": 1.9164733178654292e-05,
394
+ "loss": 1.1714,
395
+ "step": 55
396
+ },
397
+ {
398
+ "epoch": 0.12981744421906694,
399
+ "grad_norm": 1.3089208602905273,
400
+ "learning_rate": 1.91492652745553e-05,
401
+ "loss": 1.1394,
402
+ "step": 56
403
+ },
404
+ {
405
+ "epoch": 0.132135612865836,
406
+ "grad_norm": 1.0314549207687378,
407
+ "learning_rate": 1.9133797370456305e-05,
408
+ "loss": 1.1214,
409
+ "step": 57
410
+ },
411
+ {
412
+ "epoch": 0.13445378151260504,
413
+ "grad_norm": 0.848561704158783,
414
+ "learning_rate": 1.9118329466357312e-05,
415
+ "loss": 1.1364,
416
+ "step": 58
417
+ },
418
+ {
419
+ "epoch": 0.1367719501593741,
420
+ "grad_norm": 1.1316096782684326,
421
+ "learning_rate": 1.9102861562258315e-05,
422
+ "loss": 1.0673,
423
+ "step": 59
424
+ },
425
+ {
426
+ "epoch": 0.13909011880614314,
427
+ "grad_norm": 0.910849928855896,
428
+ "learning_rate": 1.9087393658159322e-05,
429
+ "loss": 1.0848,
430
+ "step": 60
431
+ },
432
+ {
433
+ "epoch": 0.1414082874529122,
434
+ "grad_norm": 1.2117191553115845,
435
+ "learning_rate": 1.9071925754060325e-05,
436
+ "loss": 1.2018,
437
+ "step": 61
438
+ },
439
+ {
440
+ "epoch": 0.14372645609968127,
441
+ "grad_norm": 1.3396297693252563,
442
+ "learning_rate": 1.9056457849961332e-05,
443
+ "loss": 1.1773,
444
+ "step": 62
445
+ },
446
+ {
447
+ "epoch": 0.1460446247464503,
448
+ "grad_norm": 1.0479182004928589,
449
+ "learning_rate": 1.904098994586234e-05,
450
+ "loss": 1.1557,
451
+ "step": 63
452
+ },
453
+ {
454
+ "epoch": 0.14836279339321937,
455
+ "grad_norm": 1.0438235998153687,
456
+ "learning_rate": 1.9025522041763342e-05,
457
+ "loss": 1.0985,
458
+ "step": 64
459
+ },
460
+ {
461
+ "epoch": 0.1506809620399884,
462
+ "grad_norm": 1.3567838668823242,
463
+ "learning_rate": 1.901005413766435e-05,
464
+ "loss": 1.0874,
465
+ "step": 65
466
+ },
467
+ {
468
+ "epoch": 0.15299913068675747,
469
+ "grad_norm": 1.0052990913391113,
470
+ "learning_rate": 1.8994586233565352e-05,
471
+ "loss": 1.1008,
472
+ "step": 66
473
+ },
474
+ {
475
+ "epoch": 0.1553172993335265,
476
+ "grad_norm": 1.06718111038208,
477
+ "learning_rate": 1.897911832946636e-05,
478
+ "loss": 1.1004,
479
+ "step": 67
480
+ },
481
+ {
482
+ "epoch": 0.15763546798029557,
483
+ "grad_norm": 1.326567530632019,
484
+ "learning_rate": 1.8963650425367365e-05,
485
+ "loss": 1.1033,
486
+ "step": 68
487
+ },
488
+ {
489
+ "epoch": 0.15995363662706463,
490
+ "grad_norm": 1.1070104837417603,
491
+ "learning_rate": 1.894818252126837e-05,
492
+ "loss": 1.1287,
493
+ "step": 69
494
+ },
495
+ {
496
+ "epoch": 0.16227180527383367,
497
+ "grad_norm": 1.0842565298080444,
498
+ "learning_rate": 1.8932714617169375e-05,
499
+ "loss": 1.1005,
500
+ "step": 70
501
+ },
502
+ {
503
+ "epoch": 0.16458997392060273,
504
+ "grad_norm": 0.9498984813690186,
505
+ "learning_rate": 1.8917246713070382e-05,
506
+ "loss": 1.0714,
507
+ "step": 71
508
+ },
509
+ {
510
+ "epoch": 0.16690814256737177,
511
+ "grad_norm": 1.0374786853790283,
512
+ "learning_rate": 1.8901778808971385e-05,
513
+ "loss": 1.1789,
514
+ "step": 72
515
+ },
516
+ {
517
+ "epoch": 0.16922631121414083,
518
+ "grad_norm": 0.8770291209220886,
519
+ "learning_rate": 1.8886310904872392e-05,
520
+ "loss": 1.087,
521
+ "step": 73
522
+ },
523
+ {
524
+ "epoch": 0.17154447986090987,
525
+ "grad_norm": 1.4603461027145386,
526
+ "learning_rate": 1.88708430007734e-05,
527
+ "loss": 1.1119,
528
+ "step": 74
529
+ },
530
+ {
531
+ "epoch": 0.17386264850767894,
532
+ "grad_norm": 1.0779688358306885,
533
+ "learning_rate": 1.8855375096674402e-05,
534
+ "loss": 1.1346,
535
+ "step": 75
536
+ },
537
+ {
538
+ "epoch": 0.176180817154448,
539
+ "grad_norm": 1.1575367450714111,
540
+ "learning_rate": 1.883990719257541e-05,
541
+ "loss": 1.043,
542
+ "step": 76
543
+ },
544
+ {
545
+ "epoch": 0.17849898580121704,
546
+ "grad_norm": 0.991324245929718,
547
+ "learning_rate": 1.8824439288476412e-05,
548
+ "loss": 1.0807,
549
+ "step": 77
550
+ },
551
+ {
552
+ "epoch": 0.1808171544479861,
553
+ "grad_norm": 1.2354373931884766,
554
+ "learning_rate": 1.880897138437742e-05,
555
+ "loss": 1.0485,
556
+ "step": 78
557
+ },
558
+ {
559
+ "epoch": 0.18313532309475514,
560
+ "grad_norm": 1.3966253995895386,
561
+ "learning_rate": 1.8793503480278422e-05,
562
+ "loss": 1.0349,
563
+ "step": 79
564
+ },
565
+ {
566
+ "epoch": 0.1854534917415242,
567
+ "grad_norm": 1.1162493228912354,
568
+ "learning_rate": 1.877803557617943e-05,
569
+ "loss": 1.1038,
570
+ "step": 80
571
+ },
572
+ {
573
+ "epoch": 0.18777166038829324,
574
+ "grad_norm": 1.117507815361023,
575
+ "learning_rate": 1.8762567672080435e-05,
576
+ "loss": 1.0844,
577
+ "step": 81
578
+ },
579
+ {
580
+ "epoch": 0.1900898290350623,
581
+ "grad_norm": 1.083954930305481,
582
+ "learning_rate": 1.8747099767981442e-05,
583
+ "loss": 1.1057,
584
+ "step": 82
585
+ },
586
+ {
587
+ "epoch": 0.19240799768183137,
588
+ "grad_norm": 1.0554256439208984,
589
+ "learning_rate": 1.8731631863882445e-05,
590
+ "loss": 1.0824,
591
+ "step": 83
592
+ },
593
+ {
594
+ "epoch": 0.1947261663286004,
595
+ "grad_norm": 1.1306427717208862,
596
+ "learning_rate": 1.8716163959783452e-05,
597
+ "loss": 1.0552,
598
+ "step": 84
599
+ },
600
+ {
601
+ "epoch": 0.19704433497536947,
602
+ "grad_norm": 1.1089762449264526,
603
+ "learning_rate": 1.8700696055684455e-05,
604
+ "loss": 1.0374,
605
+ "step": 85
606
+ },
607
+ {
608
+ "epoch": 0.1993625036221385,
609
+ "grad_norm": 1.0922352075576782,
610
+ "learning_rate": 1.8685228151585462e-05,
611
+ "loss": 1.2775,
612
+ "step": 86
613
+ },
614
+ {
615
+ "epoch": 0.20168067226890757,
616
+ "grad_norm": 4.459912300109863,
617
+ "learning_rate": 1.866976024748647e-05,
618
+ "loss": 1.061,
619
+ "step": 87
620
+ },
621
+ {
622
+ "epoch": 0.2039988409156766,
623
+ "grad_norm": 0.9974443316459656,
624
+ "learning_rate": 1.8654292343387472e-05,
625
+ "loss": 1.0759,
626
+ "step": 88
627
+ },
628
+ {
629
+ "epoch": 0.20631700956244567,
630
+ "grad_norm": 0.914336621761322,
631
+ "learning_rate": 1.863882443928848e-05,
632
+ "loss": 1.0549,
633
+ "step": 89
634
+ },
635
+ {
636
+ "epoch": 0.20863517820921473,
637
+ "grad_norm": 0.8654055595397949,
638
+ "learning_rate": 1.8623356535189482e-05,
639
+ "loss": 1.0634,
640
+ "step": 90
641
+ },
642
+ {
643
+ "epoch": 0.21095334685598377,
644
+ "grad_norm": 0.7419248819351196,
645
+ "learning_rate": 1.860788863109049e-05,
646
+ "loss": 1.0512,
647
+ "step": 91
648
+ },
649
+ {
650
+ "epoch": 0.21327151550275283,
651
+ "grad_norm": 1.4844622611999512,
652
+ "learning_rate": 1.8592420726991492e-05,
653
+ "loss": 1.0969,
654
+ "step": 92
655
+ },
656
+ {
657
+ "epoch": 0.21558968414952187,
658
+ "grad_norm": 1.29688560962677,
659
+ "learning_rate": 1.85769528228925e-05,
660
+ "loss": 1.0711,
661
+ "step": 93
662
+ },
663
+ {
664
+ "epoch": 0.21790785279629094,
665
+ "grad_norm": 1.421087622642517,
666
+ "learning_rate": 1.8561484918793505e-05,
667
+ "loss": 1.0772,
668
+ "step": 94
669
+ },
670
+ {
671
+ "epoch": 0.22022602144305997,
672
+ "grad_norm": 1.200110673904419,
673
+ "learning_rate": 1.8546017014694512e-05,
674
+ "loss": 1.1029,
675
+ "step": 95
676
+ },
677
+ {
678
+ "epoch": 0.22254419008982904,
679
+ "grad_norm": 1.025266408920288,
680
+ "learning_rate": 1.8530549110595515e-05,
681
+ "loss": 1.1009,
682
+ "step": 96
683
+ },
684
+ {
685
+ "epoch": 0.2248623587365981,
686
+ "grad_norm": 1.1353425979614258,
687
+ "learning_rate": 1.8515081206496522e-05,
688
+ "loss": 1.1109,
689
+ "step": 97
690
+ },
691
+ {
692
+ "epoch": 0.22718052738336714,
693
+ "grad_norm": 1.0217199325561523,
694
+ "learning_rate": 1.849961330239753e-05,
695
+ "loss": 1.1158,
696
+ "step": 98
697
+ },
698
+ {
699
+ "epoch": 0.2294986960301362,
700
+ "grad_norm": 0.9707551598548889,
701
+ "learning_rate": 1.8484145398298532e-05,
702
+ "loss": 1.0189,
703
+ "step": 99
704
+ },
705
+ {
706
+ "epoch": 0.23181686467690524,
707
+ "grad_norm": 0.8363978266716003,
708
+ "learning_rate": 1.846867749419954e-05,
709
+ "loss": 1.086,
710
+ "step": 100
711
+ },
712
+ {
713
+ "epoch": 0.2341350333236743,
714
+ "grad_norm": 1.1158548593521118,
715
+ "learning_rate": 1.8453209590100542e-05,
716
+ "loss": 1.0169,
717
+ "step": 101
718
+ },
719
+ {
720
+ "epoch": 0.23645320197044334,
721
+ "grad_norm": 0.8453333973884583,
722
+ "learning_rate": 1.843774168600155e-05,
723
+ "loss": 1.0584,
724
+ "step": 102
725
+ },
726
+ {
727
+ "epoch": 0.2387713706172124,
728
+ "grad_norm": 1.572540044784546,
729
+ "learning_rate": 1.8422273781902552e-05,
730
+ "loss": 1.105,
731
+ "step": 103
732
+ },
733
+ {
734
+ "epoch": 0.24108953926398147,
735
+ "grad_norm": 0.9463809132575989,
736
+ "learning_rate": 1.840680587780356e-05,
737
+ "loss": 1.0552,
738
+ "step": 104
739
+ },
740
+ {
741
+ "epoch": 0.2434077079107505,
742
+ "grad_norm": 0.8801397681236267,
743
+ "learning_rate": 1.8391337973704565e-05,
744
+ "loss": 1.0615,
745
+ "step": 105
746
+ },
747
+ {
748
+ "epoch": 0.24572587655751957,
749
+ "grad_norm": 1.251951813697815,
750
+ "learning_rate": 1.837587006960557e-05,
751
+ "loss": 1.1217,
752
+ "step": 106
753
+ },
754
+ {
755
+ "epoch": 0.2480440452042886,
756
+ "grad_norm": 1.297305703163147,
757
+ "learning_rate": 1.8360402165506575e-05,
758
+ "loss": 1.1102,
759
+ "step": 107
760
+ },
761
+ {
762
+ "epoch": 0.25036221385105767,
763
+ "grad_norm": 0.9023735523223877,
764
+ "learning_rate": 1.8344934261407582e-05,
765
+ "loss": 1.0786,
766
+ "step": 108
767
+ },
768
+ {
769
+ "epoch": 0.2526803824978267,
770
+ "grad_norm": 0.7729614973068237,
771
+ "learning_rate": 1.8329466357308585e-05,
772
+ "loss": 1.0636,
773
+ "step": 109
774
+ },
775
+ {
776
+ "epoch": 0.25499855114459574,
777
+ "grad_norm": 1.458217978477478,
778
+ "learning_rate": 1.8313998453209592e-05,
779
+ "loss": 1.0998,
780
+ "step": 110
781
+ },
782
+ {
783
+ "epoch": 0.25731671979136483,
784
+ "grad_norm": 1.295067548751831,
785
+ "learning_rate": 1.82985305491106e-05,
786
+ "loss": 1.0756,
787
+ "step": 111
788
+ },
789
+ {
790
+ "epoch": 0.25963488843813387,
791
+ "grad_norm": 0.7502389550209045,
792
+ "learning_rate": 1.8283062645011602e-05,
793
+ "loss": 1.048,
794
+ "step": 112
795
+ },
796
+ {
797
+ "epoch": 0.2619530570849029,
798
+ "grad_norm": 0.7939056158065796,
799
+ "learning_rate": 1.826759474091261e-05,
800
+ "loss": 1.1759,
801
+ "step": 113
802
+ },
803
+ {
804
+ "epoch": 0.264271225731672,
805
+ "grad_norm": 0.8996245861053467,
806
+ "learning_rate": 1.8252126836813612e-05,
807
+ "loss": 1.0799,
808
+ "step": 114
809
+ },
810
+ {
811
+ "epoch": 0.26658939437844104,
812
+ "grad_norm": 1.1998958587646484,
813
+ "learning_rate": 1.823665893271462e-05,
814
+ "loss": 1.0622,
815
+ "step": 115
816
+ },
817
+ {
818
+ "epoch": 0.2689075630252101,
819
+ "grad_norm": 0.7442781329154968,
820
+ "learning_rate": 1.8221191028615622e-05,
821
+ "loss": 1.1025,
822
+ "step": 116
823
+ },
824
+ {
825
+ "epoch": 0.2712257316719791,
826
+ "grad_norm": 1.0310958623886108,
827
+ "learning_rate": 1.820572312451663e-05,
828
+ "loss": 1.0557,
829
+ "step": 117
830
+ },
831
+ {
832
+ "epoch": 0.2735439003187482,
833
+ "grad_norm": 1.071614384651184,
834
+ "learning_rate": 1.8190255220417635e-05,
835
+ "loss": 1.1166,
836
+ "step": 118
837
+ },
838
+ {
839
+ "epoch": 0.27586206896551724,
840
+ "grad_norm": 0.8248165845870972,
841
+ "learning_rate": 1.8174787316318642e-05,
842
+ "loss": 1.1389,
843
+ "step": 119
844
+ },
845
+ {
846
+ "epoch": 0.2781802376122863,
847
+ "grad_norm": 0.9798437356948853,
848
+ "learning_rate": 1.8159319412219645e-05,
849
+ "loss": 1.0331,
850
+ "step": 120
851
+ },
852
+ {
853
+ "epoch": 0.28049840625905537,
854
+ "grad_norm": 0.7473148703575134,
855
+ "learning_rate": 1.8143851508120652e-05,
856
+ "loss": 1.0211,
857
+ "step": 121
858
+ },
859
+ {
860
+ "epoch": 0.2828165749058244,
861
+ "grad_norm": 1.3669893741607666,
862
+ "learning_rate": 1.812838360402166e-05,
863
+ "loss": 1.1495,
864
+ "step": 122
865
+ },
866
+ {
867
+ "epoch": 0.28513474355259344,
868
+ "grad_norm": 0.7082749009132385,
869
+ "learning_rate": 1.8112915699922662e-05,
870
+ "loss": 1.0244,
871
+ "step": 123
872
+ },
873
+ {
874
+ "epoch": 0.28745291219936253,
875
+ "grad_norm": 1.2198262214660645,
876
+ "learning_rate": 1.809744779582367e-05,
877
+ "loss": 1.1052,
878
+ "step": 124
879
+ },
880
+ {
881
+ "epoch": 0.28977108084613157,
882
+ "grad_norm": 2.2517826557159424,
883
+ "learning_rate": 1.8081979891724672e-05,
884
+ "loss": 1.0324,
885
+ "step": 125
886
+ },
887
+ {
888
+ "epoch": 0.2920892494929006,
889
+ "grad_norm": 0.9831060767173767,
890
+ "learning_rate": 1.806651198762568e-05,
891
+ "loss": 1.1176,
892
+ "step": 126
893
+ },
894
+ {
895
+ "epoch": 0.29440741813966964,
896
+ "grad_norm": 0.8122763633728027,
897
+ "learning_rate": 1.8051044083526682e-05,
898
+ "loss": 1.0238,
899
+ "step": 127
900
+ },
901
+ {
902
+ "epoch": 0.29672558678643873,
903
+ "grad_norm": 1.0002597570419312,
904
+ "learning_rate": 1.803557617942769e-05,
905
+ "loss": 1.0847,
906
+ "step": 128
907
+ },
908
+ {
909
+ "epoch": 0.29904375543320777,
910
+ "grad_norm": 0.8262125253677368,
911
+ "learning_rate": 1.8020108275328692e-05,
912
+ "loss": 1.1149,
913
+ "step": 129
914
+ },
915
+ {
916
+ "epoch": 0.3013619240799768,
917
+ "grad_norm": 1.2185602188110352,
918
+ "learning_rate": 1.80046403712297e-05,
919
+ "loss": 1.0688,
920
+ "step": 130
921
+ },
922
+ {
923
+ "epoch": 0.3036800927267459,
924
+ "grad_norm": 1.2163466215133667,
925
+ "learning_rate": 1.7989172467130705e-05,
926
+ "loss": 1.0366,
927
+ "step": 131
928
+ },
929
+ {
930
+ "epoch": 0.30599826137351493,
931
+ "grad_norm": 0.6977062225341797,
932
+ "learning_rate": 1.7973704563031712e-05,
933
+ "loss": 1.0853,
934
+ "step": 132
935
+ },
936
+ {
937
+ "epoch": 0.30831643002028397,
938
+ "grad_norm": 0.8096152544021606,
939
+ "learning_rate": 1.7958236658932715e-05,
940
+ "loss": 1.0104,
941
+ "step": 133
942
+ },
943
+ {
944
+ "epoch": 0.310634598667053,
945
+ "grad_norm": 0.9192125201225281,
946
+ "learning_rate": 1.7942768754833722e-05,
947
+ "loss": 1.0596,
948
+ "step": 134
949
+ },
950
+ {
951
+ "epoch": 0.3129527673138221,
952
+ "grad_norm": 0.7752702236175537,
953
+ "learning_rate": 1.792730085073473e-05,
954
+ "loss": 1.1281,
955
+ "step": 135
956
+ },
957
+ {
958
+ "epoch": 0.31527093596059114,
959
+ "grad_norm": 1.0385123491287231,
960
+ "learning_rate": 1.7911832946635732e-05,
961
+ "loss": 1.0857,
962
+ "step": 136
963
+ },
964
+ {
965
+ "epoch": 0.3175891046073602,
966
+ "grad_norm": 2.8603484630584717,
967
+ "learning_rate": 1.789636504253674e-05,
968
+ "loss": 1.1425,
969
+ "step": 137
970
+ },
971
+ {
972
+ "epoch": 0.31990727325412927,
973
+ "grad_norm": 1.104943037033081,
974
+ "learning_rate": 1.7880897138437742e-05,
975
+ "loss": 1.1464,
976
+ "step": 138
977
+ },
978
+ {
979
+ "epoch": 0.3222254419008983,
980
+ "grad_norm": 1.7259231805801392,
981
+ "learning_rate": 1.786542923433875e-05,
982
+ "loss": 1.0472,
983
+ "step": 139
984
+ },
985
+ {
986
+ "epoch": 0.32454361054766734,
987
+ "grad_norm": 0.9174676537513733,
988
+ "learning_rate": 1.7849961330239752e-05,
989
+ "loss": 1.0523,
990
+ "step": 140
991
+ },
992
+ {
993
+ "epoch": 0.3268617791944364,
994
+ "grad_norm": 0.9572336673736572,
995
+ "learning_rate": 1.783449342614076e-05,
996
+ "loss": 1.0327,
997
+ "step": 141
998
+ },
999
+ {
1000
+ "epoch": 0.32917994784120547,
1001
+ "grad_norm": 0.6567716598510742,
1002
+ "learning_rate": 1.7819025522041766e-05,
1003
+ "loss": 1.0979,
1004
+ "step": 142
1005
+ },
1006
+ {
1007
+ "epoch": 0.3314981164879745,
1008
+ "grad_norm": 1.8695584535598755,
1009
+ "learning_rate": 1.780355761794277e-05,
1010
+ "loss": 1.0659,
1011
+ "step": 143
1012
+ },
1013
+ {
1014
+ "epoch": 0.33381628513474354,
1015
+ "grad_norm": 0.8160743713378906,
1016
+ "learning_rate": 1.7788089713843776e-05,
1017
+ "loss": 1.0746,
1018
+ "step": 144
1019
+ },
1020
+ {
1021
+ "epoch": 0.33613445378151263,
1022
+ "grad_norm": 0.7144508957862854,
1023
+ "learning_rate": 1.7772621809744782e-05,
1024
+ "loss": 1.002,
1025
+ "step": 145
1026
+ },
1027
+ {
1028
+ "epoch": 0.33845262242828167,
1029
+ "grad_norm": 0.8914051055908203,
1030
+ "learning_rate": 1.7757153905645786e-05,
1031
+ "loss": 1.0334,
1032
+ "step": 146
1033
+ },
1034
+ {
1035
+ "epoch": 0.3407707910750507,
1036
+ "grad_norm": 1.1182371377944946,
1037
+ "learning_rate": 1.7741686001546792e-05,
1038
+ "loss": 1.0535,
1039
+ "step": 147
1040
+ },
1041
+ {
1042
+ "epoch": 0.34308895972181974,
1043
+ "grad_norm": 0.6911827325820923,
1044
+ "learning_rate": 1.77262180974478e-05,
1045
+ "loss": 1.0783,
1046
+ "step": 148
1047
+ },
1048
+ {
1049
+ "epoch": 0.34540712836858883,
1050
+ "grad_norm": 1.141491413116455,
1051
+ "learning_rate": 1.7710750193348802e-05,
1052
+ "loss": 1.0522,
1053
+ "step": 149
1054
+ },
1055
+ {
1056
+ "epoch": 0.34772529701535787,
1057
+ "grad_norm": 1.103798747062683,
1058
+ "learning_rate": 1.769528228924981e-05,
1059
+ "loss": 1.1178,
1060
+ "step": 150
1061
+ },
1062
+ {
1063
+ "epoch": 0.3500434656621269,
1064
+ "grad_norm": 1.1297893524169922,
1065
+ "learning_rate": 1.7679814385150812e-05,
1066
+ "loss": 1.19,
1067
+ "step": 151
1068
+ },
1069
+ {
1070
+ "epoch": 0.352361634308896,
1071
+ "grad_norm": 0.8850527405738831,
1072
+ "learning_rate": 1.766434648105182e-05,
1073
+ "loss": 0.9929,
1074
+ "step": 152
1075
+ },
1076
+ {
1077
+ "epoch": 0.35467980295566504,
1078
+ "grad_norm": 1.096604585647583,
1079
+ "learning_rate": 1.7648878576952822e-05,
1080
+ "loss": 1.0992,
1081
+ "step": 153
1082
+ },
1083
+ {
1084
+ "epoch": 0.35699797160243407,
1085
+ "grad_norm": 0.9644438624382019,
1086
+ "learning_rate": 1.763341067285383e-05,
1087
+ "loss": 1.0421,
1088
+ "step": 154
1089
+ },
1090
+ {
1091
+ "epoch": 0.3593161402492031,
1092
+ "grad_norm": 1.0480139255523682,
1093
+ "learning_rate": 1.7617942768754836e-05,
1094
+ "loss": 1.0495,
1095
+ "step": 155
1096
+ },
1097
+ {
1098
+ "epoch": 0.3616343088959722,
1099
+ "grad_norm": 3.11247181892395,
1100
+ "learning_rate": 1.7602474864655842e-05,
1101
+ "loss": 1.0407,
1102
+ "step": 156
1103
+ },
1104
+ {
1105
+ "epoch": 0.36395247754274124,
1106
+ "grad_norm": 0.9178793430328369,
1107
+ "learning_rate": 1.7587006960556846e-05,
1108
+ "loss": 1.0208,
1109
+ "step": 157
1110
+ },
1111
+ {
1112
+ "epoch": 0.3662706461895103,
1113
+ "grad_norm": 1.0008949041366577,
1114
+ "learning_rate": 1.7571539056457852e-05,
1115
+ "loss": 1.036,
1116
+ "step": 158
1117
+ },
1118
+ {
1119
+ "epoch": 0.36858881483627937,
1120
+ "grad_norm": 0.9334746599197388,
1121
+ "learning_rate": 1.755607115235886e-05,
1122
+ "loss": 1.1097,
1123
+ "step": 159
1124
+ },
1125
+ {
1126
+ "epoch": 0.3709069834830484,
1127
+ "grad_norm": 0.9296855330467224,
1128
+ "learning_rate": 1.7540603248259862e-05,
1129
+ "loss": 1.0597,
1130
+ "step": 160
1131
+ },
1132
+ {
1133
+ "epoch": 0.37322515212981744,
1134
+ "grad_norm": 0.7528197765350342,
1135
+ "learning_rate": 1.752513534416087e-05,
1136
+ "loss": 1.0252,
1137
+ "step": 161
1138
+ },
1139
+ {
1140
+ "epoch": 0.3755433207765865,
1141
+ "grad_norm": 0.7995203733444214,
1142
+ "learning_rate": 1.7509667440061872e-05,
1143
+ "loss": 1.0446,
1144
+ "step": 162
1145
+ },
1146
+ {
1147
+ "epoch": 0.37786148942335557,
1148
+ "grad_norm": 0.7773709297180176,
1149
+ "learning_rate": 1.749419953596288e-05,
1150
+ "loss": 1.0527,
1151
+ "step": 163
1152
+ },
1153
+ {
1154
+ "epoch": 0.3801796580701246,
1155
+ "grad_norm": 0.9108691811561584,
1156
+ "learning_rate": 1.7478731631863882e-05,
1157
+ "loss": 1.0571,
1158
+ "step": 164
1159
+ },
1160
+ {
1161
+ "epoch": 0.38249782671689364,
1162
+ "grad_norm": 0.8565751910209656,
1163
+ "learning_rate": 1.746326372776489e-05,
1164
+ "loss": 1.1003,
1165
+ "step": 165
1166
+ },
1167
+ {
1168
+ "epoch": 0.38481599536366273,
1169
+ "grad_norm": 1.3683419227600098,
1170
+ "learning_rate": 1.7447795823665896e-05,
1171
+ "loss": 1.0697,
1172
+ "step": 166
1173
+ },
1174
+ {
1175
+ "epoch": 0.38713416401043177,
1176
+ "grad_norm": 0.6560070514678955,
1177
+ "learning_rate": 1.74323279195669e-05,
1178
+ "loss": 0.9556,
1179
+ "step": 167
1180
+ },
1181
+ {
1182
+ "epoch": 0.3894523326572008,
1183
+ "grad_norm": 1.370263934135437,
1184
+ "learning_rate": 1.7416860015467906e-05,
1185
+ "loss": 1.0848,
1186
+ "step": 168
1187
+ },
1188
+ {
1189
+ "epoch": 0.39177050130396984,
1190
+ "grad_norm": 1.013763427734375,
1191
+ "learning_rate": 1.7401392111368912e-05,
1192
+ "loss": 1.0643,
1193
+ "step": 169
1194
+ },
1195
+ {
1196
+ "epoch": 0.39408866995073893,
1197
+ "grad_norm": 0.8194316029548645,
1198
+ "learning_rate": 1.7385924207269916e-05,
1199
+ "loss": 1.0708,
1200
+ "step": 170
1201
+ },
1202
+ {
1203
+ "epoch": 0.39640683859750797,
1204
+ "grad_norm": 0.9241949319839478,
1205
+ "learning_rate": 1.7370456303170922e-05,
1206
+ "loss": 1.0502,
1207
+ "step": 171
1208
+ },
1209
+ {
1210
+ "epoch": 0.398725007244277,
1211
+ "grad_norm": 0.9724448323249817,
1212
+ "learning_rate": 1.735498839907193e-05,
1213
+ "loss": 1.0152,
1214
+ "step": 172
1215
+ },
1216
+ {
1217
+ "epoch": 0.4010431758910461,
1218
+ "grad_norm": 0.6559419631958008,
1219
+ "learning_rate": 1.7339520494972932e-05,
1220
+ "loss": 1.0148,
1221
+ "step": 173
1222
+ },
1223
+ {
1224
+ "epoch": 0.40336134453781514,
1225
+ "grad_norm": 1.1617038249969482,
1226
+ "learning_rate": 1.732405259087394e-05,
1227
+ "loss": 1.0972,
1228
+ "step": 174
1229
+ },
1230
+ {
1231
+ "epoch": 0.4056795131845842,
1232
+ "grad_norm": 0.7249406576156616,
1233
+ "learning_rate": 1.7308584686774942e-05,
1234
+ "loss": 0.9664,
1235
+ "step": 175
1236
+ },
1237
+ {
1238
+ "epoch": 0.4079976818313532,
1239
+ "grad_norm": 1.2099742889404297,
1240
+ "learning_rate": 1.729311678267595e-05,
1241
+ "loss": 1.0314,
1242
+ "step": 176
1243
+ },
1244
+ {
1245
+ "epoch": 0.4103158504781223,
1246
+ "grad_norm": 0.8690075278282166,
1247
+ "learning_rate": 1.7277648878576952e-05,
1248
+ "loss": 1.0789,
1249
+ "step": 177
1250
+ },
1251
+ {
1252
+ "epoch": 0.41263401912489134,
1253
+ "grad_norm": 0.7662826180458069,
1254
+ "learning_rate": 1.726218097447796e-05,
1255
+ "loss": 1.014,
1256
+ "step": 178
1257
+ },
1258
+ {
1259
+ "epoch": 0.4149521877716604,
1260
+ "grad_norm": 1.22348952293396,
1261
+ "learning_rate": 1.7246713070378966e-05,
1262
+ "loss": 0.9765,
1263
+ "step": 179
1264
+ },
1265
+ {
1266
+ "epoch": 0.41727035641842947,
1267
+ "grad_norm": 1.0363351106643677,
1268
+ "learning_rate": 1.7231245166279972e-05,
1269
+ "loss": 1.0487,
1270
+ "step": 180
1271
+ },
1272
+ {
1273
+ "epoch": 0.4195885250651985,
1274
+ "grad_norm": 0.8833026885986328,
1275
+ "learning_rate": 1.7215777262180976e-05,
1276
+ "loss": 1.0261,
1277
+ "step": 181
1278
+ },
1279
+ {
1280
+ "epoch": 0.42190669371196754,
1281
+ "grad_norm": 0.8683452606201172,
1282
+ "learning_rate": 1.7200309358081982e-05,
1283
+ "loss": 1.0609,
1284
+ "step": 182
1285
+ },
1286
+ {
1287
+ "epoch": 0.4242248623587366,
1288
+ "grad_norm": 0.8211922645568848,
1289
+ "learning_rate": 1.718484145398299e-05,
1290
+ "loss": 0.9942,
1291
+ "step": 183
1292
+ },
1293
+ {
1294
+ "epoch": 0.42654303100550567,
1295
+ "grad_norm": 0.8936122059822083,
1296
+ "learning_rate": 1.7169373549883992e-05,
1297
+ "loss": 1.0591,
1298
+ "step": 184
1299
+ },
1300
+ {
1301
+ "epoch": 0.4288611996522747,
1302
+ "grad_norm": 0.9455772042274475,
1303
+ "learning_rate": 1.7153905645785e-05,
1304
+ "loss": 1.0628,
1305
+ "step": 185
1306
+ },
1307
+ {
1308
+ "epoch": 0.43117936829904374,
1309
+ "grad_norm": 1.0464543104171753,
1310
+ "learning_rate": 1.7138437741686002e-05,
1311
+ "loss": 0.9742,
1312
+ "step": 186
1313
+ },
1314
+ {
1315
+ "epoch": 0.43349753694581283,
1316
+ "grad_norm": 1.4931954145431519,
1317
+ "learning_rate": 1.712296983758701e-05,
1318
+ "loss": 1.0534,
1319
+ "step": 187
1320
+ },
1321
+ {
1322
+ "epoch": 0.43581570559258187,
1323
+ "grad_norm": 0.7873006463050842,
1324
+ "learning_rate": 1.7107501933488012e-05,
1325
+ "loss": 1.0499,
1326
+ "step": 188
1327
+ },
1328
+ {
1329
+ "epoch": 0.4381338742393509,
1330
+ "grad_norm": 0.7451059222221375,
1331
+ "learning_rate": 1.709203402938902e-05,
1332
+ "loss": 1.0006,
1333
+ "step": 189
1334
+ },
1335
+ {
1336
+ "epoch": 0.44045204288611994,
1337
+ "grad_norm": 0.8252111673355103,
1338
+ "learning_rate": 1.7076566125290022e-05,
1339
+ "loss": 1.057,
1340
+ "step": 190
1341
+ },
1342
+ {
1343
+ "epoch": 0.44277021153288904,
1344
+ "grad_norm": 0.6444481015205383,
1345
+ "learning_rate": 1.706109822119103e-05,
1346
+ "loss": 1.0098,
1347
+ "step": 191
1348
+ },
1349
+ {
1350
+ "epoch": 0.44508838017965807,
1351
+ "grad_norm": 0.8497568368911743,
1352
+ "learning_rate": 1.7045630317092036e-05,
1353
+ "loss": 1.0186,
1354
+ "step": 192
1355
+ },
1356
+ {
1357
+ "epoch": 0.4474065488264271,
1358
+ "grad_norm": 1.0328199863433838,
1359
+ "learning_rate": 1.7030162412993042e-05,
1360
+ "loss": 1.0474,
1361
+ "step": 193
1362
+ },
1363
+ {
1364
+ "epoch": 0.4497247174731962,
1365
+ "grad_norm": 0.7315878868103027,
1366
+ "learning_rate": 1.7014694508894046e-05,
1367
+ "loss": 0.9985,
1368
+ "step": 194
1369
+ },
1370
+ {
1371
+ "epoch": 0.45204288611996524,
1372
+ "grad_norm": 1.0060752630233765,
1373
+ "learning_rate": 1.6999226604795052e-05,
1374
+ "loss": 1.0985,
1375
+ "step": 195
1376
+ },
1377
+ {
1378
+ "epoch": 0.4543610547667343,
1379
+ "grad_norm": 0.8653793334960938,
1380
+ "learning_rate": 1.698375870069606e-05,
1381
+ "loss": 0.999,
1382
+ "step": 196
1383
+ },
1384
+ {
1385
+ "epoch": 0.4566792234135033,
1386
+ "grad_norm": 1.0214215517044067,
1387
+ "learning_rate": 1.6968290796597062e-05,
1388
+ "loss": 1.0759,
1389
+ "step": 197
1390
+ },
1391
+ {
1392
+ "epoch": 0.4589973920602724,
1393
+ "grad_norm": 0.7069177627563477,
1394
+ "learning_rate": 1.695282289249807e-05,
1395
+ "loss": 1.1569,
1396
+ "step": 198
1397
+ },
1398
+ {
1399
+ "epoch": 0.46131556070704144,
1400
+ "grad_norm": 1.8065637350082397,
1401
+ "learning_rate": 1.6937354988399072e-05,
1402
+ "loss": 1.0398,
1403
+ "step": 199
1404
+ },
1405
+ {
1406
+ "epoch": 0.4636337293538105,
1407
+ "grad_norm": 8.820870399475098,
1408
+ "learning_rate": 1.692188708430008e-05,
1409
+ "loss": 1.0801,
1410
+ "step": 200
1411
+ },
1412
+ {
1413
+ "epoch": 0.46595189800057957,
1414
+ "grad_norm": 0.7957196235656738,
1415
+ "learning_rate": 1.6906419180201082e-05,
1416
+ "loss": 1.01,
1417
+ "step": 201
1418
+ },
1419
+ {
1420
+ "epoch": 0.4682700666473486,
1421
+ "grad_norm": 0.9078807830810547,
1422
+ "learning_rate": 1.689095127610209e-05,
1423
+ "loss": 1.0068,
1424
+ "step": 202
1425
+ },
1426
+ {
1427
+ "epoch": 0.47058823529411764,
1428
+ "grad_norm": 0.8792912364006042,
1429
+ "learning_rate": 1.6875483372003096e-05,
1430
+ "loss": 1.0169,
1431
+ "step": 203
1432
+ },
1433
+ {
1434
+ "epoch": 0.4729064039408867,
1435
+ "grad_norm": 1.0289100408554077,
1436
+ "learning_rate": 1.68600154679041e-05,
1437
+ "loss": 0.9956,
1438
+ "step": 204
1439
+ },
1440
+ {
1441
+ "epoch": 0.47522457258765577,
1442
+ "grad_norm": 2.788477897644043,
1443
+ "learning_rate": 1.6844547563805106e-05,
1444
+ "loss": 1.0839,
1445
+ "step": 205
1446
+ },
1447
+ {
1448
+ "epoch": 0.4775427412344248,
1449
+ "grad_norm": 0.841396152973175,
1450
+ "learning_rate": 1.6829079659706112e-05,
1451
+ "loss": 1.0735,
1452
+ "step": 206
1453
+ },
1454
+ {
1455
+ "epoch": 0.47986090988119384,
1456
+ "grad_norm": 1.1330881118774414,
1457
+ "learning_rate": 1.681361175560712e-05,
1458
+ "loss": 1.0486,
1459
+ "step": 207
1460
+ },
1461
+ {
1462
+ "epoch": 0.48217907852796293,
1463
+ "grad_norm": 1.2185862064361572,
1464
+ "learning_rate": 1.6798143851508122e-05,
1465
+ "loss": 1.0962,
1466
+ "step": 208
1467
+ },
1468
+ {
1469
+ "epoch": 0.48449724717473197,
1470
+ "grad_norm": 1.2216246128082275,
1471
+ "learning_rate": 1.678267594740913e-05,
1472
+ "loss": 1.0202,
1473
+ "step": 209
1474
+ },
1475
+ {
1476
+ "epoch": 0.486815415821501,
1477
+ "grad_norm": 0.7822516560554504,
1478
+ "learning_rate": 1.6767208043310132e-05,
1479
+ "loss": 1.0278,
1480
+ "step": 210
1481
+ },
1482
+ {
1483
+ "epoch": 0.48913358446827004,
1484
+ "grad_norm": 1.021760106086731,
1485
+ "learning_rate": 1.675174013921114e-05,
1486
+ "loss": 1.0626,
1487
+ "step": 211
1488
+ },
1489
+ {
1490
+ "epoch": 0.49145175311503914,
1491
+ "grad_norm": 1.334328055381775,
1492
+ "learning_rate": 1.6736272235112142e-05,
1493
+ "loss": 1.1798,
1494
+ "step": 212
1495
+ },
1496
+ {
1497
+ "epoch": 0.4937699217618082,
1498
+ "grad_norm": 0.7392475605010986,
1499
+ "learning_rate": 1.672080433101315e-05,
1500
+ "loss": 1.0713,
1501
+ "step": 213
1502
+ },
1503
+ {
1504
+ "epoch": 0.4960880904085772,
1505
+ "grad_norm": 0.768805205821991,
1506
+ "learning_rate": 1.6705336426914152e-05,
1507
+ "loss": 0.9839,
1508
+ "step": 214
1509
+ },
1510
+ {
1511
+ "epoch": 0.4984062590553463,
1512
+ "grad_norm": 0.7203591465950012,
1513
+ "learning_rate": 1.668986852281516e-05,
1514
+ "loss": 0.9926,
1515
+ "step": 215
1516
+ },
1517
+ {
1518
+ "epoch": 0.5007244277021153,
1519
+ "grad_norm": 1.2835793495178223,
1520
+ "learning_rate": 1.6674400618716166e-05,
1521
+ "loss": 1.024,
1522
+ "step": 216
1523
+ },
1524
+ {
1525
+ "epoch": 0.5030425963488844,
1526
+ "grad_norm": 0.8296486139297485,
1527
+ "learning_rate": 1.6658932714617173e-05,
1528
+ "loss": 1.0421,
1529
+ "step": 217
1530
+ },
1531
+ {
1532
+ "epoch": 0.5053607649956534,
1533
+ "grad_norm": 0.8933680653572083,
1534
+ "learning_rate": 1.6643464810518176e-05,
1535
+ "loss": 0.9958,
1536
+ "step": 218
1537
+ },
1538
+ {
1539
+ "epoch": 0.5076789336424224,
1540
+ "grad_norm": 0.6815921068191528,
1541
+ "learning_rate": 1.6627996906419182e-05,
1542
+ "loss": 1.0334,
1543
+ "step": 219
1544
+ },
1545
+ {
1546
+ "epoch": 0.5099971022891915,
1547
+ "grad_norm": 0.795447051525116,
1548
+ "learning_rate": 1.661252900232019e-05,
1549
+ "loss": 1.0553,
1550
+ "step": 220
1551
+ },
1552
+ {
1553
+ "epoch": 0.5123152709359606,
1554
+ "grad_norm": 1.1784237623214722,
1555
+ "learning_rate": 1.6597061098221192e-05,
1556
+ "loss": 0.9998,
1557
+ "step": 221
1558
+ },
1559
+ {
1560
+ "epoch": 0.5146334395827297,
1561
+ "grad_norm": 0.9474261403083801,
1562
+ "learning_rate": 1.65815931941222e-05,
1563
+ "loss": 1.0704,
1564
+ "step": 222
1565
+ },
1566
+ {
1567
+ "epoch": 0.5169516082294987,
1568
+ "grad_norm": 0.9175812602043152,
1569
+ "learning_rate": 1.6566125290023202e-05,
1570
+ "loss": 1.0779,
1571
+ "step": 223
1572
+ },
1573
+ {
1574
+ "epoch": 0.5192697768762677,
1575
+ "grad_norm": 0.8006009459495544,
1576
+ "learning_rate": 1.655065738592421e-05,
1577
+ "loss": 1.024,
1578
+ "step": 224
1579
+ },
1580
+ {
1581
+ "epoch": 0.5215879455230368,
1582
+ "grad_norm": 0.7539005875587463,
1583
+ "learning_rate": 1.6535189481825212e-05,
1584
+ "loss": 1.0301,
1585
+ "step": 225
1586
+ },
1587
+ {
1588
+ "epoch": 0.5239061141698058,
1589
+ "grad_norm": 0.8373304009437561,
1590
+ "learning_rate": 1.651972157772622e-05,
1591
+ "loss": 1.0519,
1592
+ "step": 226
1593
+ },
1594
+ {
1595
+ "epoch": 0.5262242828165749,
1596
+ "grad_norm": 0.7653727531433105,
1597
+ "learning_rate": 1.6504253673627222e-05,
1598
+ "loss": 0.9657,
1599
+ "step": 227
1600
+ },
1601
+ {
1602
+ "epoch": 0.528542451463344,
1603
+ "grad_norm": 0.6552687287330627,
1604
+ "learning_rate": 1.648878576952823e-05,
1605
+ "loss": 1.0106,
1606
+ "step": 228
1607
+ },
1608
+ {
1609
+ "epoch": 0.530860620110113,
1610
+ "grad_norm": 1.428830862045288,
1611
+ "learning_rate": 1.6473317865429236e-05,
1612
+ "loss": 1.0327,
1613
+ "step": 229
1614
+ },
1615
+ {
1616
+ "epoch": 0.5331787887568821,
1617
+ "grad_norm": 1.0795942544937134,
1618
+ "learning_rate": 1.6457849961330243e-05,
1619
+ "loss": 1.0854,
1620
+ "step": 230
1621
+ },
1622
+ {
1623
+ "epoch": 0.5354969574036511,
1624
+ "grad_norm": 0.5399507284164429,
1625
+ "learning_rate": 1.644238205723125e-05,
1626
+ "loss": 0.9932,
1627
+ "step": 231
1628
+ },
1629
+ {
1630
+ "epoch": 0.5378151260504201,
1631
+ "grad_norm": 2.8251047134399414,
1632
+ "learning_rate": 1.6426914153132253e-05,
1633
+ "loss": 1.002,
1634
+ "step": 232
1635
+ },
1636
+ {
1637
+ "epoch": 0.5401332946971892,
1638
+ "grad_norm": 0.7555001974105835,
1639
+ "learning_rate": 1.641144624903326e-05,
1640
+ "loss": 0.9898,
1641
+ "step": 233
1642
+ },
1643
+ {
1644
+ "epoch": 0.5424514633439582,
1645
+ "grad_norm": 0.9090583324432373,
1646
+ "learning_rate": 1.6395978344934263e-05,
1647
+ "loss": 0.9762,
1648
+ "step": 234
1649
+ },
1650
+ {
1651
+ "epoch": 0.5447696319907274,
1652
+ "grad_norm": 0.8169143199920654,
1653
+ "learning_rate": 1.638051044083527e-05,
1654
+ "loss": 1.0588,
1655
+ "step": 235
1656
+ },
1657
+ {
1658
+ "epoch": 0.5470878006374964,
1659
+ "grad_norm": 0.7842413783073425,
1660
+ "learning_rate": 1.6365042536736273e-05,
1661
+ "loss": 0.9432,
1662
+ "step": 236
1663
+ },
1664
+ {
1665
+ "epoch": 0.5494059692842654,
1666
+ "grad_norm": 2.24771785736084,
1667
+ "learning_rate": 1.634957463263728e-05,
1668
+ "loss": 1.0636,
1669
+ "step": 237
1670
+ },
1671
+ {
1672
+ "epoch": 0.5517241379310345,
1673
+ "grad_norm": 0.9846341013908386,
1674
+ "learning_rate": 1.6334106728538283e-05,
1675
+ "loss": 0.9743,
1676
+ "step": 238
1677
+ },
1678
+ {
1679
+ "epoch": 0.5540423065778035,
1680
+ "grad_norm": 0.7598584294319153,
1681
+ "learning_rate": 1.631863882443929e-05,
1682
+ "loss": 1.0192,
1683
+ "step": 239
1684
+ },
1685
+ {
1686
+ "epoch": 0.5563604752245725,
1687
+ "grad_norm": 1.200215458869934,
1688
+ "learning_rate": 1.6303170920340296e-05,
1689
+ "loss": 1.1051,
1690
+ "step": 240
1691
+ },
1692
+ {
1693
+ "epoch": 0.5586786438713416,
1694
+ "grad_norm": 0.8878689408302307,
1695
+ "learning_rate": 1.62877030162413e-05,
1696
+ "loss": 1.0622,
1697
+ "step": 241
1698
+ },
1699
+ {
1700
+ "epoch": 0.5609968125181107,
1701
+ "grad_norm": 1.18966543674469,
1702
+ "learning_rate": 1.6272235112142306e-05,
1703
+ "loss": 1.0396,
1704
+ "step": 242
1705
+ },
1706
+ {
1707
+ "epoch": 0.5633149811648798,
1708
+ "grad_norm": 0.7161230444908142,
1709
+ "learning_rate": 1.6256767208043313e-05,
1710
+ "loss": 1.0611,
1711
+ "step": 243
1712
+ },
1713
+ {
1714
+ "epoch": 0.5656331498116488,
1715
+ "grad_norm": 0.806091845035553,
1716
+ "learning_rate": 1.624129930394432e-05,
1717
+ "loss": 1.0862,
1718
+ "step": 244
1719
+ },
1720
+ {
1721
+ "epoch": 0.5679513184584178,
1722
+ "grad_norm": 0.9060055613517761,
1723
+ "learning_rate": 1.6225831399845323e-05,
1724
+ "loss": 0.9447,
1725
+ "step": 245
1726
+ },
1727
+ {
1728
+ "epoch": 0.5702694871051869,
1729
+ "grad_norm": 0.6209578514099121,
1730
+ "learning_rate": 1.621036349574633e-05,
1731
+ "loss": 1.0291,
1732
+ "step": 246
1733
+ },
1734
+ {
1735
+ "epoch": 0.5725876557519559,
1736
+ "grad_norm": 0.8875139951705933,
1737
+ "learning_rate": 1.6194895591647333e-05,
1738
+ "loss": 0.9596,
1739
+ "step": 247
1740
+ },
1741
+ {
1742
+ "epoch": 0.5749058243987251,
1743
+ "grad_norm": 0.8894098997116089,
1744
+ "learning_rate": 1.617942768754834e-05,
1745
+ "loss": 1.0363,
1746
+ "step": 248
1747
+ },
1748
+ {
1749
+ "epoch": 0.5772239930454941,
1750
+ "grad_norm": 0.8011065125465393,
1751
+ "learning_rate": 1.6163959783449343e-05,
1752
+ "loss": 0.9735,
1753
+ "step": 249
1754
+ },
1755
+ {
1756
+ "epoch": 0.5795421616922631,
1757
+ "grad_norm": 1.0448037385940552,
1758
+ "learning_rate": 1.614849187935035e-05,
1759
+ "loss": 1.0154,
1760
+ "step": 250
1761
+ },
1762
+ {
1763
+ "epoch": 0.5818603303390322,
1764
+ "grad_norm": 0.7367164492607117,
1765
+ "learning_rate": 1.6133023975251353e-05,
1766
+ "loss": 1.0509,
1767
+ "step": 251
1768
+ },
1769
+ {
1770
+ "epoch": 0.5841784989858012,
1771
+ "grad_norm": 0.8820902705192566,
1772
+ "learning_rate": 1.611755607115236e-05,
1773
+ "loss": 1.074,
1774
+ "step": 252
1775
+ },
1776
+ {
1777
+ "epoch": 0.5864966676325702,
1778
+ "grad_norm": 0.8512645363807678,
1779
+ "learning_rate": 1.6102088167053366e-05,
1780
+ "loss": 1.0478,
1781
+ "step": 253
1782
+ },
1783
+ {
1784
+ "epoch": 0.5888148362793393,
1785
+ "grad_norm": 0.8832964897155762,
1786
+ "learning_rate": 1.6086620262954373e-05,
1787
+ "loss": 1.0502,
1788
+ "step": 254
1789
+ },
1790
+ {
1791
+ "epoch": 0.5911330049261084,
1792
+ "grad_norm": 0.7311517596244812,
1793
+ "learning_rate": 1.6071152358855376e-05,
1794
+ "loss": 0.9904,
1795
+ "step": 255
1796
+ },
1797
+ {
1798
+ "epoch": 0.5934511735728775,
1799
+ "grad_norm": 0.9509069919586182,
1800
+ "learning_rate": 1.6055684454756383e-05,
1801
+ "loss": 1.0628,
1802
+ "step": 256
1803
+ },
1804
+ {
1805
+ "epoch": 0.5957693422196465,
1806
+ "grad_norm": 0.5056537389755249,
1807
+ "learning_rate": 1.604021655065739e-05,
1808
+ "loss": 0.9579,
1809
+ "step": 257
1810
+ },
1811
+ {
1812
+ "epoch": 0.5980875108664155,
1813
+ "grad_norm": 0.6654573082923889,
1814
+ "learning_rate": 1.6024748646558393e-05,
1815
+ "loss": 0.9752,
1816
+ "step": 258
1817
+ },
1818
+ {
1819
+ "epoch": 0.6004056795131846,
1820
+ "grad_norm": 0.7242197394371033,
1821
+ "learning_rate": 1.60092807424594e-05,
1822
+ "loss": 1.0986,
1823
+ "step": 259
1824
+ },
1825
+ {
1826
+ "epoch": 0.6027238481599536,
1827
+ "grad_norm": 0.8016011118888855,
1828
+ "learning_rate": 1.5993812838360403e-05,
1829
+ "loss": 1.0193,
1830
+ "step": 260
1831
+ },
1832
+ {
1833
+ "epoch": 0.6050420168067226,
1834
+ "grad_norm": 0.8806138038635254,
1835
+ "learning_rate": 1.597834493426141e-05,
1836
+ "loss": 1.0192,
1837
+ "step": 261
1838
+ },
1839
+ {
1840
+ "epoch": 0.6073601854534918,
1841
+ "grad_norm": 1.1127492189407349,
1842
+ "learning_rate": 1.5962877030162413e-05,
1843
+ "loss": 1.1279,
1844
+ "step": 262
1845
+ },
1846
+ {
1847
+ "epoch": 0.6096783541002608,
1848
+ "grad_norm": 0.8649683594703674,
1849
+ "learning_rate": 1.594740912606342e-05,
1850
+ "loss": 1.0059,
1851
+ "step": 263
1852
+ },
1853
+ {
1854
+ "epoch": 0.6119965227470299,
1855
+ "grad_norm": 0.7879909873008728,
1856
+ "learning_rate": 1.5931941221964423e-05,
1857
+ "loss": 1.0077,
1858
+ "step": 264
1859
+ },
1860
+ {
1861
+ "epoch": 0.6143146913937989,
1862
+ "grad_norm": 0.8802973031997681,
1863
+ "learning_rate": 1.591647331786543e-05,
1864
+ "loss": 1.0484,
1865
+ "step": 265
1866
+ },
1867
+ {
1868
+ "epoch": 0.6166328600405679,
1869
+ "grad_norm": 1.8282607793807983,
1870
+ "learning_rate": 1.5901005413766436e-05,
1871
+ "loss": 0.9435,
1872
+ "step": 266
1873
+ },
1874
+ {
1875
+ "epoch": 0.618951028687337,
1876
+ "grad_norm": 0.643280565738678,
1877
+ "learning_rate": 1.5885537509667443e-05,
1878
+ "loss": 0.948,
1879
+ "step": 267
1880
+ },
1881
+ {
1882
+ "epoch": 0.621269197334106,
1883
+ "grad_norm": 0.727376401424408,
1884
+ "learning_rate": 1.587006960556845e-05,
1885
+ "loss": 0.9687,
1886
+ "step": 268
1887
+ },
1888
+ {
1889
+ "epoch": 0.6235873659808752,
1890
+ "grad_norm": 0.9891621470451355,
1891
+ "learning_rate": 1.5854601701469453e-05,
1892
+ "loss": 0.995,
1893
+ "step": 269
1894
+ },
1895
+ {
1896
+ "epoch": 0.6259055346276442,
1897
+ "grad_norm": 1.4208780527114868,
1898
+ "learning_rate": 1.583913379737046e-05,
1899
+ "loss": 1.107,
1900
+ "step": 270
1901
+ },
1902
+ {
1903
+ "epoch": 0.6282237032744132,
1904
+ "grad_norm": 0.8574293851852417,
1905
+ "learning_rate": 1.5823665893271463e-05,
1906
+ "loss": 1.0016,
1907
+ "step": 271
1908
+ },
1909
+ {
1910
+ "epoch": 0.6305418719211823,
1911
+ "grad_norm": 1.4257714748382568,
1912
+ "learning_rate": 1.580819798917247e-05,
1913
+ "loss": 1.3383,
1914
+ "step": 272
1915
+ },
1916
+ {
1917
+ "epoch": 0.6328600405679513,
1918
+ "grad_norm": 0.7138167023658752,
1919
+ "learning_rate": 1.5792730085073473e-05,
1920
+ "loss": 1.0534,
1921
+ "step": 273
1922
+ },
1923
+ {
1924
+ "epoch": 0.6351782092147203,
1925
+ "grad_norm": 1.5973683595657349,
1926
+ "learning_rate": 1.577726218097448e-05,
1927
+ "loss": 0.9784,
1928
+ "step": 274
1929
+ },
1930
+ {
1931
+ "epoch": 0.6374963778614894,
1932
+ "grad_norm": 0.6794442534446716,
1933
+ "learning_rate": 1.5761794276875483e-05,
1934
+ "loss": 0.9809,
1935
+ "step": 275
1936
+ },
1937
+ {
1938
+ "epoch": 0.6398145465082585,
1939
+ "grad_norm": 0.7616905570030212,
1940
+ "learning_rate": 1.574632637277649e-05,
1941
+ "loss": 0.9981,
1942
+ "step": 276
1943
+ },
1944
+ {
1945
+ "epoch": 0.6421327151550276,
1946
+ "grad_norm": 1.709405541419983,
1947
+ "learning_rate": 1.5730858468677496e-05,
1948
+ "loss": 1.0105,
1949
+ "step": 277
1950
+ },
1951
+ {
1952
+ "epoch": 0.6444508838017966,
1953
+ "grad_norm": 0.6796721816062927,
1954
+ "learning_rate": 1.57153905645785e-05,
1955
+ "loss": 0.9856,
1956
+ "step": 278
1957
+ },
1958
+ {
1959
+ "epoch": 0.6467690524485656,
1960
+ "grad_norm": 0.7686854600906372,
1961
+ "learning_rate": 1.5699922660479506e-05,
1962
+ "loss": 1.0534,
1963
+ "step": 279
1964
+ },
1965
+ {
1966
+ "epoch": 0.6490872210953347,
1967
+ "grad_norm": 1.0257889032363892,
1968
+ "learning_rate": 1.5684454756380513e-05,
1969
+ "loss": 0.9869,
1970
+ "step": 280
1971
+ },
1972
+ {
1973
+ "epoch": 0.6514053897421037,
1974
+ "grad_norm": 0.7100695371627808,
1975
+ "learning_rate": 1.566898685228152e-05,
1976
+ "loss": 1.0433,
1977
+ "step": 281
1978
+ },
1979
+ {
1980
+ "epoch": 0.6537235583888727,
1981
+ "grad_norm": 0.7201927900314331,
1982
+ "learning_rate": 1.5653518948182523e-05,
1983
+ "loss": 0.9854,
1984
+ "step": 282
1985
+ },
1986
+ {
1987
+ "epoch": 0.6560417270356419,
1988
+ "grad_norm": 1.5743852853775024,
1989
+ "learning_rate": 1.563805104408353e-05,
1990
+ "loss": 1.0076,
1991
+ "step": 283
1992
+ },
1993
+ {
1994
+ "epoch": 0.6583598956824109,
1995
+ "grad_norm": 0.7456634640693665,
1996
+ "learning_rate": 1.5622583139984533e-05,
1997
+ "loss": 1.0214,
1998
+ "step": 284
1999
+ },
2000
+ {
2001
+ "epoch": 0.66067806432918,
2002
+ "grad_norm": 0.6395049691200256,
2003
+ "learning_rate": 1.560711523588554e-05,
2004
+ "loss": 1.0321,
2005
+ "step": 285
2006
+ },
2007
+ {
2008
+ "epoch": 0.662996232975949,
2009
+ "grad_norm": 0.9406479001045227,
2010
+ "learning_rate": 1.5591647331786543e-05,
2011
+ "loss": 1.0387,
2012
+ "step": 286
2013
+ },
2014
+ {
2015
+ "epoch": 0.665314401622718,
2016
+ "grad_norm": 0.668521523475647,
2017
+ "learning_rate": 1.557617942768755e-05,
2018
+ "loss": 0.9458,
2019
+ "step": 287
2020
+ },
2021
+ {
2022
+ "epoch": 0.6676325702694871,
2023
+ "grad_norm": 0.8241714239120483,
2024
+ "learning_rate": 1.5560711523588553e-05,
2025
+ "loss": 0.994,
2026
+ "step": 288
2027
+ },
2028
+ {
2029
+ "epoch": 0.6699507389162561,
2030
+ "grad_norm": 0.7906151413917542,
2031
+ "learning_rate": 1.554524361948956e-05,
2032
+ "loss": 1.0127,
2033
+ "step": 289
2034
+ },
2035
+ {
2036
+ "epoch": 0.6722689075630253,
2037
+ "grad_norm": 3.7441999912261963,
2038
+ "learning_rate": 1.5529775715390566e-05,
2039
+ "loss": 1.1235,
2040
+ "step": 290
2041
+ },
2042
+ {
2043
+ "epoch": 0.6745870762097943,
2044
+ "grad_norm": 0.7488934397697449,
2045
+ "learning_rate": 1.5514307811291573e-05,
2046
+ "loss": 1.0132,
2047
+ "step": 291
2048
+ },
2049
+ {
2050
+ "epoch": 0.6769052448565633,
2051
+ "grad_norm": 0.6223219037055969,
2052
+ "learning_rate": 1.5498839907192576e-05,
2053
+ "loss": 0.9831,
2054
+ "step": 292
2055
+ },
2056
+ {
2057
+ "epoch": 0.6792234135033324,
2058
+ "grad_norm": 0.9072495698928833,
2059
+ "learning_rate": 1.5483372003093583e-05,
2060
+ "loss": 0.9978,
2061
+ "step": 293
2062
+ },
2063
+ {
2064
+ "epoch": 0.6815415821501014,
2065
+ "grad_norm": 1.0984013080596924,
2066
+ "learning_rate": 1.546790409899459e-05,
2067
+ "loss": 0.9942,
2068
+ "step": 294
2069
+ },
2070
+ {
2071
+ "epoch": 0.6838597507968704,
2072
+ "grad_norm": 0.8855274319648743,
2073
+ "learning_rate": 1.5452436194895593e-05,
2074
+ "loss": 1.0135,
2075
+ "step": 295
2076
+ },
2077
+ {
2078
+ "epoch": 0.6861779194436395,
2079
+ "grad_norm": 0.7710789442062378,
2080
+ "learning_rate": 1.54369682907966e-05,
2081
+ "loss": 1.034,
2082
+ "step": 296
2083
+ },
2084
+ {
2085
+ "epoch": 0.6884960880904086,
2086
+ "grad_norm": 19.71253204345703,
2087
+ "learning_rate": 1.5421500386697603e-05,
2088
+ "loss": 0.9598,
2089
+ "step": 297
2090
+ },
2091
+ {
2092
+ "epoch": 0.6908142567371777,
2093
+ "grad_norm": 0.930057942867279,
2094
+ "learning_rate": 1.540603248259861e-05,
2095
+ "loss": 0.9755,
2096
+ "step": 298
2097
+ },
2098
+ {
2099
+ "epoch": 0.6931324253839467,
2100
+ "grad_norm": 1.0376302003860474,
2101
+ "learning_rate": 1.5390564578499613e-05,
2102
+ "loss": 1.0324,
2103
+ "step": 299
2104
+ },
2105
+ {
2106
+ "epoch": 0.6954505940307157,
2107
+ "grad_norm": 0.8160769939422607,
2108
+ "learning_rate": 1.537509667440062e-05,
2109
+ "loss": 1.0805,
2110
+ "step": 300
2111
+ }
2112
+ ],
2113
+ "logging_steps": 1,
2114
+ "max_steps": 1293,
2115
+ "num_input_tokens_seen": 0,
2116
+ "num_train_epochs": 3,
2117
+ "save_steps": 100,
2118
+ "stateful_callbacks": {
2119
+ "TrainerControl": {
2120
+ "args": {
2121
+ "should_epoch_stop": false,
2122
+ "should_evaluate": false,
2123
+ "should_log": false,
2124
+ "should_save": true,
2125
+ "should_training_stop": false
2126
+ },
2127
+ "attributes": {}
2128
+ }
2129
+ },
2130
+ "total_flos": 1.0925139234599731e+19,
2131
+ "train_batch_size": 2,
2132
+ "trial_name": null,
2133
+ "trial_params": null
2134
+ }
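The block above is the log portion of a Hugging Face Trainer state file (conventionally saved as trainer_state.json inside a checkpoint directory): each entry records the epoch, gradient norm, learning rate, and loss for one optimizer step, and the learning-rate column drops by roughly 2e-5 / 1293 ≈ 1.55e-8 per step, which is consistent with a linear decay over the 1293 max_steps recorded in the trailer. Below is a minimal sketch of how such a file could be inspected once downloaded; the local file name and path are assumptions, not part of the commit.

```python
import json

# Minimal sketch, assuming the state shown above was saved locally as
# "trainer_state.json" (the conventional name written by transformers.Trainer).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (other entry types may not).
logged = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logged]
losses = [e["loss"] for e in logged]
lrs = [e["learning_rate"] for e in logged]

print(f"{len(logged)} logged steps out of max_steps={state['max_steps']}")
print(f"loss {losses[0]:.4f} -> {losses[-1]:.4f} (steps {steps[0]}..{steps[-1]})")
print(f"learning rate {lrs[0]:.3e} -> {lrs[-1]:.3e}")
```

Plotting `steps` against `losses` with any charting library reproduces the noisy but gradually decreasing fine-tuning curve suggested by the raw numbers above.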
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aaf0b1b95273dbdf80afb35d7fc535ee7d5ea84d998c877b4bc52a6963322dcc
3
+ size 7352
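training_args.bin is stored through Git LFS, so the three lines above are only the pointer (object hash and size), not the 7,352-byte binary itself. Assuming it is the usual pickled TrainingArguments object that transformers.Trainer writes alongside its checkpoints, it could be inspected with a sketch like the one below; the repository id is a hypothetical placeholder, not taken from this commit.

```python
import torch
from huggingface_hub import hf_hub_download

# Sketch only: resolve the LFS pointer to the real file, then unpickle it.
# "user/repo" is a hypothetical repository id. weights_only=False is needed on
# recent PyTorch because the file holds a pickled TrainingArguments object
# rather than plain tensors -- only do this for repositories you trust.
path = hf_hub_download(repo_id="user/repo", filename="training_args.bin")
args = torch.load(path, weights_only=False)

print(args.learning_rate)                # configured peak learning rate
print(args.per_device_train_batch_size)  # compare with "train_batch_size": 2 above
print(args.num_train_epochs)             # compare with "num_train_epochs": 3 above
```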