hardlyworking committed
Commit adfc114 · verified · 1 Parent(s): 69ab0a6

Delete checkpoint-228
checkpoint-228/added_tokens.json DELETED
@@ -1,5 +0,0 @@
- {
-   "<|endofprompt|>": 100276,
-   "<|im_end|>": 100265,
-   "<|im_start|>": 100264
- }
 
checkpoint-228/chat_template.jinja DELETED
@@ -1,4 +0,0 @@
- {% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '
- ' + message['content'] + '<|im_end|>' + '
- '}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant
- ' }}{% endif %}
 
checkpoint-228/config.json DELETED
@@ -1,32 +0,0 @@
- {
-   "architectures": [
-     "LlamaForCausalLM"
-   ],
-   "attention_bias": false,
-   "attention_dropout": 0.0,
-   "bos_token_id": 100257,
-   "embd_pdrop": 0.0,
-   "eos_token_id": 100265,
-   "head_dim": 128,
-   "hidden_act": "silu",
-   "hidden_size": 4096,
-   "initializer_range": 0.02,
-   "intermediate_size": 8192,
-   "max_position_embeddings": 262144,
-   "mlp_bias": false,
-   "model_type": "llama",
-   "num_attention_heads": 32,
-   "num_hidden_layers": 28,
-   "num_key_value_heads": 8,
-   "pad_token_id": 100257,
-   "pretraining_tp": 1,
-   "resid_pdrop": 0.0,
-   "rms_norm_eps": 1e-06,
-   "rope_scaling": null,
-   "rope_theta": 128000000,
-   "tie_word_embeddings": true,
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.52.4",
-   "use_cache": false,
-   "vocab_size": 102400
- }
 
checkpoint-228/generation_config.json DELETED
@@ -1,9 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 100257,
-   "do_sample": true,
-   "eos_token_id": 100265,
-   "pad_token_id": 100257,
-   "transformers_version": "4.52.4",
-   "use_cache": false
- }
 
checkpoint-228/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-228/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:90262fb12654cd0f0fa65c06486ad9ab261bfc8dfed43a6191fedbafd4189a67
- size 4983077832
 
checkpoint-228/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:75672dfb25c87d01482b8ad04da1cb099a3d0b21a802f705cd574fd6448fa4d4
- size 3842234168
 
checkpoint-228/model.safetensors.index.json DELETED
@@ -1,261 +0,0 @@
- {
-   "metadata": {
-     "total_size": 8825282560
-   },
-   "weight_map": {
-     "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.14.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.norm.weight": "model-00002-of-00002.safetensors"
-   }
- }
 
checkpoint-228/optimizer.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:941af5b7b2b301f7de1f99119b4f89399bf3b35be2517cbb3a6d33bc6c750da2
- size 11466962878
 
checkpoint-228/rng_state.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:b1a97db8e41139aa1239ba7fb79ddeb0af5998c6305a440c1fe182e6ad02f2f5
- size 14244
 
checkpoint-228/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:09529ec298915bd02a86b1029ceb1b7af7fec86183593774d24da81d03b08254
- size 1064
 
checkpoint-228/special_tokens_map.json DELETED
@@ -1,30 +0,0 @@
- {
-   "bos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|im_end|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "unk_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   }
- }
 
checkpoint-228/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoint-228/tokenizer_config.json DELETED
@@ -1,69 +0,0 @@
- {
-   "add_prefix_space": false,
-   "added_tokens_decoder": {
-     "100257": {
-       "content": "<|endoftext|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100258": {
-       "content": "<|fim_prefix|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100259": {
-       "content": "<|fim_middle|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100260": {
-       "content": "<|fim_suffix|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100264": {
-       "content": "<|im_start|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": false
-     },
-     "100265": {
-       "content": "<|im_end|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "100276": {
-       "content": "<|endofprompt|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "<|endoftext|>",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "<|im_end|>",
-   "extra_special_tokens": {},
-   "model_max_length": 262144,
-   "pad_token": "<|endoftext|>",
-   "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": "<|endoftext|>"
- }
 
checkpoint-228/trainer_state.json DELETED
@@ -1,1630 +0,0 @@
- {
-   "best_global_step": null,
-   "best_metric": null,
-   "best_model_checkpoint": null,
-   "epoch": 1.9868995633187772,
-   "eval_steps": 500,
-   "global_step": 228,
-   "is_hyper_param_search": false,
-   "is_local_process_zero": true,
-   "is_world_process_zero": true,
-   "log_history": [
-     {
-       "epoch": 0.008733624454148471,
-       "grad_norm": 2.234375,
-       "learning_rate": 0.0,
-       "loss": 2.578,
-       "step": 1
-     },
-     {
-       "epoch": 0.017467248908296942,
-       "grad_norm": 2.515625,
-       "learning_rate": 4.5454545454545457e-07,
-       "loss": 2.6324,
-       "step": 2
-     },
-     {
-       "epoch": 0.026200873362445413,
-       "grad_norm": 2.546875,
-       "learning_rate": 9.090909090909091e-07,
-       "loss": 2.6563,
-       "step": 3
-     },
-     {
-       "epoch": 0.034934497816593885,
-       "grad_norm": 2.21875,
-       "learning_rate": 1.3636363636363636e-06,
-       "loss": 2.5986,
-       "step": 4
-     },
-     {
-       "epoch": 0.043668122270742356,
-       "grad_norm": 2.40625,
-       "learning_rate": 1.8181818181818183e-06,
-       "loss": 2.6456,
-       "step": 5
-     },
-     {
-       "epoch": 0.05240174672489083,
-       "grad_norm": 2.28125,
-       "learning_rate": 2.2727272727272728e-06,
-       "loss": 2.638,
-       "step": 6
-     },
-     {
-       "epoch": 0.0611353711790393,
-       "grad_norm": 2.421875,
-       "learning_rate": 2.7272727272727272e-06,
-       "loss": 2.6678,
-       "step": 7
-     },
-     {
-       "epoch": 0.06986899563318777,
-       "grad_norm": 2.328125,
-       "learning_rate": 3.181818181818182e-06,
-       "loss": 2.6304,
-       "step": 8
-     },
-     {
-       "epoch": 0.07860262008733625,
-       "grad_norm": 2.046875,
-       "learning_rate": 3.6363636363636366e-06,
-       "loss": 2.6253,
-       "step": 9
-     },
-     {
-       "epoch": 0.08733624454148471,
-       "grad_norm": 2.109375,
-       "learning_rate": 4.0909090909090915e-06,
-       "loss": 2.6161,
-       "step": 10
-     },
-     {
-       "epoch": 0.09606986899563319,
-       "grad_norm": 2.015625,
-       "learning_rate": 4.5454545454545455e-06,
-       "loss": 2.6971,
-       "step": 11
-     },
-     {
-       "epoch": 0.10480349344978165,
-       "grad_norm": 1.75,
-       "learning_rate": 5e-06,
-       "loss": 2.5767,
-       "step": 12
-     },
-     {
-       "epoch": 0.11353711790393013,
-       "grad_norm": 1.703125,
-       "learning_rate": 5.4545454545454545e-06,
-       "loss": 2.5748,
-       "step": 13
-     },
-     {
-       "epoch": 0.1222707423580786,
-       "grad_norm": 1.6328125,
-       "learning_rate": 5.90909090909091e-06,
-       "loss": 2.5845,
-       "step": 14
-     },
-     {
-       "epoch": 0.13100436681222707,
-       "grad_norm": 1.5234375,
-       "learning_rate": 6.363636363636364e-06,
-       "loss": 2.6236,
-       "step": 15
-     },
-     {
-       "epoch": 0.13973799126637554,
-       "grad_norm": 1.3359375,
-       "learning_rate": 6.818181818181818e-06,
-       "loss": 2.6045,
-       "step": 16
-     },
-     {
-       "epoch": 0.14847161572052403,
-       "grad_norm": 1.296875,
-       "learning_rate": 7.272727272727273e-06,
-       "loss": 2.617,
-       "step": 17
-     },
-     {
-       "epoch": 0.1572052401746725,
-       "grad_norm": 1.15625,
-       "learning_rate": 7.727272727272727e-06,
-       "loss": 2.5447,
-       "step": 18
-     },
-     {
-       "epoch": 0.16593886462882096,
-       "grad_norm": 1.1796875,
-       "learning_rate": 8.181818181818183e-06,
-       "loss": 2.6597,
-       "step": 19
-     },
-     {
-       "epoch": 0.17467248908296942,
-       "grad_norm": 1.1328125,
-       "learning_rate": 8.636363636363637e-06,
-       "loss": 2.5515,
-       "step": 20
-     },
-     {
-       "epoch": 0.18340611353711792,
-       "grad_norm": 1.140625,
-       "learning_rate": 9.090909090909091e-06,
-       "loss": 2.5699,
-       "step": 21
-     },
-     {
-       "epoch": 0.19213973799126638,
-       "grad_norm": 1.1015625,
-       "learning_rate": 9.545454545454547e-06,
-       "loss": 2.6027,
-       "step": 22
-     },
-     {
-       "epoch": 0.20087336244541484,
-       "grad_norm": 1.109375,
-       "learning_rate": 1e-05,
-       "loss": 2.5589,
-       "step": 23
-     },
-     {
-       "epoch": 0.2096069868995633,
-       "grad_norm": 1.0859375,
-       "learning_rate": 9.999869003890174e-06,
-       "loss": 2.5869,
-       "step": 24
-     },
-     {
-       "epoch": 0.2183406113537118,
-       "grad_norm": 1.078125,
-       "learning_rate": 9.999476022424688e-06,
-       "loss": 2.5414,
-       "step": 25
-     },
-     {
-       "epoch": 0.22707423580786026,
-       "grad_norm": 1.03125,
-       "learning_rate": 9.998821076195158e-06,
-       "loss": 2.6005,
-       "step": 26
-     },
-     {
-       "epoch": 0.23580786026200873,
-       "grad_norm": 1.078125,
-       "learning_rate": 9.997904199519748e-06,
-       "loss": 2.5769,
-       "step": 27
-     },
-     {
-       "epoch": 0.2445414847161572,
-       "grad_norm": 1.0703125,
-       "learning_rate": 9.996725440441368e-06,
-       "loss": 2.6084,
-       "step": 28
-     },
-     {
-       "epoch": 0.25327510917030566,
-       "grad_norm": 0.98828125,
-       "learning_rate": 9.995284860725162e-06,
-       "loss": 2.6237,
-       "step": 29
-     },
-     {
-       "epoch": 0.26200873362445415,
-       "grad_norm": 0.9453125,
-       "learning_rate": 9.993582535855265e-06,
-       "loss": 2.577,
-       "step": 30
-     },
-     {
-       "epoch": 0.27074235807860264,
-       "grad_norm": 0.953125,
-       "learning_rate": 9.991618555030848e-06,
-       "loss": 2.591,
-       "step": 31
-     },
-     {
-       "epoch": 0.2794759825327511,
-       "grad_norm": 0.94921875,
-       "learning_rate": 9.989393021161455e-06,
-       "loss": 2.5533,
-       "step": 32
-     },
-     {
-       "epoch": 0.28820960698689957,
-       "grad_norm": 0.99609375,
-       "learning_rate": 9.986906050861595e-06,
-       "loss": 2.5694,
-       "step": 33
-     },
-     {
-       "epoch": 0.29694323144104806,
-       "grad_norm": 0.9765625,
-       "learning_rate": 9.98415777444464e-06,
-       "loss": 2.53,
-       "step": 34
-     },
-     {
-       "epoch": 0.3056768558951965,
-       "grad_norm": 0.9296875,
-       "learning_rate": 9.981148335916e-06,
-       "loss": 2.5486,
-       "step": 35
-     },
-     {
-       "epoch": 0.314410480349345,
-       "grad_norm": 0.92578125,
-       "learning_rate": 9.977877892965572e-06,
-       "loss": 2.5649,
-       "step": 36
-     },
-     {
-       "epoch": 0.3231441048034934,
-       "grad_norm": 0.9609375,
-       "learning_rate": 9.974346616959476e-06,
-       "loss": 2.6342,
-       "step": 37
-     },
-     {
-       "epoch": 0.3318777292576419,
-       "grad_norm": 0.9140625,
-       "learning_rate": 9.970554692931081e-06,
-       "loss": 2.5869,
-       "step": 38
-     },
-     {
-       "epoch": 0.3406113537117904,
-       "grad_norm": 0.90234375,
-       "learning_rate": 9.966502319571303e-06,
-       "loss": 2.5639,
-       "step": 39
-     },
-     {
-       "epoch": 0.34934497816593885,
-       "grad_norm": 0.9375,
-       "learning_rate": 9.962189709218202e-06,
-       "loss": 2.5443,
-       "step": 40
-     },
-     {
-       "epoch": 0.35807860262008734,
-       "grad_norm": 0.9453125,
-       "learning_rate": 9.95761708784585e-06,
-       "loss": 2.5412,
-       "step": 41
-     },
-     {
-       "epoch": 0.36681222707423583,
-       "grad_norm": 0.8828125,
-       "learning_rate": 9.952784695052494e-06,
-       "loss": 2.5115,
-       "step": 42
-     },
-     {
-       "epoch": 0.37554585152838427,
-       "grad_norm": 0.99609375,
-       "learning_rate": 9.94769278404799e-06,
-       "loss": 2.4547,
-       "step": 43
-     },
-     {
-       "epoch": 0.38427947598253276,
-       "grad_norm": 0.90234375,
-       "learning_rate": 9.942341621640558e-06,
-       "loss": 2.5528,
-       "step": 44
-     },
-     {
-       "epoch": 0.3930131004366812,
-       "grad_norm": 0.96484375,
-       "learning_rate": 9.936731488222776e-06,
-       "loss": 2.5629,
-       "step": 45
-     },
-     {
-       "epoch": 0.4017467248908297,
-       "grad_norm": 0.89453125,
-       "learning_rate": 9.930862677756912e-06,
-       "loss": 2.5632,
-       "step": 46
-     },
-     {
-       "epoch": 0.4104803493449782,
-       "grad_norm": 0.890625,
-       "learning_rate": 9.924735497759497e-06,
-       "loss": 2.5625,
-       "step": 47
-     },
-     {
-       "epoch": 0.4192139737991266,
-       "grad_norm": 0.875,
-       "learning_rate": 9.918350269285228e-06,
-       "loss": 2.5522,
-       "step": 48
-     },
-     {
-       "epoch": 0.4279475982532751,
-       "grad_norm": 0.90234375,
-       "learning_rate": 9.911707326910145e-06,
-       "loss": 2.6349,
-       "step": 49
-     },
-     {
-       "epoch": 0.4366812227074236,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.904807018714088e-06,
-       "loss": 2.6174,
-       "step": 50
-     },
-     {
-       "epoch": 0.44541484716157204,
-       "grad_norm": 0.89453125,
-       "learning_rate": 9.897649706262474e-06,
-       "loss": 2.5732,
-       "step": 51
-     },
-     {
-       "epoch": 0.45414847161572053,
-       "grad_norm": 0.91796875,
-       "learning_rate": 9.890235764587332e-06,
-       "loss": 2.6071,
-       "step": 52
-     },
-     {
-       "epoch": 0.462882096069869,
-       "grad_norm": 0.89453125,
-       "learning_rate": 9.882565582167673e-06,
-       "loss": 2.5587,
-       "step": 53
-     },
-     {
-       "epoch": 0.47161572052401746,
-       "grad_norm": 0.88671875,
-       "learning_rate": 9.874639560909118e-06,
-       "loss": 2.5676,
-       "step": 54
-     },
-     {
-       "epoch": 0.48034934497816595,
-       "grad_norm": 0.890625,
-       "learning_rate": 9.866458116122852e-06,
-       "loss": 2.515,
-       "step": 55
-     },
-     {
-       "epoch": 0.4890829694323144,
-       "grad_norm": 0.8984375,
-       "learning_rate": 9.858021676503846e-06,
-       "loss": 2.5355,
-       "step": 56
-     },
-     {
-       "epoch": 0.4978165938864629,
-       "grad_norm": 0.90234375,
-       "learning_rate": 9.849330684108409e-06,
-       "loss": 2.5798,
-       "step": 57
-     },
-     {
-       "epoch": 0.5065502183406113,
-       "grad_norm": 0.87890625,
-       "learning_rate": 9.840385594331022e-06,
-       "loss": 2.5391,
-       "step": 58
-     },
-     {
-       "epoch": 0.5152838427947598,
-       "grad_norm": 0.90625,
-       "learning_rate": 9.831186875880467e-06,
-       "loss": 2.521,
-       "step": 59
-     },
-     {
-       "epoch": 0.5240174672489083,
-       "grad_norm": 0.91796875,
-       "learning_rate": 9.82173501075528e-06,
-       "loss": 2.5553,
-       "step": 60
-     },
-     {
-       "epoch": 0.5327510917030568,
-       "grad_norm": 0.890625,
-       "learning_rate": 9.812030494218484e-06,
-       "loss": 2.6096,
-       "step": 61
-     },
-     {
-       "epoch": 0.5414847161572053,
-       "grad_norm": 0.88671875,
-       "learning_rate": 9.802073834771642e-06,
-       "loss": 2.5513,
-       "step": 62
-     },
-     {
-       "epoch": 0.5502183406113537,
-       "grad_norm": 0.96484375,
-       "learning_rate": 9.79186555412822e-06,
-       "loss": 2.6127,
-       "step": 63
-     },
-     {
-       "epoch": 0.5589519650655022,
-       "grad_norm": 0.90234375,
-       "learning_rate": 9.781406187186237e-06,
-       "loss": 2.5172,
-       "step": 64
-     },
-     {
-       "epoch": 0.5676855895196506,
-       "grad_norm": 0.87890625,
-       "learning_rate": 9.770696282000245e-06,
-       "loss": 2.5562,
-       "step": 65
-     },
-     {
-       "epoch": 0.5764192139737991,
-       "grad_norm": 0.88671875,
-       "learning_rate": 9.759736399752611e-06,
-       "loss": 2.5538,
-       "step": 66
-     },
-     {
-       "epoch": 0.5851528384279476,
-       "grad_norm": 0.89453125,
-       "learning_rate": 9.748527114724111e-06,
-       "loss": 2.5661,
-       "step": 67
-     },
-     {
-       "epoch": 0.5938864628820961,
-       "grad_norm": 0.91796875,
-       "learning_rate": 9.737069014263837e-06,
-       "loss": 2.6247,
-       "step": 68
-     },
-     {
-       "epoch": 0.6026200873362445,
-       "grad_norm": 0.9296875,
-       "learning_rate": 9.725362698758425e-06,
-       "loss": 2.5432,
-       "step": 69
-     },
-     {
-       "epoch": 0.611353711790393,
-       "grad_norm": 0.890625,
-       "learning_rate": 9.713408781600588e-06,
-       "loss": 2.51,
-       "step": 70
-     },
-     {
-       "epoch": 0.6200873362445415,
-       "grad_norm": 0.8828125,
-       "learning_rate": 9.701207889156989e-06,
-       "loss": 2.5114,
-       "step": 71
-     },
-     {
-       "epoch": 0.62882096069869,
-       "grad_norm": 0.9296875,
-       "learning_rate": 9.688760660735403e-06,
-       "loss": 2.5707,
-       "step": 72
-     },
-     {
-       "epoch": 0.6375545851528385,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.676067748551232e-06,
-       "loss": 2.579,
-       "step": 73
-     },
-     {
-       "epoch": 0.6462882096069869,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.663129817693323e-06,
-       "loss": 2.5844,
-       "step": 74
-     },
-     {
-       "epoch": 0.6550218340611353,
-       "grad_norm": 0.9296875,
-       "learning_rate": 9.64994754608912e-06,
-       "loss": 2.5768,
-       "step": 75
-     },
-     {
-       "epoch": 0.6637554585152838,
-       "grad_norm": 0.890625,
-       "learning_rate": 9.636521624469144e-06,
-       "loss": 2.5568,
-       "step": 76
-     },
-     {
-       "epoch": 0.6724890829694323,
-       "grad_norm": 0.91796875,
-       "learning_rate": 9.622852756330797e-06,
-       "loss": 2.5348,
-       "step": 77
-     },
-     {
-       "epoch": 0.6812227074235808,
-       "grad_norm": 0.88671875,
-       "learning_rate": 9.608941657901496e-06,
-       "loss": 2.5221,
-       "step": 78
-     },
-     {
-       "epoch": 0.6899563318777293,
-       "grad_norm": 0.921875,
-       "learning_rate": 9.594789058101154e-06,
-       "loss": 2.6385,
-       "step": 79
-     },
-     {
-       "epoch": 0.6986899563318777,
-       "grad_norm": 0.91796875,
-       "learning_rate": 9.580395698503979e-06,
-       "loss": 2.5107,
-       "step": 80
-     },
-     {
-       "epoch": 0.7074235807860262,
-       "grad_norm": 0.953125,
-       "learning_rate": 9.565762333299616e-06,
-       "loss": 2.5751,
-       "step": 81
-     },
-     {
-       "epoch": 0.7161572052401747,
-       "grad_norm": 0.93359375,
-       "learning_rate": 9.550889729253631e-06,
-       "loss": 2.6106,
-       "step": 82
-     },
-     {
-       "epoch": 0.7248908296943232,
-       "grad_norm": 0.921875,
-       "learning_rate": 9.535778665667334e-06,
-       "loss": 2.6177,
-       "step": 83
-     },
-     {
-       "epoch": 0.7336244541484717,
-       "grad_norm": 1.0,
-       "learning_rate": 9.520429934336944e-06,
-       "loss": 2.5421,
-       "step": 84
-     },
-     {
-       "epoch": 0.74235807860262,
-       "grad_norm": 0.90234375,
-       "learning_rate": 9.504844339512096e-06,
-       "loss": 2.6234,
-       "step": 85
-     },
-     {
-       "epoch": 0.7510917030567685,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.48902269785371e-06,
-       "loss": 2.5791,
-       "step": 86
-     },
-     {
-       "epoch": 0.759825327510917,
-       "grad_norm": 0.8671875,
-       "learning_rate": 9.472965838391187e-06,
-       "loss": 2.5052,
-       "step": 87
-     },
-     {
-       "epoch": 0.7685589519650655,
-       "grad_norm": 0.984375,
-       "learning_rate": 9.456674602478975e-06,
-       "loss": 2.5445,
-       "step": 88
-     },
-     {
-       "epoch": 0.777292576419214,
-       "grad_norm": 1.1953125,
-       "learning_rate": 9.44014984375249e-06,
-       "loss": 2.5956,
-       "step": 89
-     },
-     {
-       "epoch": 0.7860262008733624,
-       "grad_norm": 0.87109375,
-       "learning_rate": 9.423392428083374e-06,
-       "loss": 2.5261,
-       "step": 90
-     },
-     {
-       "epoch": 0.7947598253275109,
-       "grad_norm": 0.89453125,
-       "learning_rate": 9.406403233534134e-06,
-       "loss": 2.5626,
-       "step": 91
-     },
-     {
-       "epoch": 0.8034934497816594,
-       "grad_norm": 0.89453125,
-       "learning_rate": 9.389183150312124e-06,
-       "loss": 2.5228,
-       "step": 92
-     },
-     {
-       "epoch": 0.8122270742358079,
-       "grad_norm": 0.875,
-       "learning_rate": 9.371733080722911e-06,
-       "loss": 2.5494,
-       "step": 93
-     },
-     {
-       "epoch": 0.8209606986899564,
-       "grad_norm": 0.93359375,
-       "learning_rate": 9.354053939122988e-06,
-       "loss": 2.6738,
-       "step": 94
-     },
-     {
-       "epoch": 0.8296943231441049,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.33614665187187e-06,
-       "loss": 2.5862,
-       "step": 95
-     },
-     {
-       "epoch": 0.8384279475982532,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.318012157283533e-06,
-       "loss": 2.57,
-       "step": 96
-     },
-     {
-       "epoch": 0.8471615720524017,
-       "grad_norm": 0.96875,
-       "learning_rate": 9.299651405577286e-06,
-       "loss": 2.6046,
-       "step": 97
-     },
-     {
-       "epoch": 0.8558951965065502,
-       "grad_norm": 0.9140625,
-       "learning_rate": 9.28106535882794e-06,
-       "loss": 2.5911,
-       "step": 98
-     },
-     {
-       "epoch": 0.8646288209606987,
-       "grad_norm": 0.90234375,
-       "learning_rate": 9.262254990915427e-06,
-       "loss": 2.5898,
-       "step": 99
-     },
-     {
-       "epoch": 0.8733624454148472,
-       "grad_norm": 0.93359375,
-       "learning_rate": 9.243221287473755e-06,
-       "loss": 2.6268,
-       "step": 100
-     },
-     {
-       "epoch": 0.8820960698689956,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.223965245839367e-06,
-       "loss": 2.5823,
-       "step": 101
-     },
-     {
-       "epoch": 0.8908296943231441,
-       "grad_norm": 0.91796875,
-       "learning_rate": 9.20448787499888e-06,
-       "loss": 2.6937,
-       "step": 102
-     },
-     {
-       "epoch": 0.8995633187772926,
-       "grad_norm": 0.9296875,
-       "learning_rate": 9.184790195536217e-06,
-       "loss": 2.5609,
-       "step": 103
-     },
-     {
-       "epoch": 0.9082969432314411,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.164873239579133e-06,
-       "loss": 2.5716,
-       "step": 104
-     },
-     {
-       "epoch": 0.9170305676855895,
-       "grad_norm": 0.99609375,
-       "learning_rate": 9.144738050745129e-06,
-       "loss": 2.5099,
-       "step": 105
-     },
-     {
-       "epoch": 0.925764192139738,
-       "grad_norm": 0.91015625,
-       "learning_rate": 9.124385684086762e-06,
-       "loss": 2.5131,
-       "step": 106
-     },
-     {
-       "epoch": 0.9344978165938864,
-       "grad_norm": 1.015625,
-       "learning_rate": 9.103817206036383e-06,
-       "loss": 2.5407,
-       "step": 107
-     },
-     {
-       "epoch": 0.9432314410480349,
-       "grad_norm": 0.9609375,
-       "learning_rate": 9.08303369435023e-06,
-       "loss": 2.5364,
-       "step": 108
-     },
-     {
-       "epoch": 0.9519650655021834,
-       "grad_norm": 0.9296875,
-       "learning_rate": 9.062036238051978e-06,
-       "loss": 2.6053,
-       "step": 109
-     },
-     {
-       "epoch": 0.9606986899563319,
-       "grad_norm": 0.9296875,
-       "learning_rate": 9.040825937375664e-06,
-       "loss": 2.563,
-       "step": 110
-     },
-     {
-       "epoch": 0.9694323144104804,
-       "grad_norm": 0.9375,
-       "learning_rate": 9.019403903708036e-06,
-       "loss": 2.5744,
-       "step": 111
-     },
-     {
-       "epoch": 0.9781659388646288,
-       "grad_norm": 0.93359375,
-       "learning_rate": 8.997771259530324e-06,
-       "loss": 2.5768,
-       "step": 112
-     },
-     {
-       "epoch": 0.9868995633187773,
-       "grad_norm": 0.96484375,
-       "learning_rate": 8.975929138359423e-06,
-       "loss": 2.577,
-       "step": 113
-     },
-     {
-       "epoch": 0.9956331877729258,
-       "grad_norm": 0.92578125,
-       "learning_rate": 8.953878684688492e-06,
-       "loss": 2.6206,
-       "step": 114
-     },
-     {
-       "epoch": 1.0,
-       "grad_norm": 1.8828125,
-       "learning_rate": 8.931621053926998e-06,
-       "loss": 2.5724,
-       "step": 115
-     },
-     {
-       "epoch": 1.0087336244541485,
-       "grad_norm": 0.87890625,
-       "learning_rate": 8.90915741234015e-06,
-       "loss": 2.4993,
-       "step": 116
-     },
-     {
-       "epoch": 1.017467248908297,
-       "grad_norm": 0.9296875,
-       "learning_rate": 8.886488936987817e-06,
-       "loss": 2.5537,
-       "step": 117
-     },
-     {
-       "epoch": 1.0262008733624455,
-       "grad_norm": 0.93359375,
-       "learning_rate": 8.863616815662833e-06,
-       "loss": 2.6468,
-       "step": 118
-     },
-     {
-       "epoch": 1.034934497816594,
-       "grad_norm": 0.91796875,
-       "learning_rate": 8.840542246828763e-06,
-       "loss": 2.5739,
-       "step": 119
-     },
-     {
-       "epoch": 1.0436681222707425,
-       "grad_norm": 0.96484375,
-       "learning_rate": 8.817266439557109e-06,
-       "loss": 2.5705,
-       "step": 120
-     },
-     {
-       "epoch": 1.0524017467248907,
-       "grad_norm": 0.89453125,
-       "learning_rate": 8.793790613463956e-06,
-       "loss": 2.587,
-       "step": 121
-     },
-     {
-       "epoch": 1.0611353711790392,
-       "grad_norm": 0.88671875,
-       "learning_rate": 8.770115998646057e-06,
-       "loss": 2.522,
-       "step": 122
-     },
-     {
-       "epoch": 1.0698689956331877,
-       "grad_norm": 0.91015625,
-       "learning_rate": 8.746243835616392e-06,
-       "loss": 2.5717,
-       "step": 123
-     },
-     {
-       "epoch": 1.0786026200873362,
-       "grad_norm": 0.92578125,
-       "learning_rate": 8.722175375239157e-06,
-       "loss": 2.5287,
-       "step": 124
-     },
-     {
-       "epoch": 1.0873362445414847,
-       "grad_norm": 0.8828125,
-       "learning_rate": 8.697911878664222e-06,
-       "loss": 2.4822,
-       "step": 125
-     },
-     {
-       "epoch": 1.0960698689956332,
-       "grad_norm": 0.9296875,
-       "learning_rate": 8.673454617261054e-06,
-       "loss": 2.5232,
-       "step": 126
-     },
-     {
-       "epoch": 1.1048034934497817,
-       "grad_norm": 0.87890625,
-       "learning_rate": 8.648804872552092e-06,
-       "loss": 2.5506,
-       "step": 127
-     },
-     {
-       "epoch": 1.1135371179039302,
-       "grad_norm": 0.8984375,
-       "learning_rate": 8.6239639361456e-06,
-       "loss": 2.6104,
-       "step": 128
-     },
-     {
-       "epoch": 1.1222707423580787,
-       "grad_norm": 0.95703125,
-       "learning_rate": 8.598933109667995e-06,
-       "loss": 2.642,
-       "step": 129
-     },
-     {
-       "epoch": 1.1310043668122272,
-       "grad_norm": 0.90625,
-       "learning_rate": 8.573713704695633e-06,
-       "loss": 2.5569,
-       "step": 130
-     },
-     {
-       "epoch": 1.1397379912663754,
-       "grad_norm": 0.91796875,
-       "learning_rate": 8.548307042686093e-06,
-       "loss": 2.567,
-       "step": 131
-     },
-     {
-       "epoch": 1.1484716157205241,
-       "grad_norm": 0.90625,
-       "learning_rate": 8.522714454908926e-06,
-       "loss": 2.5177,
-       "step": 132
-     },
-     {
-       "epoch": 1.1572052401746724,
-       "grad_norm": 0.91796875,
-       "learning_rate": 8.496937282375912e-06,
-       "loss": 2.5793,
-       "step": 133
-     },
-     {
-       "epoch": 1.165938864628821,
-       "grad_norm": 0.8984375,
-       "learning_rate": 8.470976875770776e-06,
-       "loss": 2.543,
-       "step": 134
-     },
-     {
-       "epoch": 1.1746724890829694,
-       "grad_norm": 0.90625,
-       "learning_rate": 8.444834595378434e-06,
-       "loss": 2.5375,
-       "step": 135
-     },
-     {
-       "epoch": 1.1834061135371179,
-       "grad_norm": 0.92578125,
-       "learning_rate": 8.418511811013694e-06,
-       "loss": 2.5851,
-       "step": 136
-     },
-     {
-       "epoch": 1.1921397379912664,
-       "grad_norm": 0.91796875,
-       "learning_rate": 8.3920099019495e-06,
-       "loss": 2.6014,
-       "step": 137
-     },
-     {
-       "epoch": 1.2008733624454149,
-       "grad_norm": 1.0078125,
-       "learning_rate": 8.365330256844647e-06,
-       "loss": 2.5553,
-       "step": 138
-     },
-     {
-       "epoch": 1.2096069868995634,
-       "grad_norm": 0.91796875,
-       "learning_rate": 8.33847427367102e-06,
-       "loss": 2.5817,
-       "step": 139
-     },
-     {
-       "epoch": 1.2183406113537119,
-       "grad_norm": 0.9609375,
-       "learning_rate": 8.311443359640353e-06,
-       "loss": 2.5187,
-       "step": 140
-     },
-     {
-       "epoch": 1.2270742358078603,
-       "grad_norm": 0.921875,
-       "learning_rate": 8.284238931130476e-06,
-       "loss": 2.5306,
-       "step": 141
-     },
-     {
-       "epoch": 1.2358078602620086,
-       "grad_norm": 0.91796875,
-       "learning_rate": 8.256862413611113e-06,
-       "loss": 2.56,
-       "step": 142
-     },
-     {
-       "epoch": 1.244541484716157,
-       "grad_norm": 0.953125,
-       "learning_rate": 8.229315241569177e-06,
-       "loss": 2.5076,
-       "step": 143
-     },
-     {
-       "epoch": 1.2532751091703056,
-       "grad_norm": 0.94921875,
-       "learning_rate": 8.201598858433625e-06,
-       "loss": 2.5798,
-       "step": 144
-     },
-     {
-       "epoch": 1.262008733624454,
-       "grad_norm": 0.8671875,
-       "learning_rate": 8.173714716499801e-06,
-       "loss": 2.4532,
-       "step": 145
-     },
-     {
-       "epoch": 1.2707423580786026,
-       "grad_norm": 0.94140625,
-       "learning_rate": 8.145664276853353e-06,
-       "loss": 2.4997,
-       "step": 146
-     },
-     {
-       "epoch": 1.279475982532751,
-       "grad_norm": 0.94140625,
-       "learning_rate": 8.117449009293668e-06,
-       "loss": 2.5093,
-       "step": 147
-     },
-     {
-       "epoch": 1.2882096069868996,
-       "grad_norm": 0.94140625,
-       "learning_rate": 8.089070392256866e-06,
-       "loss": 2.5558,
-       "step": 148
-     },
-     {
-       "epoch": 1.296943231441048,
-       "grad_norm": 0.984375,
-       "learning_rate": 8.060529912738316e-06,
-       "loss": 2.5583,
-       "step": 149
-     },
-     {
-       "epoch": 1.3056768558951966,
-       "grad_norm": 1.0,
-       "learning_rate": 8.031829066214735e-06,
-       "loss": 2.5477,
-       "step": 150
-     },
-     {
-       "epoch": 1.314410480349345,
-       "grad_norm": 0.953125,
-       "learning_rate": 8.002969356565822e-06,
-       "loss": 2.56,
-       "step": 151
-     },
-     {
-       "epoch": 1.3231441048034935,
-       "grad_norm": 0.97265625,
-       "learning_rate": 7.973952295995452e-06,
-       "loss": 2.5918,
-       "step": 152
-     },
-     {
-       "epoch": 1.3318777292576418,
-       "grad_norm": 0.94140625,
-       "learning_rate": 7.94477940495245e-06,
-       "loss": 2.5883,
-       "step": 153
-     },
-     {
-       "epoch": 1.3406113537117905,
-       "grad_norm": 0.9765625,
-       "learning_rate": 7.91545221205091e-06,
-       "loss": 2.557,
-       "step": 154
-     },
-     {
-       "epoch": 1.3493449781659388,
-       "grad_norm": 0.921875,
-       "learning_rate": 7.885972253990104e-06,
-       "loss": 2.5619,
-       "step": 155
-     },
-     {
-       "epoch": 1.3580786026200873,
-       "grad_norm": 0.93359375,
-       "learning_rate": 7.856341075473963e-06,
-       "loss": 2.6278,
-       "step": 156
-     },
-     {
-       "epoch": 1.3668122270742358,
-       "grad_norm": 0.9453125,
-       "learning_rate": 7.826560229130132e-06,
-       "loss": 2.5959,
-       "step": 157
-     },
-     {
-       "epoch": 1.3755458515283843,
-       "grad_norm": 0.94140625,
-       "learning_rate": 7.796631275428617e-06,
-       "loss": 2.529,
-       "step": 158
-     },
-     {
-       "epoch": 1.3842794759825328,
-       "grad_norm": 0.91015625,
-       "learning_rate": 7.766555782600023e-06,
-       "loss": 2.5481,
-       "step": 159
-     },
-     {
-       "epoch": 1.3930131004366813,
-       "grad_norm": 0.99609375,
-       "learning_rate": 7.736335326553373e-06,
-       "loss": 2.4771,
-       "step": 160
-     },
-     {
-       "epoch": 1.4017467248908297,
-       "grad_norm": 0.98046875,
-       "learning_rate": 7.70597149079354e-06,
-       "loss": 2.5304,
-       "step": 161
-     },
-     {
-       "epoch": 1.4104803493449782,
-       "grad_norm": 0.9140625,
-       "learning_rate": 7.67546586633827e-06,
-       "loss": 2.47,
-       "step": 162
-     },
-     {
-       "epoch": 1.4192139737991267,
-       "grad_norm": 0.921875,
-       "learning_rate": 7.644820051634813e-06,
-       "loss": 2.5756,
-       "step": 163
-     },
-     {
-       "epoch": 1.427947598253275,
-       "grad_norm": 0.9375,
-       "learning_rate": 7.614035652476175e-06,
-       "loss": 2.5157,
-       "step": 164
-     },
-     {
-       "epoch": 1.4366812227074237,
-       "grad_norm": 0.9453125,
-       "learning_rate": 7.5831142819169664e-06,
-       "loss": 2.5364,
-       "step": 165
-     },
-     {
-       "epoch": 1.445414847161572,
-       "grad_norm": 0.93359375,
-       "learning_rate": 7.552057560188892e-06,
-       "loss": 2.5279,
-       "step": 166
-     },
-     {
-       "epoch": 1.4541484716157205,
-       "grad_norm": 0.90234375,
-       "learning_rate": 7.520867114615844e-06,
-       "loss": 2.491,
-       "step": 167
-     },
-     {
-       "epoch": 1.462882096069869,
-       "grad_norm": 0.9140625,
-       "learning_rate": 7.4895445795286325e-06,
-       "loss": 2.5906,
-       "step": 168
-     },
-     {
-       "epoch": 1.4716157205240175,
-       "grad_norm": 0.953125,
-       "learning_rate": 7.458091596179359e-06,
-       "loss": 2.5283,
-       "step": 169
-     },
-     {
-       "epoch": 1.480349344978166,
-       "grad_norm": 1.0078125,
-       "learning_rate": 7.4265098126554065e-06,
-       "loss": 2.5956,
-       "step": 170
-     },
-     {
-       "epoch": 1.4890829694323144,
-       "grad_norm": 0.93359375,
-       "learning_rate": 7.394800883793087e-06,
-       "loss": 2.5629,
-       "step": 171
-     },
-     {
-       "epoch": 1.497816593886463,
-       "grad_norm": 1.0390625,
-       "learning_rate": 7.3629664710909354e-06,
-       "loss": 2.5138,
-       "step": 172
-     },
-     {
-       "epoch": 1.5065502183406112,
-       "grad_norm": 1.0625,
-       "learning_rate": 7.331008242622637e-06,
-       "loss": 2.5712,
-       "step": 173
-     },
-     {
-       "epoch": 1.51528384279476,
-       "grad_norm": 0.96484375,
-       "learning_rate": 7.2989278729496374e-06,
-       "loss": 2.6415,
-       "step": 174
-     },
-     {
-       "epoch": 1.5240174672489082,
-       "grad_norm": 1.03125,
-       "learning_rate": 7.266727043033386e-06,
-       "loss": 2.5236,
-       "step": 175
-     },
-     {
-       "epoch": 1.532751091703057,
-       "grad_norm": 0.96875,
-       "learning_rate": 7.234407440147266e-06,
-       "loss": 2.5656,
-       "step": 176
-     },
-     {
-       "epoch": 1.5414847161572052,
-       "grad_norm": 0.98828125,
-       "learning_rate": 7.201970757788172e-06,
-       "loss": 2.5682,
-       "step": 177
-     },
-     {
-       "epoch": 1.5502183406113537,
-       "grad_norm": 0.96484375,
-       "learning_rate": 7.169418695587791e-06,
-       "loss": 2.5356,
-       "step": 178
-     },
-     {
-       "epoch": 1.5589519650655022,
-       "grad_norm": 0.96875,
-       "learning_rate": 7.136752959223527e-06,
-       "loss": 2.6174,
-       "step": 179
-     },
-     {
-       "epoch": 1.5676855895196506,
-       "grad_norm": 0.93359375,
-       "learning_rate": 7.103975260329136e-06,
-       "loss": 2.6003,
-       "step": 180
-     },
-     {
-       "epoch": 1.5764192139737991,
-       "grad_norm": 0.9140625,
-       "learning_rate": 7.071087316405037e-06,
-       "loss": 2.5504,
-       "step": 181
-     },
-     {
-       "epoch": 1.5851528384279476,
-       "grad_norm": 0.93359375,
-       "learning_rate": 7.038090850728312e-06,
-       "loss": 2.5491,
-       "step": 182
-     },
-     {
-       "epoch": 1.5938864628820961,
-       "grad_norm": 0.9296875,
-       "learning_rate": 7.00498759226242e-06,
-       "loss": 2.5331,
-       "step": 183
-     },
-     {
-       "epoch": 1.6026200873362444,
-       "grad_norm": 0.921875,
-       "learning_rate": 6.971779275566593e-06,
-       "loss": 2.537,
-       "step": 184
-     },
-     {
-       "epoch": 1.611353711790393,
-       "grad_norm": 0.94140625,
-       "learning_rate": 6.938467640704953e-06,
-       "loss": 2.5117,
-       "step": 185
-     },
-     {
-       "epoch": 1.6200873362445414,
-       "grad_norm": 0.93359375,
-       "learning_rate": 6.90505443315533e-06,
-       "loss": 2.6398,
-       "step": 186
-     },
-     {
-       "epoch": 1.62882096069869,
-       "grad_norm": 0.9609375,
-       "learning_rate": 6.871541403717808e-06,
-       "loss": 2.5182,
-       "step": 187
-     },
-     {
-       "epoch": 1.6375545851528384,
-       "grad_norm": 0.9296875,
-       "learning_rate": 6.8379303084229765e-06,
-       "loss": 2.6042,
-       "step": 188
-     },
-     {
-       "epoch": 1.6462882096069869,
-       "grad_norm": 0.953125,
-       "learning_rate": 6.8042229084399325e-06,
-       "loss": 2.5974,
-       "step": 189
-     },
-     {
-       "epoch": 1.6550218340611353,
-       "grad_norm": 0.98828125,
-       "learning_rate": 6.770420969983982e-06,
-       "loss": 2.5706,
-       "step": 190
-     },
-     {
-       "epoch": 1.6637554585152838,
-       "grad_norm": 1.0,
-       "learning_rate": 6.736526264224101e-06,
-       "loss": 2.4622,
-       "step": 191
-     },
-     {
-       "epoch": 1.6724890829694323,
-       "grad_norm": 0.9375,
-       "learning_rate": 6.702540567190132e-06,
-       "loss": 2.5172,
-       "step": 192
-     },
-     {
-       "epoch": 1.6812227074235808,
-       "grad_norm": 0.9453125,
-       "learning_rate": 6.668465659679714e-06,
1360
- "loss": 2.5317,
1361
- "step": 193
1362
- },
1363
- {
1364
- "epoch": 1.6899563318777293,
1365
- "grad_norm": 1.046875,
1366
- "learning_rate": 6.634303327164976e-06,
1367
- "loss": 2.5942,
1368
- "step": 194
1369
- },
1370
- {
1371
- "epoch": 1.6986899563318776,
1372
- "grad_norm": 0.9765625,
1373
- "learning_rate": 6.600055359698984e-06,
1374
- "loss": 2.5432,
1375
- "step": 195
1376
- },
1377
- {
1378
- "epoch": 1.7074235807860263,
1379
- "grad_norm": 1.0234375,
1380
- "learning_rate": 6.565723551821943e-06,
1381
- "loss": 2.532,
1382
- "step": 196
1383
- },
1384
- {
1385
- "epoch": 1.7161572052401746,
1386
- "grad_norm": 0.9609375,
1387
- "learning_rate": 6.531309702467159e-06,
1388
- "loss": 2.4832,
1389
- "step": 197
1390
- },
1391
- {
1392
- "epoch": 1.7248908296943233,
1393
- "grad_norm": 0.9296875,
1394
- "learning_rate": 6.496815614866792e-06,
1395
- "loss": 2.5488,
1396
- "step": 198
1397
- },
1398
- {
1399
- "epoch": 1.7336244541484715,
1400
- "grad_norm": 0.9296875,
1401
- "learning_rate": 6.462243096457352e-06,
1402
- "loss": 2.6082,
1403
- "step": 199
1404
- },
1405
- {
1406
- "epoch": 1.74235807860262,
1407
- "grad_norm": 0.9609375,
1408
- "learning_rate": 6.42759395878501e-06,
1409
- "loss": 2.5815,
1410
- "step": 200
1411
- },
1412
- {
1413
- "epoch": 1.7510917030567685,
1414
- "grad_norm": 0.9609375,
1415
- "learning_rate": 6.392870017410665e-06,
1416
- "loss": 2.4902,
1417
- "step": 201
1418
- },
1419
- {
1420
- "epoch": 1.759825327510917,
1421
- "grad_norm": 0.9375,
1422
- "learning_rate": 6.358073091814809e-06,
1423
- "loss": 2.4882,
1424
- "step": 202
1425
- },
1426
- {
1427
- "epoch": 1.7685589519650655,
1428
- "grad_norm": 1.0,
1429
- "learning_rate": 6.323205005302199e-06,
1430
- "loss": 2.5042,
1431
- "step": 203
1432
- },
1433
- {
1434
- "epoch": 1.777292576419214,
1435
- "grad_norm": 0.953125,
1436
- "learning_rate": 6.288267584906308e-06,
1437
- "loss": 2.5471,
1438
- "step": 204
1439
- },
1440
- {
1441
- "epoch": 1.7860262008733625,
1442
- "grad_norm": 0.91796875,
1443
- "learning_rate": 6.2532626612936035e-06,
1444
- "loss": 2.5491,
1445
- "step": 205
1446
- },
1447
- {
1448
- "epoch": 1.7947598253275108,
1449
- "grad_norm": 0.93359375,
1450
- "learning_rate": 6.21819206866761e-06,
1451
- "loss": 2.5086,
1452
- "step": 206
1453
- },
1454
- {
1455
- "epoch": 1.8034934497816595,
1456
- "grad_norm": 1.1484375,
1457
- "learning_rate": 6.18305764467281e-06,
1458
- "loss": 2.585,
1459
- "step": 207
1460
- },
1461
- {
1462
- "epoch": 1.8122270742358078,
1463
- "grad_norm": 0.99609375,
1464
- "learning_rate": 6.147861230298349e-06,
1465
- "loss": 2.564,
1466
- "step": 208
1467
- },
1468
- {
1469
- "epoch": 1.8209606986899565,
1470
- "grad_norm": 1.0859375,
1471
- "learning_rate": 6.112604669781572e-06,
1472
- "loss": 2.5534,
1473
- "step": 209
1474
- },
1475
- {
1476
- "epoch": 1.8296943231441047,
1477
- "grad_norm": 1.0234375,
1478
- "learning_rate": 6.077289810511389e-06,
1479
- "loss": 2.5989,
1480
- "step": 210
1481
- },
1482
- {
1483
- "epoch": 1.8384279475982532,
1484
- "grad_norm": 1.0078125,
1485
- "learning_rate": 6.041918502931473e-06,
1486
- "loss": 2.5781,
1487
- "step": 211
1488
- },
1489
- {
1490
- "epoch": 1.8471615720524017,
1491
- "grad_norm": 1.1171875,
1492
- "learning_rate": 6.006492600443301e-06,
1493
- "loss": 2.4495,
1494
- "step": 212
1495
- },
1496
- {
1497
- "epoch": 1.8558951965065502,
1498
- "grad_norm": 1.0,
1499
- "learning_rate": 5.971013959309038e-06,
1500
- "loss": 2.4965,
1501
- "step": 213
1502
- },
1503
- {
1504
- "epoch": 1.8646288209606987,
1505
- "grad_norm": 0.96875,
1506
- "learning_rate": 5.935484438554273e-06,
1507
- "loss": 2.5466,
1508
- "step": 214
1509
- },
1510
- {
1511
- "epoch": 1.8733624454148472,
1512
- "grad_norm": 0.94140625,
1513
- "learning_rate": 5.8999058998706046e-06,
1514
- "loss": 2.447,
1515
- "step": 215
1516
- },
1517
- {
1518
- "epoch": 1.8820960698689957,
1519
- "grad_norm": 0.98828125,
1520
- "learning_rate": 5.8642802075181e-06,
1521
- "loss": 2.5911,
1522
- "step": 216
1523
- },
1524
- {
1525
- "epoch": 1.890829694323144,
1526
- "grad_norm": 1.1015625,
1527
- "learning_rate": 5.828609228227603e-06,
1528
- "loss": 2.5723,
1529
- "step": 217
1530
- },
1531
- {
1532
- "epoch": 1.8995633187772927,
1533
- "grad_norm": 1.015625,
1534
- "learning_rate": 5.7928948311029175e-06,
1535
- "loss": 2.6029,
1536
- "step": 218
1537
- },
1538
- {
1539
- "epoch": 1.908296943231441,
1540
- "grad_norm": 1.0078125,
1541
- "learning_rate": 5.757138887522884e-06,
1542
- "loss": 2.5432,
1543
- "step": 219
1544
- },
1545
- {
1546
- "epoch": 1.9170305676855897,
1547
- "grad_norm": 0.94140625,
1548
- "learning_rate": 5.721343271043305e-06,
1549
- "loss": 2.5273,
1550
- "step": 220
1551
- },
1552
- {
1553
- "epoch": 1.925764192139738,
1554
- "grad_norm": 0.93359375,
1555
- "learning_rate": 5.685509857298781e-06,
1556
- "loss": 2.5822,
1557
- "step": 221
1558
- },
1559
- {
1560
- "epoch": 1.9344978165938864,
1561
- "grad_norm": 0.97265625,
1562
- "learning_rate": 5.649640523904438e-06,
1563
- "loss": 2.4545,
1564
- "step": 222
1565
- },
1566
- {
1567
- "epoch": 1.943231441048035,
1568
- "grad_norm": 0.95703125,
1569
- "learning_rate": 5.613737150357528e-06,
1570
- "loss": 2.6138,
1571
- "step": 223
1572
- },
1573
- {
1574
- "epoch": 1.9519650655021834,
1575
- "grad_norm": 0.984375,
1576
- "learning_rate": 5.577801617938956e-06,
1577
- "loss": 2.5428,
1578
- "step": 224
1579
- },
1580
- {
1581
- "epoch": 1.960698689956332,
1582
- "grad_norm": 0.96875,
1583
- "learning_rate": 5.541835809614704e-06,
1584
- "loss": 2.4883,
1585
- "step": 225
1586
- },
1587
- {
1588
- "epoch": 1.9694323144104804,
1589
- "grad_norm": 0.97265625,
1590
- "learning_rate": 5.505841609937162e-06,
1591
- "loss": 2.5128,
1592
- "step": 226
1593
- },
1594
- {
1595
- "epoch": 1.9781659388646289,
1596
- "grad_norm": 0.9375,
1597
- "learning_rate": 5.469820904946383e-06,
1598
- "loss": 2.5452,
1599
- "step": 227
1600
- },
1601
- {
1602
- "epoch": 1.9868995633187772,
1603
- "grad_norm": 0.98828125,
1604
- "learning_rate": 5.43377558207126e-06,
1605
- "loss": 2.5695,
1606
- "step": 228
1607
- }
1608
- ],
1609
- "logging_steps": 1,
1610
- "max_steps": 456,
1611
- "num_input_tokens_seen": 0,
1612
- "num_train_epochs": 4,
1613
- "save_steps": 114,
1614
- "stateful_callbacks": {
1615
- "TrainerControl": {
1616
- "args": {
1617
- "should_epoch_stop": false,
1618
- "should_evaluate": false,
1619
- "should_log": false,
1620
- "should_save": true,
1621
- "should_training_stop": false
1622
- },
1623
- "attributes": {}
1624
- }
1625
- },
1626
- "total_flos": 7.136533590677914e+17,
1627
- "train_batch_size": 2,
1628
- "trial_name": null,
1629
- "trial_params": null
1630
- }
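The lines above are the tail of the deleted checkpoint-228/trainer_state.json: one log_history entry per optimizer step (epoch, grad_norm, learning_rate, loss, step), followed by the trainer's closing metadata. As a rough illustration only, not part of this commit, a state file with this layout can be summarized with a few lines of Python (the local file path is hypothetical):

import json

# Load a trainer_state.json with the "log_history" layout shown in the diff above.
# (Assumption: the file has been downloaded locally as "trainer_state.json".)
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the entries that carry a training loss.
logs = [e for e in state["log_history"] if "loss" in e]
first, last = logs[0], logs[-1]
print(f"steps {first['step']}-{last['step']}: "
      f"loss {first['loss']:.4f} -> {last['loss']:.4f}, "
      f"final lr {last['learning_rate']:.3e}")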
 
checkpoint-228/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:ae0592bc0dda5ec7f7a3ef05f7a468bec687ffdab3262199ff4c718485bd04da
- size 6520
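For reference, the three deleted lines above are a Git LFS pointer: large binaries such as training_args.bin are stored out of band, and the repository itself tracks only a version line, a sha256 oid, and a size in bytes. A minimal, hypothetical parser for that pointer format (not part of this commit):

def parse_lfs_pointer(text: str) -> dict:
    # Each pointer line is "key value"; split on the first space.
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:ae0592bc0dda5ec7f7a3ef05f7a468bec687ffdab3262199ff4c718485bd04da\n"
    "size 6520\n"
)
print(pointer["size"])  # -> "6520"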
 
 
 
 
checkpoint-228/vocab.json DELETED
The diff for this file is too large to render. See raw diff