jeiku committed
Commit 45ca7bd · verified · 1 Parent(s): 1fd5637

Delete checkpoint-349

checkpoint-349/config.json DELETED
@@ -1,36 +0,0 @@
- {
-   "_name_or_path": "IntervitensInc/Llama-3.1-Minitron-4B-Width-Base-chatml",
-   "architectures": [
-     "LlamaForCausalLM"
-   ],
-   "attention_bias": false,
-   "attention_dropout": 0.0,
-   "bos_token_id": 128000,
-   "eos_token_id": 128019,
-   "head_dim": 128,
-   "hidden_act": "silu",
-   "hidden_size": 3072,
-   "initializer_range": 0.02,
-   "intermediate_size": 9216,
-   "max_position_embeddings": 131072,
-   "mlp_bias": false,
-   "model_type": "llama",
-   "num_attention_heads": 32,
-   "num_hidden_layers": 32,
-   "num_key_value_heads": 8,
-   "pretraining_tp": 1,
-   "rms_norm_eps": 1e-05,
-   "rope_scaling": {
-     "factor": 8.0,
-     "high_freq_factor": 4.0,
-     "low_freq_factor": 1.0,
-     "original_max_position_embeddings": 8192,
-     "rope_type": "llama3"
-   },
-   "rope_theta": 500000.0,
-   "tie_word_embeddings": false,
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.46.0.dev0",
-   "use_cache": false,
-   "vocab_size": 128256
- }
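For reference, the deleted config describes a width-pruned Llama architecture: 32 layers, hidden size 3072, grouped-query attention with 8 KV heads, and llama3-style rope scaling out to 131072 positions. A minimal sketch of loading such a checkpoint directory with transformers, assuming "checkpoint-349" (the directory this commit deletes) still existed locally:

# Minimal sketch, assuming the deleted checkpoint-349/ directory were still present.
import torch
from transformers import AutoConfig, AutoModelForCausalLM

config = AutoConfig.from_pretrained("checkpoint-349")
print(config.num_hidden_layers, config.num_key_value_heads)  # 32, 8

# torch.bfloat16 matches the config's "torch_dtype": "bfloat16"
model = AutoModelForCausalLM.from_pretrained("checkpoint-349", torch_dtype=torch.bfloat16)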
 
checkpoint-349/generation_config.json DELETED
@@ -1,7 +0,0 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 128000,
-   "do_sample": true,
-   "eos_token_id": 128001,
-   "transformers_version": "4.46.0.dev0"
- }
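Note that this deleted generation_config keeps eos_token_id 128001 (<|end_of_text|>), while config.json above sets 128019 (<|im_end|> per the tokenizer_config in this same commit); with a ChatML-tuned model that mismatch can let generation run past the turn boundary. A hedged sketch of overriding it at call time, continuing the hypothetical loading example above:

# Sketch: force the ChatML turn terminator at generation time.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("checkpoint-349")
inputs = tokenizer("Hello", return_tensors="pt")
output_ids = model.generate(
    **inputs,
    max_new_tokens=256,
    do_sample=True,       # mirrors "do_sample": true above
    eos_token_id=128019,  # <|im_end|>; overrides the stale 128001 here
)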
 
checkpoint-349/latest DELETED
@@ -1 +0,0 @@
- global_step349
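The latest file is the tag DeepSpeed writes next to its engine state; its single line names the subdirectory (global_step349) holding the partitioned states, and the paired rng_state_0.pth/rng_state_1.pth below suggest a two-rank run. Had the global_step349 folder been kept, fp32 consolidation would typically go through DeepSpeed's zero_to_fp32.py script (not shown in this diff):

# Sketch: DeepSpeed resolves the resume point from the tag in `latest`.
tag = open("checkpoint-349/latest").read().strip()  # -> "global_step349"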
 
checkpoint-349/model-00001-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:35df2d99728a28b3a6b36b3b4c1bb0ed4c9e195b29e91026579d9758290f0f9d
- size 4978354640
 
checkpoint-349/model-00002-of-00002.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:2a1dab55a2b124aa3dc2b6445dd424a9c958dc3fa0e94da40b2e86ff8e149a15
- size 4047172128
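Both shard entries are git-lfs pointer files rather than the weights themselves: a version line, a sha256 oid, and a byte size. The two sizes sum to 4,978,354,640 + 4,047,172,128 = 9,025,526,768 bytes (~9.03 GB), slightly more than the index's total_size below, presumably because each safetensors shard also carries its own JSON header. A small sketch of parsing such a pointer, assuming nothing beyond the three-line format shown:

# Sketch: parse a git-lfs pointer file like the two above.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return {
        "oid": fields["oid"].removeprefix("sha256:"),
        "size": int(fields["size"]),
    }

ptr = parse_lfs_pointer(
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:35df2d99728a28b3a6b36b3b4c1bb0ed4c9e195b29e91026579d9758290f0f9d\n"
    "size 4978354640\n"
)
print(ptr["size"])  # 4978354640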
 
checkpoint-349/model.safetensors.index.json DELETED
@@ -1,298 +0,0 @@
- {
-   "metadata": {
-     "total_size": 9025492992
-   },
-   "weight_map": {
-     "lm_head.weight": "model-00002-of-00002.safetensors",
-     "model.embed_tokens.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
-     "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
-     "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
-     "model.norm.weight": "model-00002-of-00002.safetensors"
-   }
- }
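The weight_map places embed_tokens and layers 0-17 in shard 1, and layers 18-31 plus lm_head and the final norm in shard 2. A sketch of how the index resolves a tensor name to its shard, assuming the two .safetensors files this commit deletes were still present:

# Sketch: look one tensor up through the index and read it from its shard.
import json
from safetensors import safe_open

with open("checkpoint-349/model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.18.self_attn.q_proj.weight"
shard = index["weight_map"][name]  # -> "model-00002-of-00002.safetensors"
with safe_open(f"checkpoint-349/{shard}", framework="pt") as f:
    tensor = f.get_tensor(name)
print(tensor.shape)  # torch.Size([4096, 3072]): 32 heads x head_dim 128, hidden 3072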
 
checkpoint-349/rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:d7e52325e9d729519836af640f8f754a93ee06730fb2953b5309434b53b17562
- size 14512
 
checkpoint-349/rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:6a93593cf0342eb47876986e1063102e1546354426a2324c46ddcf1cbecae803
- size 14512
 
checkpoint-349/scheduler.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:1dc330a705ce44b1f0f9876c7f0a8a61c09c1784350d40ae4b0288eb56fb556b
- size 1064
 
checkpoint-349/special_tokens_map.json DELETED
@@ -1,23 +0,0 @@
- {
-   "bos_token": {
-     "content": "<|begin_of_text|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|im_end|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": {
-     "content": "<|finetune_right_pad_id|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   }
- }
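This map reflects the ChatML conversion named in _name_or_path above: eos is <|im_end|> (id 128019) rather than Llama's <|end_of_text|>, and padding uses the dedicated <|finetune_right_pad_id|> token so no content token is overloaded as pad. A sketch of the prompt layout those tokens imply:

# Sketch: the ChatML format implied by <|im_start|>/<|im_end|> (ids 128018/128019).
prompt = (
    "<|im_start|>system\n"
    "You are a helpful assistant.<|im_end|>\n"
    "<|im_start|>user\n"
    "Hello!<|im_end|>\n"
    "<|im_start|>assistant\n"
)
# Generation should then stop at <|im_end|>, i.e. eos_token_id 128019.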
 
checkpoint-349/tokenizer.json DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:907a7b3b13afcc9d481433f17277a6dd7cf852c6185262597f1a849d2ebeaa45
- size 17209884
 
checkpoint-349/tokenizer_config.json DELETED
@@ -1,2062 +0,0 @@
- {
-   "added_tokens_decoder": {
-     "128000": {
-       "content": "<|begin_of_text|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128001": {
-       "content": "<|end_of_text|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128002": {
-       "content": "<|reserved_special_token_0|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128003": {
-       "content": "<|reserved_special_token_1|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128004": {
-       "content": "<|finetune_right_pad_id|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128005": {
-       "content": "<|reserved_special_token_2|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128006": {
-       "content": "<|start_header_id|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128007": {
-       "content": "<|end_header_id|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128008": {
-       "content": "<|eom_id|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128009": {
-       "content": "<|eot_id|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128010": {
-       "content": "<|python_tag|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128011": {
-       "content": "<|reserved_special_token_3|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128012": {
-       "content": "<|reserved_special_token_4|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128013": {
-       "content": "<|reserved_special_token_5|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128014": {
-       "content": "<|reserved_special_token_6|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128015": {
-       "content": "<|reserved_special_token_7|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128016": {
-       "content": "<|reserved_special_token_8|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128017": {
-       "content": "<|reserved_special_token_9|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128018": {
-       "content": "<|im_start|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128019": {
-       "content": "<|im_end|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128020": {
-       "content": "<|reserved_special_token_12|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128021": {
-       "content": "<|reserved_special_token_13|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128022": {
-       "content": "<|reserved_special_token_14|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128023": {
-       "content": "<|reserved_special_token_15|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128024": {
-       "content": "<|reserved_special_token_16|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128025": {
-       "content": "<|reserved_special_token_17|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128026": {
-       "content": "<|reserved_special_token_18|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128027": {
-       "content": "<|reserved_special_token_19|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128028": {
-       "content": "<|reserved_special_token_20|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128029": {
-       "content": "<|reserved_special_token_21|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128030": {
-       "content": "<|reserved_special_token_22|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128031": {
-       "content": "<|reserved_special_token_23|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128032": {
-       "content": "<|reserved_special_token_24|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128033": {
-       "content": "<|reserved_special_token_25|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128034": {
-       "content": "<|reserved_special_token_26|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128035": {
-       "content": "<|reserved_special_token_27|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128036": {
-       "content": "<|reserved_special_token_28|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128037": {
-       "content": "<|reserved_special_token_29|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128038": {
-       "content": "<|reserved_special_token_30|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128039": {
-       "content": "<|reserved_special_token_31|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128040": {
-       "content": "<|reserved_special_token_32|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128041": {
-       "content": "<|reserved_special_token_33|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128042": {
-       "content": "<|reserved_special_token_34|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128043": {
-       "content": "<|reserved_special_token_35|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128044": {
-       "content": "<|reserved_special_token_36|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128045": {
-       "content": "<|reserved_special_token_37|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128046": {
-       "content": "<|reserved_special_token_38|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128047": {
-       "content": "<|reserved_special_token_39|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128048": {
-       "content": "<|reserved_special_token_40|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128049": {
-       "content": "<|reserved_special_token_41|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128050": {
-       "content": "<|reserved_special_token_42|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128051": {
-       "content": "<|reserved_special_token_43|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128052": {
-       "content": "<|reserved_special_token_44|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128053": {
-       "content": "<|reserved_special_token_45|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128054": {
-       "content": "<|reserved_special_token_46|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128055": {
-       "content": "<|reserved_special_token_47|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128056": {
-       "content": "<|reserved_special_token_48|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128057": {
-       "content": "<|reserved_special_token_49|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128058": {
-       "content": "<|reserved_special_token_50|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128059": {
-       "content": "<|reserved_special_token_51|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128060": {
-       "content": "<|reserved_special_token_52|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128061": {
-       "content": "<|reserved_special_token_53|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128062": {
-       "content": "<|reserved_special_token_54|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128063": {
-       "content": "<|reserved_special_token_55|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128064": {
-       "content": "<|reserved_special_token_56|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128065": {
-       "content": "<|reserved_special_token_57|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128066": {
-       "content": "<|reserved_special_token_58|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128067": {
-       "content": "<|reserved_special_token_59|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128068": {
-       "content": "<|reserved_special_token_60|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128069": {
-       "content": "<|reserved_special_token_61|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128070": {
-       "content": "<|reserved_special_token_62|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128071": {
-       "content": "<|reserved_special_token_63|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128072": {
-       "content": "<|reserved_special_token_64|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128073": {
-       "content": "<|reserved_special_token_65|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128074": {
-       "content": "<|reserved_special_token_66|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128075": {
-       "content": "<|reserved_special_token_67|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128076": {
-       "content": "<|reserved_special_token_68|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128077": {
-       "content": "<|reserved_special_token_69|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128078": {
-       "content": "<|reserved_special_token_70|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128079": {
-       "content": "<|reserved_special_token_71|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128080": {
-       "content": "<|reserved_special_token_72|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128081": {
-       "content": "<|reserved_special_token_73|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128082": {
-       "content": "<|reserved_special_token_74|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128083": {
-       "content": "<|reserved_special_token_75|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128084": {
-       "content": "<|reserved_special_token_76|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128085": {
-       "content": "<|reserved_special_token_77|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128086": {
-       "content": "<|reserved_special_token_78|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128087": {
-       "content": "<|reserved_special_token_79|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128088": {
-       "content": "<|reserved_special_token_80|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128089": {
-       "content": "<|reserved_special_token_81|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128090": {
-       "content": "<|reserved_special_token_82|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128091": {
-       "content": "<|reserved_special_token_83|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128092": {
-       "content": "<|reserved_special_token_84|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128093": {
-       "content": "<|reserved_special_token_85|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128094": {
-       "content": "<|reserved_special_token_86|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128095": {
-       "content": "<|reserved_special_token_87|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128096": {
-       "content": "<|reserved_special_token_88|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128097": {
-       "content": "<|reserved_special_token_89|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128098": {
-       "content": "<|reserved_special_token_90|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128099": {
-       "content": "<|reserved_special_token_91|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128100": {
-       "content": "<|reserved_special_token_92|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128101": {
-       "content": "<|reserved_special_token_93|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128102": {
-       "content": "<|reserved_special_token_94|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128103": {
-       "content": "<|reserved_special_token_95|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128104": {
-       "content": "<|reserved_special_token_96|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128105": {
-       "content": "<|reserved_special_token_97|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128106": {
-       "content": "<|reserved_special_token_98|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128107": {
-       "content": "<|reserved_special_token_99|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128108": {
-       "content": "<|reserved_special_token_100|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128109": {
-       "content": "<|reserved_special_token_101|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128110": {
-       "content": "<|reserved_special_token_102|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128111": {
-       "content": "<|reserved_special_token_103|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128112": {
-       "content": "<|reserved_special_token_104|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128113": {
-       "content": "<|reserved_special_token_105|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128114": {
-       "content": "<|reserved_special_token_106|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128115": {
-       "content": "<|reserved_special_token_107|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128116": {
-       "content": "<|reserved_special_token_108|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128117": {
-       "content": "<|reserved_special_token_109|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128118": {
-       "content": "<|reserved_special_token_110|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128119": {
-       "content": "<|reserved_special_token_111|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128120": {
-       "content": "<|reserved_special_token_112|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128121": {
-       "content": "<|reserved_special_token_113|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128122": {
-       "content": "<|reserved_special_token_114|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128123": {
-       "content": "<|reserved_special_token_115|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128124": {
-       "content": "<|reserved_special_token_116|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128125": {
-       "content": "<|reserved_special_token_117|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128126": {
-       "content": "<|reserved_special_token_118|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128127": {
-       "content": "<|reserved_special_token_119|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128128": {
-       "content": "<|reserved_special_token_120|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128129": {
-       "content": "<|reserved_special_token_121|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128130": {
-       "content": "<|reserved_special_token_122|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128131": {
-       "content": "<|reserved_special_token_123|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128132": {
-       "content": "<|reserved_special_token_124|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128133": {
-       "content": "<|reserved_special_token_125|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128134": {
-       "content": "<|reserved_special_token_126|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128135": {
-       "content": "<|reserved_special_token_127|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128136": {
-       "content": "<|reserved_special_token_128|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128137": {
-       "content": "<|reserved_special_token_129|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128138": {
-       "content": "<|reserved_special_token_130|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128139": {
-       "content": "<|reserved_special_token_131|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128140": {
-       "content": "<|reserved_special_token_132|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128141": {
-       "content": "<|reserved_special_token_133|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128142": {
-       "content": "<|reserved_special_token_134|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128143": {
-       "content": "<|reserved_special_token_135|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128144": {
-       "content": "<|reserved_special_token_136|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128145": {
-       "content": "<|reserved_special_token_137|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128146": {
-       "content": "<|reserved_special_token_138|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128147": {
-       "content": "<|reserved_special_token_139|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128148": {
-       "content": "<|reserved_special_token_140|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128149": {
-       "content": "<|reserved_special_token_141|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128150": {
-       "content": "<|reserved_special_token_142|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128151": {
-       "content": "<|reserved_special_token_143|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128152": {
-       "content": "<|reserved_special_token_144|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128153": {
-       "content": "<|reserved_special_token_145|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128154": {
-       "content": "<|reserved_special_token_146|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128155": {
-       "content": "<|reserved_special_token_147|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128156": {
-       "content": "<|reserved_special_token_148|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128157": {
-       "content": "<|reserved_special_token_149|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "128158": {
-       "content": "<|reserved_special_token_150|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
- "single_word": false,
1273
- "special": true
1274
- },
1275
- "128159": {
1276
- "content": "<|reserved_special_token_151|>",
1277
- "lstrip": false,
1278
- "normalized": false,
1279
- "rstrip": false,
1280
- "single_word": false,
1281
- "special": true
1282
- },
1283
- "128160": {
1284
- "content": "<|reserved_special_token_152|>",
1285
- "lstrip": false,
1286
- "normalized": false,
1287
- "rstrip": false,
1288
- "single_word": false,
1289
- "special": true
1290
- },
1291
- "128161": {
1292
- "content": "<|reserved_special_token_153|>",
1293
- "lstrip": false,
1294
- "normalized": false,
1295
- "rstrip": false,
1296
- "single_word": false,
1297
- "special": true
1298
- },
1299
- "128162": {
1300
- "content": "<|reserved_special_token_154|>",
1301
- "lstrip": false,
1302
- "normalized": false,
1303
- "rstrip": false,
1304
- "single_word": false,
1305
- "special": true
1306
- },
1307
- "128163": {
1308
- "content": "<|reserved_special_token_155|>",
1309
- "lstrip": false,
1310
- "normalized": false,
1311
- "rstrip": false,
1312
- "single_word": false,
1313
- "special": true
1314
- },
1315
- "128164": {
1316
- "content": "<|reserved_special_token_156|>",
1317
- "lstrip": false,
1318
- "normalized": false,
1319
- "rstrip": false,
1320
- "single_word": false,
1321
- "special": true
1322
- },
1323
- "128165": {
1324
- "content": "<|reserved_special_token_157|>",
1325
- "lstrip": false,
1326
- "normalized": false,
1327
- "rstrip": false,
1328
- "single_word": false,
1329
- "special": true
1330
- },
1331
- "128166": {
1332
- "content": "<|reserved_special_token_158|>",
1333
- "lstrip": false,
1334
- "normalized": false,
1335
- "rstrip": false,
1336
- "single_word": false,
1337
- "special": true
1338
- },
1339
- "128167": {
1340
- "content": "<|reserved_special_token_159|>",
1341
- "lstrip": false,
1342
- "normalized": false,
1343
- "rstrip": false,
1344
- "single_word": false,
1345
- "special": true
1346
- },
1347
- "128168": {
1348
- "content": "<|reserved_special_token_160|>",
1349
- "lstrip": false,
1350
- "normalized": false,
1351
- "rstrip": false,
1352
- "single_word": false,
1353
- "special": true
1354
- },
1355
- "128169": {
1356
- "content": "<|reserved_special_token_161|>",
1357
- "lstrip": false,
1358
- "normalized": false,
1359
- "rstrip": false,
1360
- "single_word": false,
1361
- "special": true
1362
- },
1363
- "128170": {
1364
- "content": "<|reserved_special_token_162|>",
1365
- "lstrip": false,
1366
- "normalized": false,
1367
- "rstrip": false,
1368
- "single_word": false,
1369
- "special": true
1370
- },
1371
- "128171": {
1372
- "content": "<|reserved_special_token_163|>",
1373
- "lstrip": false,
1374
- "normalized": false,
1375
- "rstrip": false,
1376
- "single_word": false,
1377
- "special": true
1378
- },
1379
- "128172": {
1380
- "content": "<|reserved_special_token_164|>",
1381
- "lstrip": false,
1382
- "normalized": false,
1383
- "rstrip": false,
1384
- "single_word": false,
1385
- "special": true
1386
- },
1387
- "128173": {
1388
- "content": "<|reserved_special_token_165|>",
1389
- "lstrip": false,
1390
- "normalized": false,
1391
- "rstrip": false,
1392
- "single_word": false,
1393
- "special": true
1394
- },
1395
- "128174": {
1396
- "content": "<|reserved_special_token_166|>",
1397
- "lstrip": false,
1398
- "normalized": false,
1399
- "rstrip": false,
1400
- "single_word": false,
1401
- "special": true
1402
- },
1403
- "128175": {
1404
- "content": "<|reserved_special_token_167|>",
1405
- "lstrip": false,
1406
- "normalized": false,
1407
- "rstrip": false,
1408
- "single_word": false,
1409
- "special": true
1410
- },
1411
- "128176": {
1412
- "content": "<|reserved_special_token_168|>",
1413
- "lstrip": false,
1414
- "normalized": false,
1415
- "rstrip": false,
1416
- "single_word": false,
1417
- "special": true
1418
- },
1419
- "128177": {
1420
- "content": "<|reserved_special_token_169|>",
1421
- "lstrip": false,
1422
- "normalized": false,
1423
- "rstrip": false,
1424
- "single_word": false,
1425
- "special": true
1426
- },
1427
- "128178": {
1428
- "content": "<|reserved_special_token_170|>",
1429
- "lstrip": false,
1430
- "normalized": false,
1431
- "rstrip": false,
1432
- "single_word": false,
1433
- "special": true
1434
- },
1435
- "128179": {
1436
- "content": "<|reserved_special_token_171|>",
1437
- "lstrip": false,
1438
- "normalized": false,
1439
- "rstrip": false,
1440
- "single_word": false,
1441
- "special": true
1442
- },
1443
- "128180": {
1444
- "content": "<|reserved_special_token_172|>",
1445
- "lstrip": false,
1446
- "normalized": false,
1447
- "rstrip": false,
1448
- "single_word": false,
1449
- "special": true
1450
- },
1451
- "128181": {
1452
- "content": "<|reserved_special_token_173|>",
1453
- "lstrip": false,
1454
- "normalized": false,
1455
- "rstrip": false,
1456
- "single_word": false,
1457
- "special": true
1458
- },
1459
- "128182": {
1460
- "content": "<|reserved_special_token_174|>",
1461
- "lstrip": false,
1462
- "normalized": false,
1463
- "rstrip": false,
1464
- "single_word": false,
1465
- "special": true
1466
- },
1467
- "128183": {
1468
- "content": "<|reserved_special_token_175|>",
1469
- "lstrip": false,
1470
- "normalized": false,
1471
- "rstrip": false,
1472
- "single_word": false,
1473
- "special": true
1474
- },
1475
- "128184": {
1476
- "content": "<|reserved_special_token_176|>",
1477
- "lstrip": false,
1478
- "normalized": false,
1479
- "rstrip": false,
1480
- "single_word": false,
1481
- "special": true
1482
- },
1483
- "128185": {
1484
- "content": "<|reserved_special_token_177|>",
1485
- "lstrip": false,
1486
- "normalized": false,
1487
- "rstrip": false,
1488
- "single_word": false,
1489
- "special": true
1490
- },
1491
- "128186": {
1492
- "content": "<|reserved_special_token_178|>",
1493
- "lstrip": false,
1494
- "normalized": false,
1495
- "rstrip": false,
1496
- "single_word": false,
1497
- "special": true
1498
- },
1499
- "128187": {
1500
- "content": "<|reserved_special_token_179|>",
1501
- "lstrip": false,
1502
- "normalized": false,
1503
- "rstrip": false,
1504
- "single_word": false,
1505
- "special": true
1506
- },
1507
- "128188": {
1508
- "content": "<|reserved_special_token_180|>",
1509
- "lstrip": false,
1510
- "normalized": false,
1511
- "rstrip": false,
1512
- "single_word": false,
1513
- "special": true
1514
- },
1515
- "128189": {
1516
- "content": "<|reserved_special_token_181|>",
1517
- "lstrip": false,
1518
- "normalized": false,
1519
- "rstrip": false,
1520
- "single_word": false,
1521
- "special": true
1522
- },
1523
- "128190": {
1524
- "content": "<|reserved_special_token_182|>",
1525
- "lstrip": false,
1526
- "normalized": false,
1527
- "rstrip": false,
1528
- "single_word": false,
1529
- "special": true
1530
- },
1531
- "128191": {
1532
- "content": "<|reserved_special_token_183|>",
1533
- "lstrip": false,
1534
- "normalized": false,
1535
- "rstrip": false,
1536
- "single_word": false,
1537
- "special": true
1538
- },
1539
- "128192": {
1540
- "content": "<|reserved_special_token_184|>",
1541
- "lstrip": false,
1542
- "normalized": false,
1543
- "rstrip": false,
1544
- "single_word": false,
1545
- "special": true
1546
- },
1547
- "128193": {
1548
- "content": "<|reserved_special_token_185|>",
1549
- "lstrip": false,
1550
- "normalized": false,
1551
- "rstrip": false,
1552
- "single_word": false,
1553
- "special": true
1554
- },
1555
- "128194": {
1556
- "content": "<|reserved_special_token_186|>",
1557
- "lstrip": false,
1558
- "normalized": false,
1559
- "rstrip": false,
1560
- "single_word": false,
1561
- "special": true
1562
- },
1563
- "128195": {
1564
- "content": "<|reserved_special_token_187|>",
1565
- "lstrip": false,
1566
- "normalized": false,
1567
- "rstrip": false,
1568
- "single_word": false,
1569
- "special": true
1570
- },
1571
- "128196": {
1572
- "content": "<|reserved_special_token_188|>",
1573
- "lstrip": false,
1574
- "normalized": false,
1575
- "rstrip": false,
1576
- "single_word": false,
1577
- "special": true
1578
- },
1579
- "128197": {
1580
- "content": "<|reserved_special_token_189|>",
1581
- "lstrip": false,
1582
- "normalized": false,
1583
- "rstrip": false,
1584
- "single_word": false,
1585
- "special": true
1586
- },
1587
- "128198": {
1588
- "content": "<|reserved_special_token_190|>",
1589
- "lstrip": false,
1590
- "normalized": false,
1591
- "rstrip": false,
1592
- "single_word": false,
1593
- "special": true
1594
- },
1595
- "128199": {
1596
- "content": "<|reserved_special_token_191|>",
1597
- "lstrip": false,
1598
- "normalized": false,
1599
- "rstrip": false,
1600
- "single_word": false,
1601
- "special": true
1602
- },
1603
- "128200": {
1604
- "content": "<|reserved_special_token_192|>",
1605
- "lstrip": false,
1606
- "normalized": false,
1607
- "rstrip": false,
1608
- "single_word": false,
1609
- "special": true
1610
- },
1611
- "128201": {
1612
- "content": "<|reserved_special_token_193|>",
1613
- "lstrip": false,
1614
- "normalized": false,
1615
- "rstrip": false,
1616
- "single_word": false,
1617
- "special": true
1618
- },
1619
- "128202": {
1620
- "content": "<|reserved_special_token_194|>",
1621
- "lstrip": false,
1622
- "normalized": false,
1623
- "rstrip": false,
1624
- "single_word": false,
1625
- "special": true
1626
- },
1627
- "128203": {
1628
- "content": "<|reserved_special_token_195|>",
1629
- "lstrip": false,
1630
- "normalized": false,
1631
- "rstrip": false,
1632
- "single_word": false,
1633
- "special": true
1634
- },
1635
- "128204": {
1636
- "content": "<|reserved_special_token_196|>",
1637
- "lstrip": false,
1638
- "normalized": false,
1639
- "rstrip": false,
1640
- "single_word": false,
1641
- "special": true
1642
- },
1643
- "128205": {
1644
- "content": "<|reserved_special_token_197|>",
1645
- "lstrip": false,
1646
- "normalized": false,
1647
- "rstrip": false,
1648
- "single_word": false,
1649
- "special": true
1650
- },
1651
- "128206": {
1652
- "content": "<|reserved_special_token_198|>",
1653
- "lstrip": false,
1654
- "normalized": false,
1655
- "rstrip": false,
1656
- "single_word": false,
1657
- "special": true
1658
- },
1659
- "128207": {
1660
- "content": "<|reserved_special_token_199|>",
1661
- "lstrip": false,
1662
- "normalized": false,
1663
- "rstrip": false,
1664
- "single_word": false,
1665
- "special": true
1666
- },
1667
- "128208": {
1668
- "content": "<|reserved_special_token_200|>",
1669
- "lstrip": false,
1670
- "normalized": false,
1671
- "rstrip": false,
1672
- "single_word": false,
1673
- "special": true
1674
- },
1675
- "128209": {
1676
- "content": "<|reserved_special_token_201|>",
1677
- "lstrip": false,
1678
- "normalized": false,
1679
- "rstrip": false,
1680
- "single_word": false,
1681
- "special": true
1682
- },
1683
- "128210": {
1684
- "content": "<|reserved_special_token_202|>",
1685
- "lstrip": false,
1686
- "normalized": false,
1687
- "rstrip": false,
1688
- "single_word": false,
1689
- "special": true
1690
- },
1691
- "128211": {
1692
- "content": "<|reserved_special_token_203|>",
1693
- "lstrip": false,
1694
- "normalized": false,
1695
- "rstrip": false,
1696
- "single_word": false,
1697
- "special": true
1698
- },
1699
- "128212": {
1700
- "content": "<|reserved_special_token_204|>",
1701
- "lstrip": false,
1702
- "normalized": false,
1703
- "rstrip": false,
1704
- "single_word": false,
1705
- "special": true
1706
- },
1707
- "128213": {
1708
- "content": "<|reserved_special_token_205|>",
1709
- "lstrip": false,
1710
- "normalized": false,
1711
- "rstrip": false,
1712
- "single_word": false,
1713
- "special": true
1714
- },
1715
- "128214": {
1716
- "content": "<|reserved_special_token_206|>",
1717
- "lstrip": false,
1718
- "normalized": false,
1719
- "rstrip": false,
1720
- "single_word": false,
1721
- "special": true
1722
- },
1723
- "128215": {
1724
- "content": "<|reserved_special_token_207|>",
1725
- "lstrip": false,
1726
- "normalized": false,
1727
- "rstrip": false,
1728
- "single_word": false,
1729
- "special": true
1730
- },
1731
- "128216": {
1732
- "content": "<|reserved_special_token_208|>",
1733
- "lstrip": false,
1734
- "normalized": false,
1735
- "rstrip": false,
1736
- "single_word": false,
1737
- "special": true
1738
- },
1739
- "128217": {
1740
- "content": "<|reserved_special_token_209|>",
1741
- "lstrip": false,
1742
- "normalized": false,
1743
- "rstrip": false,
1744
- "single_word": false,
1745
- "special": true
1746
- },
1747
- "128218": {
1748
- "content": "<|reserved_special_token_210|>",
1749
- "lstrip": false,
1750
- "normalized": false,
1751
- "rstrip": false,
1752
- "single_word": false,
1753
- "special": true
1754
- },
1755
- "128219": {
1756
- "content": "<|reserved_special_token_211|>",
1757
- "lstrip": false,
1758
- "normalized": false,
1759
- "rstrip": false,
1760
- "single_word": false,
1761
- "special": true
1762
- },
1763
- "128220": {
1764
- "content": "<|reserved_special_token_212|>",
1765
- "lstrip": false,
1766
- "normalized": false,
1767
- "rstrip": false,
1768
- "single_word": false,
1769
- "special": true
1770
- },
1771
- "128221": {
1772
- "content": "<|reserved_special_token_213|>",
1773
- "lstrip": false,
1774
- "normalized": false,
1775
- "rstrip": false,
1776
- "single_word": false,
1777
- "special": true
1778
- },
1779
- "128222": {
1780
- "content": "<|reserved_special_token_214|>",
1781
- "lstrip": false,
1782
- "normalized": false,
1783
- "rstrip": false,
1784
- "single_word": false,
1785
- "special": true
1786
- },
1787
- "128223": {
1788
- "content": "<|reserved_special_token_215|>",
1789
- "lstrip": false,
1790
- "normalized": false,
1791
- "rstrip": false,
1792
- "single_word": false,
1793
- "special": true
1794
- },
1795
- "128224": {
1796
- "content": "<|reserved_special_token_216|>",
1797
- "lstrip": false,
1798
- "normalized": false,
1799
- "rstrip": false,
1800
- "single_word": false,
1801
- "special": true
1802
- },
1803
- "128225": {
1804
- "content": "<|reserved_special_token_217|>",
1805
- "lstrip": false,
1806
- "normalized": false,
1807
- "rstrip": false,
1808
- "single_word": false,
1809
- "special": true
1810
- },
1811
- "128226": {
1812
- "content": "<|reserved_special_token_218|>",
1813
- "lstrip": false,
1814
- "normalized": false,
1815
- "rstrip": false,
1816
- "single_word": false,
1817
- "special": true
1818
- },
1819
- "128227": {
1820
- "content": "<|reserved_special_token_219|>",
1821
- "lstrip": false,
1822
- "normalized": false,
1823
- "rstrip": false,
1824
- "single_word": false,
1825
- "special": true
1826
- },
1827
- "128228": {
1828
- "content": "<|reserved_special_token_220|>",
1829
- "lstrip": false,
1830
- "normalized": false,
1831
- "rstrip": false,
1832
- "single_word": false,
1833
- "special": true
1834
- },
1835
- "128229": {
1836
- "content": "<|reserved_special_token_221|>",
1837
- "lstrip": false,
1838
- "normalized": false,
1839
- "rstrip": false,
1840
- "single_word": false,
1841
- "special": true
1842
- },
1843
- "128230": {
1844
- "content": "<|reserved_special_token_222|>",
1845
- "lstrip": false,
1846
- "normalized": false,
1847
- "rstrip": false,
1848
- "single_word": false,
1849
- "special": true
1850
- },
1851
- "128231": {
1852
- "content": "<|reserved_special_token_223|>",
1853
- "lstrip": false,
1854
- "normalized": false,
1855
- "rstrip": false,
1856
- "single_word": false,
1857
- "special": true
1858
- },
1859
- "128232": {
1860
- "content": "<|reserved_special_token_224|>",
1861
- "lstrip": false,
1862
- "normalized": false,
1863
- "rstrip": false,
1864
- "single_word": false,
1865
- "special": true
1866
- },
1867
- "128233": {
1868
- "content": "<|reserved_special_token_225|>",
1869
- "lstrip": false,
1870
- "normalized": false,
1871
- "rstrip": false,
1872
- "single_word": false,
1873
- "special": true
1874
- },
1875
- "128234": {
1876
- "content": "<|reserved_special_token_226|>",
1877
- "lstrip": false,
1878
- "normalized": false,
1879
- "rstrip": false,
1880
- "single_word": false,
1881
- "special": true
1882
- },
1883
- "128235": {
1884
- "content": "<|reserved_special_token_227|>",
1885
- "lstrip": false,
1886
- "normalized": false,
1887
- "rstrip": false,
1888
- "single_word": false,
1889
- "special": true
1890
- },
1891
- "128236": {
1892
- "content": "<|reserved_special_token_228|>",
1893
- "lstrip": false,
1894
- "normalized": false,
1895
- "rstrip": false,
1896
- "single_word": false,
1897
- "special": true
1898
- },
1899
- "128237": {
1900
- "content": "<|reserved_special_token_229|>",
1901
- "lstrip": false,
1902
- "normalized": false,
1903
- "rstrip": false,
1904
- "single_word": false,
1905
- "special": true
1906
- },
1907
- "128238": {
1908
- "content": "<|reserved_special_token_230|>",
1909
- "lstrip": false,
1910
- "normalized": false,
1911
- "rstrip": false,
1912
- "single_word": false,
1913
- "special": true
1914
- },
1915
- "128239": {
1916
- "content": "<|reserved_special_token_231|>",
1917
- "lstrip": false,
1918
- "normalized": false,
1919
- "rstrip": false,
1920
- "single_word": false,
1921
- "special": true
1922
- },
1923
- "128240": {
1924
- "content": "<|reserved_special_token_232|>",
1925
- "lstrip": false,
1926
- "normalized": false,
1927
- "rstrip": false,
1928
- "single_word": false,
1929
- "special": true
1930
- },
1931
- "128241": {
1932
- "content": "<|reserved_special_token_233|>",
1933
- "lstrip": false,
1934
- "normalized": false,
1935
- "rstrip": false,
1936
- "single_word": false,
1937
- "special": true
1938
- },
1939
- "128242": {
1940
- "content": "<|reserved_special_token_234|>",
1941
- "lstrip": false,
1942
- "normalized": false,
1943
- "rstrip": false,
1944
- "single_word": false,
1945
- "special": true
1946
- },
1947
- "128243": {
1948
- "content": "<|reserved_special_token_235|>",
1949
- "lstrip": false,
1950
- "normalized": false,
1951
- "rstrip": false,
1952
- "single_word": false,
1953
- "special": true
1954
- },
1955
- "128244": {
1956
- "content": "<|reserved_special_token_236|>",
1957
- "lstrip": false,
1958
- "normalized": false,
1959
- "rstrip": false,
1960
- "single_word": false,
1961
- "special": true
1962
- },
1963
- "128245": {
1964
- "content": "<|reserved_special_token_237|>",
1965
- "lstrip": false,
1966
- "normalized": false,
1967
- "rstrip": false,
1968
- "single_word": false,
1969
- "special": true
1970
- },
1971
- "128246": {
1972
- "content": "<|reserved_special_token_238|>",
1973
- "lstrip": false,
1974
- "normalized": false,
1975
- "rstrip": false,
1976
- "single_word": false,
1977
- "special": true
1978
- },
1979
- "128247": {
1980
- "content": "<|reserved_special_token_239|>",
1981
- "lstrip": false,
1982
- "normalized": false,
1983
- "rstrip": false,
1984
- "single_word": false,
1985
- "special": true
1986
- },
1987
- "128248": {
1988
- "content": "<|reserved_special_token_240|>",
1989
- "lstrip": false,
1990
- "normalized": false,
1991
- "rstrip": false,
1992
- "single_word": false,
1993
- "special": true
1994
- },
1995
- "128249": {
1996
- "content": "<|reserved_special_token_241|>",
1997
- "lstrip": false,
1998
- "normalized": false,
1999
- "rstrip": false,
2000
- "single_word": false,
2001
- "special": true
2002
- },
2003
- "128250": {
2004
- "content": "<|reserved_special_token_242|>",
2005
- "lstrip": false,
2006
- "normalized": false,
2007
- "rstrip": false,
2008
- "single_word": false,
2009
- "special": true
2010
- },
2011
- "128251": {
2012
- "content": "<|reserved_special_token_243|>",
2013
- "lstrip": false,
2014
- "normalized": false,
2015
- "rstrip": false,
2016
- "single_word": false,
2017
- "special": true
2018
- },
2019
- "128252": {
2020
- "content": "<|reserved_special_token_244|>",
2021
- "lstrip": false,
2022
- "normalized": false,
2023
- "rstrip": false,
2024
- "single_word": false,
2025
- "special": true
2026
- },
2027
- "128253": {
2028
- "content": "<|reserved_special_token_245|>",
2029
- "lstrip": false,
2030
- "normalized": false,
2031
- "rstrip": false,
2032
- "single_word": false,
2033
- "special": true
2034
- },
2035
- "128254": {
2036
- "content": "<|reserved_special_token_246|>",
2037
- "lstrip": false,
2038
- "normalized": false,
2039
- "rstrip": false,
2040
- "single_word": false,
2041
- "special": true
2042
- },
2043
- "128255": {
2044
- "content": "<|reserved_special_token_247|>",
2045
- "lstrip": false,
2046
- "normalized": false,
2047
- "rstrip": false,
2048
- "single_word": false,
2049
- "special": true
2050
- }
2051
- },
2052
- "bos_token": "<|begin_of_text|>",
2053
- "clean_up_tokenization_spaces": true,
2054
- "eos_token": "<|im_end|>",
2055
- "model_input_names": [
2056
- "input_ids",
2057
- "attention_mask"
2058
- ],
2059
- "model_max_length": 131072,
2060
- "pad_token": "<|finetune_right_pad_id|>",
2061
- "tokenizer_class": "PreTrainedTokenizerFast"
2062
- }
checkpoint-349/trainer_state.json DELETED
@@ -1,2508 +0,0 @@
1
- {
2
- "best_metric": null,
3
- "best_model_checkpoint": null,
4
- "epoch": 0.9992841803865425,
5
- "eval_steps": 88,
6
- "global_step": 349,
7
- "is_hyper_param_search": false,
8
- "is_local_process_zero": true,
9
- "is_world_process_zero": true,
10
- "log_history": [
11
- {
12
- "epoch": 0.002863278453829635,
13
- "grad_norm": 1.7433022469392645,
14
- "learning_rate": 2.9411764705882356e-07,
15
- "loss": 2.5227,
16
- "step": 1
17
- },
18
- {
19
- "epoch": 0.002863278453829635,
20
- "eval_loss": 2.9798059463500977,
21
- "eval_runtime": 15.4107,
22
- "eval_samples_per_second": 14.146,
23
- "eval_steps_per_second": 2.401,
24
- "step": 1
25
- },
26
- {
27
- "epoch": 0.00572655690765927,
28
- "grad_norm": 1.7260085141016746,
29
- "learning_rate": 5.882352941176471e-07,
30
- "loss": 2.5557,
31
- "step": 2
32
- },
33
- {
34
- "epoch": 0.008589835361488905,
35
- "grad_norm": 1.6875415011788866,
36
- "learning_rate": 8.823529411764707e-07,
37
- "loss": 2.5153,
38
- "step": 3
39
- },
40
- {
41
- "epoch": 0.01145311381531854,
42
- "grad_norm": 1.501231559534811,
43
- "learning_rate": 1.1764705882352942e-06,
44
- "loss": 2.5383,
45
- "step": 4
46
- },
47
- {
48
- "epoch": 0.014316392269148175,
49
- "grad_norm": 1.3386535922727172,
50
- "learning_rate": 1.4705882352941177e-06,
51
- "loss": 2.5313,
52
- "step": 5
53
- },
54
- {
55
- "epoch": 0.01717967072297781,
56
- "grad_norm": 1.4453252560511072,
57
- "learning_rate": 1.7647058823529414e-06,
58
- "loss": 2.5023,
59
- "step": 6
60
- },
61
- {
62
- "epoch": 0.020042949176807445,
63
- "grad_norm": 1.2649460157616863,
64
- "learning_rate": 2.058823529411765e-06,
65
- "loss": 2.516,
66
- "step": 7
67
- },
68
- {
69
- "epoch": 0.02290622763063708,
70
- "grad_norm": 1.2272485421450576,
71
- "learning_rate": 2.3529411764705885e-06,
72
- "loss": 2.4859,
73
- "step": 8
74
- },
75
- {
76
- "epoch": 0.025769506084466716,
77
- "grad_norm": 0.9566083602405432,
78
- "learning_rate": 2.647058823529412e-06,
79
- "loss": 2.505,
80
- "step": 9
81
- },
82
- {
83
- "epoch": 0.02863278453829635,
84
- "grad_norm": 0.9324778074733336,
85
- "learning_rate": 2.9411764705882355e-06,
86
- "loss": 2.5398,
87
- "step": 10
88
- },
89
- {
90
- "epoch": 0.031496062992125984,
91
- "grad_norm": 0.9121134598727566,
92
- "learning_rate": 3.2352941176470594e-06,
93
- "loss": 2.4925,
94
- "step": 11
95
- },
96
- {
97
- "epoch": 0.03435934144595562,
98
- "grad_norm": 0.7810248065316661,
99
- "learning_rate": 3.529411764705883e-06,
100
- "loss": 2.5252,
101
- "step": 12
102
- },
103
- {
104
- "epoch": 0.03722261989978525,
105
- "grad_norm": 0.7605027751280174,
106
- "learning_rate": 3.8235294117647055e-06,
107
- "loss": 2.4853,
108
- "step": 13
109
- },
110
- {
111
- "epoch": 0.04008589835361489,
112
- "grad_norm": 0.7103184324186846,
113
- "learning_rate": 4.11764705882353e-06,
114
- "loss": 2.5164,
115
- "step": 14
116
- },
117
- {
118
- "epoch": 0.04294917680744453,
119
- "grad_norm": 0.7150261519787532,
120
- "learning_rate": 4.411764705882353e-06,
121
- "loss": 2.5179,
122
- "step": 15
123
- },
124
- {
125
- "epoch": 0.04581245526127416,
126
- "grad_norm": 0.6401893492590393,
127
- "learning_rate": 4.705882352941177e-06,
128
- "loss": 2.5337,
129
- "step": 16
130
- },
131
- {
132
- "epoch": 0.048675733715103794,
133
- "grad_norm": 0.6382120155027352,
134
- "learning_rate": 5e-06,
135
- "loss": 2.4971,
136
- "step": 17
137
- },
138
- {
139
- "epoch": 0.05153901216893343,
140
- "grad_norm": 0.638974871748697,
141
- "learning_rate": 5.294117647058824e-06,
142
- "loss": 2.5137,
143
- "step": 18
144
- },
145
- {
146
- "epoch": 0.05440229062276306,
147
- "grad_norm": 0.6276417784569189,
148
- "learning_rate": 5.588235294117647e-06,
149
- "loss": 2.5221,
150
- "step": 19
151
- },
152
- {
153
- "epoch": 0.0572655690765927,
154
- "grad_norm": 0.6267712807753728,
155
- "learning_rate": 5.882352941176471e-06,
156
- "loss": 2.4958,
157
- "step": 20
158
- },
159
- {
160
- "epoch": 0.06012884753042233,
161
- "grad_norm": 0.6277644572158624,
162
- "learning_rate": 6.176470588235295e-06,
163
- "loss": 2.4878,
164
- "step": 21
165
- },
166
- {
167
- "epoch": 0.06299212598425197,
168
- "grad_norm": 0.6127762919814617,
169
- "learning_rate": 6.470588235294119e-06,
170
- "loss": 2.4914,
171
- "step": 22
172
- },
173
- {
174
- "epoch": 0.0658554044380816,
175
- "grad_norm": 0.6038751254503039,
176
- "learning_rate": 6.764705882352942e-06,
177
- "loss": 2.51,
178
- "step": 23
179
- },
180
- {
181
- "epoch": 0.06871868289191124,
182
- "grad_norm": 0.6003698592486557,
183
- "learning_rate": 7.058823529411766e-06,
184
- "loss": 2.4906,
185
- "step": 24
186
- },
187
- {
188
- "epoch": 0.07158196134574088,
189
- "grad_norm": 0.6282240768376209,
190
- "learning_rate": 7.352941176470589e-06,
191
- "loss": 2.5082,
192
- "step": 25
193
- },
194
- {
195
- "epoch": 0.0744452397995705,
196
- "grad_norm": 0.6087841519965377,
197
- "learning_rate": 7.647058823529411e-06,
198
- "loss": 2.4878,
199
- "step": 26
200
- },
201
- {
202
- "epoch": 0.07730851825340014,
203
- "grad_norm": 0.5860330254627644,
204
- "learning_rate": 7.941176470588236e-06,
205
- "loss": 2.5182,
206
- "step": 27
207
- },
208
- {
209
- "epoch": 0.08017179670722978,
210
- "grad_norm": 0.5939132759806514,
211
- "learning_rate": 8.23529411764706e-06,
212
- "loss": 2.501,
213
- "step": 28
214
- },
215
- {
216
- "epoch": 0.08303507516105942,
217
- "grad_norm": 0.6359589764914113,
218
- "learning_rate": 8.529411764705883e-06,
219
- "loss": 2.523,
220
- "step": 29
221
- },
222
- {
223
- "epoch": 0.08589835361488905,
224
- "grad_norm": 0.6255572369664097,
225
- "learning_rate": 8.823529411764707e-06,
226
- "loss": 2.4931,
227
- "step": 30
228
- },
229
- {
230
- "epoch": 0.08876163206871868,
231
- "grad_norm": 0.5910507560604619,
232
- "learning_rate": 9.11764705882353e-06,
233
- "loss": 2.4838,
234
- "step": 31
235
- },
236
- {
237
- "epoch": 0.09162491052254831,
238
- "grad_norm": 0.581588749176898,
239
- "learning_rate": 9.411764705882354e-06,
240
- "loss": 2.4853,
241
- "step": 32
242
- },
243
- {
244
- "epoch": 0.09448818897637795,
245
- "grad_norm": 0.6483321707461923,
246
- "learning_rate": 9.705882352941177e-06,
247
- "loss": 2.4944,
248
- "step": 33
249
- },
250
- {
251
- "epoch": 0.09735146743020759,
252
- "grad_norm": 0.5987568368261761,
253
- "learning_rate": 1e-05,
254
- "loss": 2.4699,
255
- "step": 34
256
- },
257
- {
258
- "epoch": 0.10021474588403723,
259
- "grad_norm": 0.6206579432531898,
260
- "learning_rate": 9.999751334779716e-06,
261
- "loss": 2.523,
262
- "step": 35
263
- },
264
- {
265
- "epoch": 0.10307802433786686,
266
- "grad_norm": 0.6064407921800384,
267
- "learning_rate": 9.999005363852619e-06,
268
- "loss": 2.5203,
269
- "step": 36
270
- },
271
- {
272
- "epoch": 0.10594130279169649,
273
- "grad_norm": 0.5721412252832796,
274
- "learning_rate": 9.997762161417517e-06,
275
- "loss": 2.5012,
276
- "step": 37
277
- },
278
- {
279
- "epoch": 0.10880458124552612,
280
- "grad_norm": 0.5497451217796617,
281
- "learning_rate": 9.996021851130897e-06,
282
- "loss": 2.4914,
283
- "step": 38
284
- },
285
- {
286
- "epoch": 0.11166785969935576,
287
- "grad_norm": 0.5470018162226529,
288
- "learning_rate": 9.993784606094612e-06,
289
- "loss": 2.501,
290
- "step": 39
291
- },
292
- {
293
- "epoch": 0.1145311381531854,
294
- "grad_norm": 0.5866491336876943,
295
- "learning_rate": 9.991050648838676e-06,
296
- "loss": 2.48,
297
- "step": 40
298
- },
299
- {
300
- "epoch": 0.11739441660701504,
301
- "grad_norm": 0.5438529330919207,
302
- "learning_rate": 9.987820251299121e-06,
303
- "loss": 2.4983,
304
- "step": 41
305
- },
306
- {
307
- "epoch": 0.12025769506084466,
308
- "grad_norm": 0.5740915531654763,
309
- "learning_rate": 9.984093734790955e-06,
310
- "loss": 2.5182,
311
- "step": 42
312
- },
313
- {
314
- "epoch": 0.1231209735146743,
315
- "grad_norm": 0.5664275625259377,
316
- "learning_rate": 9.979871469976197e-06,
317
- "loss": 2.4862,
318
- "step": 43
319
- },
320
- {
321
- "epoch": 0.12598425196850394,
322
- "grad_norm": 0.5926385697048842,
323
- "learning_rate": 9.975153876827008e-06,
324
- "loss": 2.4896,
325
- "step": 44
326
- },
327
- {
328
- "epoch": 0.12884753042233357,
329
- "grad_norm": 0.574391809445724,
330
- "learning_rate": 9.969941424583926e-06,
331
- "loss": 2.5367,
332
- "step": 45
333
- },
334
- {
335
- "epoch": 0.1317108088761632,
336
- "grad_norm": 0.5397781778798922,
337
- "learning_rate": 9.964234631709188e-06,
338
- "loss": 2.5025,
339
- "step": 46
340
- },
341
- {
342
- "epoch": 0.13457408732999285,
343
- "grad_norm": 0.5539122724303173,
344
- "learning_rate": 9.958034065835151e-06,
345
- "loss": 2.5278,
346
- "step": 47
347
- },
348
- {
349
- "epoch": 0.13743736578382248,
350
- "grad_norm": 0.5818328936194783,
351
- "learning_rate": 9.951340343707852e-06,
352
- "loss": 2.5188,
353
- "step": 48
354
- },
355
- {
356
- "epoch": 0.14030064423765212,
357
- "grad_norm": 0.7458319373172223,
358
- "learning_rate": 9.944154131125643e-06,
359
- "loss": 2.4617,
360
- "step": 49
361
- },
362
- {
363
- "epoch": 0.14316392269148176,
364
- "grad_norm": 0.5661010380152568,
365
- "learning_rate": 9.936476142872979e-06,
366
- "loss": 2.4926,
367
- "step": 50
368
- },
369
- {
370
- "epoch": 0.14602720114531137,
371
- "grad_norm": 0.5650834703916657,
372
- "learning_rate": 9.928307142649315e-06,
373
- "loss": 2.4848,
374
- "step": 51
375
- },
376
- {
377
- "epoch": 0.148890479599141,
378
- "grad_norm": 0.548453507209605,
379
- "learning_rate": 9.91964794299315e-06,
380
- "loss": 2.4969,
381
- "step": 52
382
- },
383
- {
384
- "epoch": 0.15175375805297064,
385
- "grad_norm": 0.5758188221734539,
386
- "learning_rate": 9.910499405201195e-06,
387
- "loss": 2.484,
388
- "step": 53
389
- },
390
- {
391
- "epoch": 0.15461703650680028,
392
- "grad_norm": 0.5753520047837293,
393
- "learning_rate": 9.900862439242719e-06,
394
- "loss": 2.4941,
395
- "step": 54
396
- },
397
- {
398
- "epoch": 0.15748031496062992,
399
- "grad_norm": 0.5769855830607382,
400
- "learning_rate": 9.890738003669029e-06,
401
- "loss": 2.4856,
402
- "step": 55
403
- },
404
- {
405
- "epoch": 0.16034359341445956,
406
- "grad_norm": 0.5490460548187522,
407
- "learning_rate": 9.880127105518122e-06,
408
- "loss": 2.5226,
409
- "step": 56
410
- },
411
- {
412
- "epoch": 0.1632068718682892,
413
- "grad_norm": 0.601542983516753,
414
- "learning_rate": 9.869030800214531e-06,
415
- "loss": 2.5096,
416
- "step": 57
417
- },
418
- {
419
- "epoch": 0.16607015032211883,
420
- "grad_norm": 0.5613978478420705,
421
- "learning_rate": 9.857450191464337e-06,
422
- "loss": 2.5145,
423
- "step": 58
424
- },
425
- {
426
- "epoch": 0.16893342877594847,
427
- "grad_norm": 0.5600847385523823,
428
- "learning_rate": 9.84538643114539e-06,
429
- "loss": 2.5111,
430
- "step": 59
431
- },
432
- {
433
- "epoch": 0.1717967072297781,
434
- "grad_norm": 0.5401691538446458,
435
- "learning_rate": 9.832840719192737e-06,
436
- "loss": 2.498,
437
- "step": 60
438
- },
439
- {
440
- "epoch": 0.17465998568360774,
441
- "grad_norm": 0.5520094893813158,
442
- "learning_rate": 9.819814303479268e-06,
443
- "loss": 2.494,
444
- "step": 61
445
- },
446
- {
447
- "epoch": 0.17752326413743735,
448
- "grad_norm": 0.5497902576608895,
449
- "learning_rate": 9.806308479691595e-06,
450
- "loss": 2.5017,
451
- "step": 62
452
- },
453
- {
454
- "epoch": 0.180386542591267,
455
- "grad_norm": 0.525740103564976,
456
- "learning_rate": 9.792324591201179e-06,
457
- "loss": 2.5068,
458
- "step": 63
459
- },
460
- {
461
- "epoch": 0.18324982104509663,
462
- "grad_norm": 0.5544436996206911,
463
- "learning_rate": 9.777864028930705e-06,
464
- "loss": 2.4976,
465
- "step": 64
466
- },
467
- {
468
- "epoch": 0.18611309949892627,
469
- "grad_norm": 0.555608149815371,
470
- "learning_rate": 9.762928231215731e-06,
471
- "loss": 2.5112,
472
- "step": 65
473
- },
474
- {
475
- "epoch": 0.1889763779527559,
476
- "grad_norm": 0.5356931315907684,
477
- "learning_rate": 9.747518683661632e-06,
478
- "loss": 2.4967,
479
- "step": 66
480
- },
481
- {
482
- "epoch": 0.19183965640658554,
483
- "grad_norm": 0.5603711133278504,
484
- "learning_rate": 9.731636918995821e-06,
485
- "loss": 2.5134,
486
- "step": 67
487
- },
488
- {
489
- "epoch": 0.19470293486041518,
490
- "grad_norm": 0.5414742895107459,
491
- "learning_rate": 9.715284516915303e-06,
492
- "loss": 2.525,
493
- "step": 68
494
- },
495
- {
496
- "epoch": 0.19756621331424482,
497
- "grad_norm": 0.5348649401490828,
498
- "learning_rate": 9.698463103929542e-06,
499
- "loss": 2.4903,
500
- "step": 69
501
- },
502
- {
503
- "epoch": 0.20042949176807445,
504
- "grad_norm": 0.5179063198083461,
505
- "learning_rate": 9.681174353198687e-06,
506
- "loss": 2.5107,
507
- "step": 70
508
- },
509
- {
510
- "epoch": 0.2032927702219041,
511
- "grad_norm": 0.5246232399143359,
512
- "learning_rate": 9.663419984367139e-06,
513
- "loss": 2.5203,
514
- "step": 71
515
- },
516
- {
517
- "epoch": 0.20615604867573373,
518
- "grad_norm": 0.5289273463036179,
519
- "learning_rate": 9.645201763392513e-06,
520
- "loss": 2.5053,
521
- "step": 72
522
- },
523
- {
524
- "epoch": 0.20901932712956334,
525
- "grad_norm": 0.5433820476794674,
526
- "learning_rate": 9.626521502369984e-06,
527
- "loss": 2.4764,
528
- "step": 73
529
- },
530
- {
531
- "epoch": 0.21188260558339297,
532
- "grad_norm": 0.5505407639513397,
533
- "learning_rate": 9.60738105935204e-06,
534
- "loss": 2.4838,
535
- "step": 74
536
- },
537
- {
538
- "epoch": 0.2147458840372226,
539
- "grad_norm": 0.5690742761627252,
540
- "learning_rate": 9.58778233816367e-06,
541
- "loss": 2.4774,
542
- "step": 75
543
- },
544
- {
545
- "epoch": 0.21760916249105225,
546
- "grad_norm": 0.514161333996788,
547
- "learning_rate": 9.567727288213005e-06,
548
- "loss": 2.4778,
549
- "step": 76
550
- },
551
- {
552
- "epoch": 0.2204724409448819,
553
- "grad_norm": 0.5512425303188625,
554
- "learning_rate": 9.547217904297411e-06,
555
- "loss": 2.488,
556
- "step": 77
557
- },
558
- {
559
- "epoch": 0.22333571939871152,
560
- "grad_norm": 0.547345521950976,
561
- "learning_rate": 9.526256226405075e-06,
562
- "loss": 2.5231,
563
- "step": 78
564
- },
565
- {
566
- "epoch": 0.22619899785254116,
567
- "grad_norm": 0.572668835030032,
568
- "learning_rate": 9.504844339512096e-06,
569
- "loss": 2.4904,
570
- "step": 79
571
- },
572
- {
573
- "epoch": 0.2290622763063708,
574
- "grad_norm": 0.5586691177416543,
575
- "learning_rate": 9.482984373375105e-06,
576
- "loss": 2.5099,
577
- "step": 80
578
- },
579
- {
580
- "epoch": 0.23192555476020044,
581
- "grad_norm": 0.5580296085776706,
582
- "learning_rate": 9.460678502319419e-06,
583
- "loss": 2.4894,
584
- "step": 81
585
- },
586
- {
587
- "epoch": 0.23478883321403007,
588
- "grad_norm": 0.5485012119106963,
589
- "learning_rate": 9.437928945022772e-06,
590
- "loss": 2.4949,
591
- "step": 82
592
- },
593
- {
594
- "epoch": 0.2376521116678597,
595
- "grad_norm": 0.543333232228658,
596
- "learning_rate": 9.414737964294636e-06,
597
- "loss": 2.4868,
598
- "step": 83
599
- },
600
- {
601
- "epoch": 0.24051539012168932,
602
- "grad_norm": 0.5890968894329803,
603
- "learning_rate": 9.391107866851143e-06,
604
- "loss": 2.5089,
605
- "step": 84
606
- },
607
- {
608
- "epoch": 0.24337866857551896,
609
- "grad_norm": 0.5506851813496826,
610
- "learning_rate": 9.36704100308565e-06,
611
- "loss": 2.4458,
612
- "step": 85
613
- },
614
- {
615
- "epoch": 0.2462419470293486,
616
- "grad_norm": 0.5504307062507774,
617
- "learning_rate": 9.342539766834945e-06,
618
- "loss": 2.4797,
619
- "step": 86
620
- },
621
- {
622
- "epoch": 0.24910522548317823,
623
- "grad_norm": 0.548732984699912,
624
- "learning_rate": 9.317606595141156e-06,
625
- "loss": 2.5029,
626
- "step": 87
627
- },
628
- {
629
- "epoch": 0.25196850393700787,
630
- "grad_norm": 0.5505255392480715,
631
- "learning_rate": 9.292243968009332e-06,
632
- "loss": 2.5027,
633
- "step": 88
634
- },
635
- {
636
- "epoch": 0.25196850393700787,
637
- "eval_loss": 2.9500625133514404,
638
- "eval_runtime": 15.442,
639
- "eval_samples_per_second": 14.117,
640
- "eval_steps_per_second": 2.396,
641
- "step": 88
642
- },
643
- {
644
- "epoch": 0.25483178239083754,
645
- "grad_norm": 0.5482944395523252,
646
- "learning_rate": 9.266454408160779e-06,
647
- "loss": 2.5322,
648
- "step": 89
649
- },
650
- {
651
- "epoch": 0.25769506084466715,
652
- "grad_norm": 0.541771069630789,
653
- "learning_rate": 9.24024048078213e-06,
654
- "loss": 2.5155,
655
- "step": 90
656
- },
657
- {
658
- "epoch": 0.26055833929849676,
659
- "grad_norm": 0.5325088342727101,
660
- "learning_rate": 9.213604793270196e-06,
661
- "loss": 2.4651,
662
- "step": 91
663
- },
664
- {
665
- "epoch": 0.2634216177523264,
666
- "grad_norm": 0.5265061377076917,
667
- "learning_rate": 9.186549994972618e-06,
668
- "loss": 2.4755,
669
- "step": 92
670
- },
671
- {
672
- "epoch": 0.26628489620615603,
673
- "grad_norm": 0.5466047048898959,
674
- "learning_rate": 9.159078776924347e-06,
675
- "loss": 2.4785,
676
- "step": 93
677
- },
678
- {
679
- "epoch": 0.2691481746599857,
680
- "grad_norm": 0.5303191520111897,
681
- "learning_rate": 9.131193871579975e-06,
682
- "loss": 2.5137,
683
- "step": 94
684
- },
685
- {
686
- "epoch": 0.2720114531138153,
687
- "grad_norm": 0.5517463944794626,
688
- "learning_rate": 9.102898052541959e-06,
689
- "loss": 2.4892,
690
- "step": 95
691
- },
692
- {
693
- "epoch": 0.27487473156764497,
694
- "grad_norm": 0.5100270059915952,
695
- "learning_rate": 9.074194134284726e-06,
696
- "loss": 2.5048,
697
- "step": 96
698
- },
699
- {
700
- "epoch": 0.2777380100214746,
701
- "grad_norm": 0.5278965930755488,
702
- "learning_rate": 9.045084971874738e-06,
703
- "loss": 2.4806,
704
- "step": 97
705
- },
706
- {
707
- "epoch": 0.28060128847530424,
708
- "grad_norm": 0.5386440502957012,
709
- "learning_rate": 9.01557346068651e-06,
710
- "loss": 2.4637,
711
- "step": 98
712
- },
713
- {
714
- "epoch": 0.28346456692913385,
715
- "grad_norm": 0.5424442293047355,
716
- "learning_rate": 8.985662536114614e-06,
717
- "loss": 2.4731,
718
- "step": 99
719
- },
720
- {
721
- "epoch": 0.2863278453829635,
722
- "grad_norm": 0.5447548421095255,
723
- "learning_rate": 8.955355173281709e-06,
724
- "loss": 2.456,
725
- "step": 100
726
- },
727
- {
728
- "epoch": 0.28919112383679313,
729
- "grad_norm": 0.5056892441431114,
730
- "learning_rate": 8.924654386742613e-06,
731
- "loss": 2.5027,
732
- "step": 101
733
- },
734
- {
735
- "epoch": 0.29205440229062274,
736
- "grad_norm": 0.5577307275143056,
737
- "learning_rate": 8.89356323018447e-06,
738
- "loss": 2.5396,
739
- "step": 102
740
- },
741
- {
742
- "epoch": 0.2949176807444524,
743
- "grad_norm": 0.5269321742421326,
744
- "learning_rate": 8.862084796122998e-06,
745
- "loss": 2.5348,
746
- "step": 103
747
- },
748
- {
749
- "epoch": 0.297780959198282,
750
- "grad_norm": 0.573483487118278,
751
- "learning_rate": 8.83022221559489e-06,
752
- "loss": 2.5129,
753
- "step": 104
754
- },
755
- {
756
- "epoch": 0.3006442376521117,
757
- "grad_norm": 0.5053757854271658,
758
- "learning_rate": 8.797978657846391e-06,
759
- "loss": 2.4992,
760
- "step": 105
761
- },
762
- {
763
- "epoch": 0.3035075161059413,
764
- "grad_norm": 0.5463649574920005,
765
- "learning_rate": 8.765357330018056e-06,
766
- "loss": 2.4745,
767
- "step": 106
768
- },
769
- {
770
- "epoch": 0.30637079455977095,
771
- "grad_norm": 0.5380399569585859,
772
- "learning_rate": 8.732361476825752e-06,
773
- "loss": 2.5305,
774
- "step": 107
775
- },
776
- {
777
- "epoch": 0.30923407301360056,
778
- "grad_norm": 0.501404327425355,
779
- "learning_rate": 8.698994380237921e-06,
780
- "loss": 2.493,
781
- "step": 108
782
- },
783
- {
784
- "epoch": 0.31209735146743023,
785
- "grad_norm": 0.5204332392598845,
786
- "learning_rate": 8.665259359149132e-06,
787
- "loss": 2.4834,
788
- "step": 109
789
- },
790
- {
791
- "epoch": 0.31496062992125984,
792
- "grad_norm": 0.5172941461879834,
793
- "learning_rate": 8.631159769049965e-06,
794
- "loss": 2.4663,
795
- "step": 110
796
- },
797
- {
798
- "epoch": 0.3178239083750895,
799
- "grad_norm": 0.5633507992944607,
800
- "learning_rate": 8.596699001693257e-06,
801
- "loss": 2.5106,
802
- "step": 111
803
- },
804
- {
805
- "epoch": 0.3206871868289191,
806
- "grad_norm": 0.5111870840713941,
807
- "learning_rate": 8.561880484756726e-06,
808
- "loss": 2.4961,
809
- "step": 112
810
- },
811
- {
812
- "epoch": 0.3235504652827487,
813
- "grad_norm": 0.5204943642446078,
814
- "learning_rate": 8.526707681502045e-06,
815
- "loss": 2.459,
816
- "step": 113
817
- },
818
- {
819
- "epoch": 0.3264137437365784,
820
- "grad_norm": 0.5243848667219405,
821
- "learning_rate": 8.491184090430365e-06,
822
- "loss": 2.4847,
823
- "step": 114
824
- },
825
- {
826
- "epoch": 0.329277022190408,
827
- "grad_norm": 0.5095475752397022,
828
- "learning_rate": 8.455313244934324e-06,
829
- "loss": 2.5041,
830
- "step": 115
831
- },
832
- {
833
- "epoch": 0.33214030064423766,
834
- "grad_norm": 0.5324839977569346,
835
- "learning_rate": 8.4190987129466e-06,
836
- "loss": 2.4581,
837
- "step": 116
838
- },
839
- {
840
- "epoch": 0.3350035790980673,
841
- "grad_norm": 0.537216858096812,
842
- "learning_rate": 8.382544096585028e-06,
843
- "loss": 2.4956,
844
- "step": 117
845
- },
846
- {
847
- "epoch": 0.33786685755189694,
848
- "grad_norm": 0.5026214156562598,
849
- "learning_rate": 8.345653031794292e-06,
850
- "loss": 2.5109,
851
- "step": 118
852
- },
853
- {
854
- "epoch": 0.34073013600572655,
855
- "grad_norm": 0.5589087292743029,
856
- "learning_rate": 8.308429187984298e-06,
857
- "loss": 2.4508,
858
- "step": 119
859
- },
860
- {
861
- "epoch": 0.3435934144595562,
862
- "grad_norm": 0.5175402219605563,
863
- "learning_rate": 8.270876267665173e-06,
864
- "loss": 2.4915,
865
- "step": 120
866
- },
867
- {
868
- "epoch": 0.3464566929133858,
869
- "grad_norm": 0.5228528180395297,
870
- "learning_rate": 8.232998006078998e-06,
871
- "loss": 2.4766,
872
- "step": 121
873
- },
874
- {
875
- "epoch": 0.3493199713672155,
876
- "grad_norm": 0.5654685794544859,
877
- "learning_rate": 8.19479817082828e-06,
878
- "loss": 2.4669,
879
- "step": 122
880
- },
881
- {
882
- "epoch": 0.3521832498210451,
883
- "grad_norm": 0.517826924265438,
884
- "learning_rate": 8.156280561501196e-06,
885
- "loss": 2.4913,
886
- "step": 123
887
- },
888
- {
889
- "epoch": 0.3550465282748747,
890
- "grad_norm": 0.5546777654441133,
891
- "learning_rate": 8.117449009293668e-06,
892
- "loss": 2.4551,
893
- "step": 124
894
- },
895
- {
896
- "epoch": 0.35790980672870437,
897
- "grad_norm": 0.5128294773578065,
898
- "learning_rate": 8.078307376628292e-06,
899
- "loss": 2.5143,
900
- "step": 125
901
- },
902
- {
903
- "epoch": 0.360773085182534,
904
- "grad_norm": 0.5181572014660524,
905
- "learning_rate": 8.038859556770152e-06,
906
- "loss": 2.4918,
907
- "step": 126
908
- },
909
- {
910
- "epoch": 0.36363636363636365,
911
- "grad_norm": 0.5580601606690623,
912
- "learning_rate": 7.99910947343957e-06,
913
- "loss": 2.4703,
914
- "step": 127
915
- },
916
- {
917
- "epoch": 0.36649964209019326,
918
- "grad_norm": 0.5310828252758204,
919
- "learning_rate": 7.95906108042184e-06,
920
- "loss": 2.4942,
921
- "step": 128
922
- },
923
- {
924
- "epoch": 0.3693629205440229,
925
- "grad_norm": 0.5572432262386191,
926
- "learning_rate": 7.918718361173951e-06,
927
- "loss": 2.5351,
928
- "step": 129
929
- },
930
- {
931
- "epoch": 0.37222619899785253,
932
- "grad_norm": 0.5627081677105857,
933
- "learning_rate": 7.87808532842837e-06,
934
- "loss": 2.4636,
935
- "step": 130
936
- },
937
- {
938
- "epoch": 0.3750894774516822,
939
- "grad_norm": 0.5613271800153685,
940
- "learning_rate": 7.83716602379391e-06,
941
- "loss": 2.4953,
942
- "step": 131
943
- },
944
- {
945
- "epoch": 0.3779527559055118,
946
- "grad_norm": 0.5394988971278871,
947
- "learning_rate": 7.795964517353734e-06,
948
- "loss": 2.5189,
949
- "step": 132
950
- },
951
- {
952
- "epoch": 0.38081603435934147,
953
- "grad_norm": 0.5313679498243665,
954
- "learning_rate": 7.754484907260513e-06,
955
- "loss": 2.4768,
956
- "step": 133
957
- },
958
- {
959
- "epoch": 0.3836793128131711,
960
- "grad_norm": 0.5703379561975805,
961
- "learning_rate": 7.712731319328798e-06,
962
- "loss": 2.4659,
963
- "step": 134
964
- },
965
- {
966
- "epoch": 0.3865425912670007,
967
- "grad_norm": 0.49255468232562744,
968
- "learning_rate": 7.670707906624644e-06,
969
- "loss": 2.4989,
970
- "step": 135
971
- },
972
- {
973
- "epoch": 0.38940586972083036,
974
- "grad_norm": 0.5185698028962875,
975
- "learning_rate": 7.628418849052523e-06,
976
- "loss": 2.4941,
977
- "step": 136
978
- },
979
- {
980
- "epoch": 0.39226914817465997,
981
- "grad_norm": 0.5378386670092136,
982
- "learning_rate": 7.585868352939564e-06,
983
- "loss": 2.4772,
984
- "step": 137
985
- },
986
- {
987
- "epoch": 0.39513242662848963,
988
- "grad_norm": 0.5379669385240866,
989
- "learning_rate": 7.543060650617159e-06,
- "loss": 2.4902,
- "step": 138
- },
- {
- "epoch": 0.39799570508231924,
- "grad_norm": 0.527227129367134,
- "learning_rate": 7.500000000000001e-06,
- "loss": 2.4898,
- "step": 139
- },
- {
- "epoch": 0.4008589835361489,
- "grad_norm": 0.5185278503926084,
- "learning_rate": 7.456690684162557e-06,
- "loss": 2.479,
- "step": 140
- },
- {
- "epoch": 0.4037222619899785,
- "grad_norm": 0.5028759823161459,
- "learning_rate": 7.413137010913055e-06,
- "loss": 2.5108,
- "step": 141
- },
- {
- "epoch": 0.4065855404438082,
- "grad_norm": 0.5323427436694725,
- "learning_rate": 7.369343312364994e-06,
- "loss": 2.4895,
- "step": 142
- },
- {
- "epoch": 0.4094488188976378,
- "grad_norm": 0.5303373348854972,
- "learning_rate": 7.3253139445062535e-06,
- "loss": 2.4831,
- "step": 143
- },
- {
- "epoch": 0.41231209735146745,
- "grad_norm": 0.5171505606227076,
- "learning_rate": 7.281053286765816e-06,
- "loss": 2.4691,
- "step": 144
- },
- {
- "epoch": 0.41517537580529706,
- "grad_norm": 0.5007625110632795,
- "learning_rate": 7.236565741578163e-06,
- "loss": 2.4864,
- "step": 145
- },
- {
- "epoch": 0.4180386542591267,
- "grad_norm": 0.5192090084896421,
- "learning_rate": 7.191855733945388e-06,
- "loss": 2.5329,
- "step": 146
- },
- {
- "epoch": 0.42090193271295634,
- "grad_norm": 0.5408008884476913,
- "learning_rate": 7.146927710997047e-06,
- "loss": 2.4846,
- "step": 147
- },
- {
- "epoch": 0.42376521116678595,
- "grad_norm": 0.4927740392753858,
- "learning_rate": 7.101786141547829e-06,
- "loss": 2.458,
- "step": 148
- },
- {
- "epoch": 0.4266284896206156,
- "grad_norm": 0.5063973450614866,
- "learning_rate": 7.056435515653059e-06,
- "loss": 2.4772,
- "step": 149
- },
- {
- "epoch": 0.4294917680744452,
- "grad_norm": 0.5403619047312843,
- "learning_rate": 7.010880344162087e-06,
- "loss": 2.4837,
- "step": 150
- },
- {
- "epoch": 0.4323550465282749,
- "grad_norm": 0.4922676337577914,
- "learning_rate": 6.965125158269619e-06,
- "loss": 2.4819,
- "step": 151
- },
- {
- "epoch": 0.4352183249821045,
- "grad_norm": 0.521792840976189,
- "learning_rate": 6.919174509065003e-06,
- "loss": 2.5105,
- "step": 152
- },
- {
- "epoch": 0.43808160343593416,
- "grad_norm": 0.5219763904404502,
- "learning_rate": 6.873032967079562e-06,
- "loss": 2.4779,
- "step": 153
- },
- {
- "epoch": 0.4409448818897638,
- "grad_norm": 0.5154672827971456,
- "learning_rate": 6.8267051218319766e-06,
- "loss": 2.52,
- "step": 154
- },
- {
- "epoch": 0.44380816034359344,
- "grad_norm": 0.5301945278339433,
- "learning_rate": 6.780195581371785e-06,
- "loss": 2.4721,
- "step": 155
- },
- {
- "epoch": 0.44667143879742305,
- "grad_norm": 0.5014158054403869,
- "learning_rate": 6.733508971821037e-06,
- "loss": 2.4652,
- "step": 156
- },
- {
- "epoch": 0.44953471725125266,
- "grad_norm": 0.5221719192401763,
- "learning_rate": 6.686649936914151e-06,
- "loss": 2.5095,
- "step": 157
- },
- {
- "epoch": 0.4523979957050823,
- "grad_norm": 0.5633793319541605,
- "learning_rate": 6.639623137536023e-06,
- "loss": 2.5088,
- "step": 158
- },
- {
- "epoch": 0.45526127415891193,
- "grad_norm": 0.503797400013898,
- "learning_rate": 6.592433251258423e-06,
- "loss": 2.4392,
- "step": 159
- },
- {
- "epoch": 0.4581245526127416,
- "grad_norm": 0.5353571393146068,
- "learning_rate": 6.545084971874738e-06,
- "loss": 2.4806,
- "step": 160
- },
- {
- "epoch": 0.4609878310665712,
- "grad_norm": 0.5394132569835262,
- "learning_rate": 6.497583008933097e-06,
- "loss": 2.4437,
- "step": 161
- },
- {
- "epoch": 0.4638511095204009,
- "grad_norm": 0.49177737499930074,
- "learning_rate": 6.449932087267932e-06,
- "loss": 2.4602,
- "step": 162
- },
- {
- "epoch": 0.4667143879742305,
- "grad_norm": 0.5299162942575597,
- "learning_rate": 6.402136946530014e-06,
- "loss": 2.458,
- "step": 163
- },
- {
- "epoch": 0.46957766642806015,
- "grad_norm": 0.5268297730689246,
- "learning_rate": 6.354202340715027e-06,
- "loss": 2.4701,
- "step": 164
- },
- {
- "epoch": 0.47244094488188976,
- "grad_norm": 0.5120064053501564,
- "learning_rate": 6.306133037690693e-06,
- "loss": 2.485,
- "step": 165
- },
- {
- "epoch": 0.4753042233357194,
- "grad_norm": 0.5110676269579153,
- "learning_rate": 6.257933818722544e-06,
- "loss": 2.4721,
- "step": 166
- },
- {
- "epoch": 0.47816750178954903,
- "grad_norm": 0.5476368667166479,
- "learning_rate": 6.209609477998339e-06,
- "loss": 2.4884,
- "step": 167
- },
- {
- "epoch": 0.48103078024337864,
- "grad_norm": 0.4764530610045073,
- "learning_rate": 6.161164822151213e-06,
- "loss": 2.4384,
- "step": 168
- },
- {
- "epoch": 0.4838940586972083,
- "grad_norm": 0.5083738885386776,
- "learning_rate": 6.112604669781572e-06,
- "loss": 2.5163,
- "step": 169
- },
- {
- "epoch": 0.4867573371510379,
- "grad_norm": 0.5721234428494102,
- "learning_rate": 6.063933850977811e-06,
- "loss": 2.4187,
- "step": 170
- },
- {
- "epoch": 0.4896206156048676,
- "grad_norm": 0.48356591072844135,
- "learning_rate": 6.015157206835881e-06,
- "loss": 2.4661,
- "step": 171
- },
- {
- "epoch": 0.4924838940586972,
- "grad_norm": 0.5318155836725099,
- "learning_rate": 5.9662795889777666e-06,
- "loss": 2.4874,
- "step": 172
- },
- {
- "epoch": 0.49534717251252686,
- "grad_norm": 0.532932583531948,
- "learning_rate": 5.917305859068912e-06,
- "loss": 2.4611,
- "step": 173
- },
- {
- "epoch": 0.49821045096635647,
- "grad_norm": 0.4992558226313865,
- "learning_rate": 5.8682408883346535e-06,
- "loss": 2.4847,
- "step": 174
- },
- {
- "epoch": 0.5010737294201861,
- "grad_norm": 0.49892566334047106,
- "learning_rate": 5.819089557075689e-06,
- "loss": 2.4672,
- "step": 175
- },
- {
- "epoch": 0.5039370078740157,
- "grad_norm": 0.5162760616236272,
- "learning_rate": 5.769856754182668e-06,
- "loss": 2.481,
- "step": 176
- },
- {
- "epoch": 0.5039370078740157,
- "eval_loss": 2.939789295196533,
- "eval_runtime": 15.4591,
- "eval_samples_per_second": 14.102,
- "eval_steps_per_second": 2.393,
- "step": 176
- },
- {
- "epoch": 0.5068002863278454,
- "grad_norm": 0.5155049998342655,
- "learning_rate": 5.720547376649901e-06,
- "loss": 2.4383,
- "step": 177
- },
- {
- "epoch": 0.5096635647816751,
- "grad_norm": 0.49383297653530694,
- "learning_rate": 5.671166329088278e-06,
- "loss": 2.4963,
- "step": 178
- },
- {
- "epoch": 0.5125268432355047,
- "grad_norm": 0.5451897427189254,
- "learning_rate": 5.621718523237427e-06,
- "loss": 2.458,
- "step": 179
- },
- {
- "epoch": 0.5153901216893343,
- "grad_norm": 0.5067050740905722,
- "learning_rate": 5.57220887747716e-06,
- "loss": 2.4615,
- "step": 180
- },
- {
- "epoch": 0.5182534001431639,
- "grad_norm": 0.5118844197490995,
- "learning_rate": 5.522642316338268e-06,
- "loss": 2.4782,
- "step": 181
- },
- {
- "epoch": 0.5211166785969935,
- "grad_norm": 0.49843713068761214,
- "learning_rate": 5.473023770012686e-06,
- "loss": 2.4639,
- "step": 182
- },
- {
- "epoch": 0.5239799570508232,
- "grad_norm": 0.49039624756309863,
- "learning_rate": 5.423358173863117e-06,
- "loss": 2.5034,
- "step": 183
- },
- {
- "epoch": 0.5268432355046528,
- "grad_norm": 0.5397522237987987,
- "learning_rate": 5.373650467932122e-06,
- "loss": 2.4828,
- "step": 184
- },
- {
- "epoch": 0.5297065139584825,
- "grad_norm": 0.46938500048241816,
- "learning_rate": 5.323905596450759e-06,
- "loss": 2.4896,
- "step": 185
- },
- {
- "epoch": 0.5325697924123121,
- "grad_norm": 0.46678391598361535,
- "learning_rate": 5.274128507346801e-06,
- "loss": 2.513,
- "step": 186
- },
- {
- "epoch": 0.5354330708661418,
- "grad_norm": 0.497328791756282,
- "learning_rate": 5.224324151752575e-06,
- "loss": 2.4681,
- "step": 187
- },
- {
- "epoch": 0.5382963493199714,
- "grad_norm": 0.49519258041200537,
- "learning_rate": 5.174497483512506e-06,
- "loss": 2.5165,
- "step": 188
- },
- {
- "epoch": 0.541159627773801,
- "grad_norm": 0.51742332377377,
- "learning_rate": 5.1246534586903655e-06,
- "loss": 2.5054,
- "step": 189
- },
- {
- "epoch": 0.5440229062276306,
- "grad_norm": 0.47861405099169874,
- "learning_rate": 5.074797035076319e-06,
- "loss": 2.4731,
- "step": 190
- },
- {
- "epoch": 0.5468861846814602,
- "grad_norm": 0.49802176173801116,
- "learning_rate": 5.024933171693791e-06,
- "loss": 2.4637,
- "step": 191
- },
- {
- "epoch": 0.5497494631352899,
- "grad_norm": 0.4840967047698607,
- "learning_rate": 4.9750668283062104e-06,
- "loss": 2.4968,
- "step": 192
- },
- {
- "epoch": 0.5526127415891195,
- "grad_norm": 0.47135451227221825,
- "learning_rate": 4.9252029649236835e-06,
- "loss": 2.4511,
- "step": 193
- },
- {
- "epoch": 0.5554760200429492,
- "grad_norm": 0.48895069084444304,
- "learning_rate": 4.875346541309637e-06,
- "loss": 2.4481,
- "step": 194
- },
- {
- "epoch": 0.5583392984967788,
- "grad_norm": 0.4965170322744245,
- "learning_rate": 4.825502516487497e-06,
- "loss": 2.4937,
- "step": 195
- },
- {
- "epoch": 0.5612025769506085,
- "grad_norm": 0.4978261261637217,
- "learning_rate": 4.775675848247427e-06,
- "loss": 2.479,
- "step": 196
- },
- {
- "epoch": 0.5640658554044381,
- "grad_norm": 0.4773588207221208,
- "learning_rate": 4.7258714926532e-06,
- "loss": 2.492,
- "step": 197
- },
- {
- "epoch": 0.5669291338582677,
- "grad_norm": 0.4759022046084612,
- "learning_rate": 4.676094403549241e-06,
- "loss": 2.4585,
- "step": 198
- },
- {
- "epoch": 0.5697924123120973,
- "grad_norm": 0.48965996741234036,
- "learning_rate": 4.626349532067879e-06,
- "loss": 2.4761,
- "step": 199
- },
- {
- "epoch": 0.572655690765927,
- "grad_norm": 0.46747661270431873,
- "learning_rate": 4.576641826136884e-06,
- "loss": 2.4748,
- "step": 200
- },
- {
- "epoch": 0.5755189692197566,
- "grad_norm": 0.47493437805144983,
- "learning_rate": 4.526976229987315e-06,
- "loss": 2.4631,
- "step": 201
- },
- {
- "epoch": 0.5783822476735863,
- "grad_norm": 0.4915217558336455,
- "learning_rate": 4.477357683661734e-06,
- "loss": 2.4768,
- "step": 202
- },
- {
- "epoch": 0.5812455261274159,
- "grad_norm": 0.4793297296845006,
- "learning_rate": 4.427791122522841e-06,
- "loss": 2.4861,
- "step": 203
- },
- {
- "epoch": 0.5841088045812455,
- "grad_norm": 0.4847288969292467,
- "learning_rate": 4.3782814767625755e-06,
- "loss": 2.4528,
- "step": 204
- },
- {
- "epoch": 0.5869720830350752,
- "grad_norm": 0.46840483063360394,
- "learning_rate": 4.3288336709117246e-06,
- "loss": 2.4844,
- "step": 205
- },
- {
- "epoch": 0.5898353614889048,
- "grad_norm": 0.49201004049650826,
- "learning_rate": 4.279452623350101e-06,
- "loss": 2.4908,
- "step": 206
- },
- {
- "epoch": 0.5926986399427344,
- "grad_norm": 0.49621336600388116,
- "learning_rate": 4.230143245817332e-06,
- "loss": 2.4997,
- "step": 207
- },
- {
- "epoch": 0.595561918396564,
- "grad_norm": 0.4922422256843438,
- "learning_rate": 4.180910442924312e-06,
- "loss": 2.4725,
- "step": 208
- },
- {
- "epoch": 0.5984251968503937,
- "grad_norm": 0.49243609704503866,
- "learning_rate": 4.131759111665349e-06,
- "loss": 2.4525,
- "step": 209
- },
- {
- "epoch": 0.6012884753042234,
- "grad_norm": 0.5310002493376136,
- "learning_rate": 4.0826941409310885e-06,
- "loss": 2.4959,
- "step": 210
- },
- {
- "epoch": 0.604151753758053,
- "grad_norm": 0.485979098182275,
- "learning_rate": 4.033720411022235e-06,
- "loss": 2.4817,
- "step": 211
- },
- {
- "epoch": 0.6070150322118826,
- "grad_norm": 0.4893545713811437,
- "learning_rate": 3.98484279316412e-06,
- "loss": 2.4926,
- "step": 212
- },
- {
- "epoch": 0.6098783106657122,
- "grad_norm": 0.5177988359519327,
- "learning_rate": 3.936066149022191e-06,
- "loss": 2.5085,
- "step": 213
- },
- {
- "epoch": 0.6127415891195419,
- "grad_norm": 0.49306653272715745,
- "learning_rate": 3.887395330218429e-06,
- "loss": 2.473,
- "step": 214
- },
- {
- "epoch": 0.6156048675733715,
- "grad_norm": 0.47728344715096344,
- "learning_rate": 3.8388351778487884e-06,
- "loss": 2.4729,
- "step": 215
- },
- {
- "epoch": 0.6184681460272011,
- "grad_norm": 0.47456363408410085,
- "learning_rate": 3.790390522001662e-06,
- "loss": 2.4628,
- "step": 216
- },
- {
- "epoch": 0.6213314244810307,
- "grad_norm": 0.4914122331915044,
- "learning_rate": 3.7420661812774577e-06,
- "loss": 2.479,
- "step": 217
- },
- {
- "epoch": 0.6241947029348605,
- "grad_norm": 0.4835020063832526,
- "learning_rate": 3.6938669623093086e-06,
- "loss": 2.4775,
- "step": 218
- },
- {
- "epoch": 0.6270579813886901,
- "grad_norm": 0.4819963834404957,
- "learning_rate": 3.6457976592849753e-06,
- "loss": 2.4627,
- "step": 219
- },
- {
- "epoch": 0.6299212598425197,
- "grad_norm": 0.48578165660736744,
- "learning_rate": 3.5978630534699873e-06,
- "loss": 2.4921,
- "step": 220
- },
- {
- "epoch": 0.6327845382963493,
- "grad_norm": 0.4946223923765296,
- "learning_rate": 3.550067912732069e-06,
- "loss": 2.4802,
- "step": 221
- },
- {
- "epoch": 0.635647816750179,
- "grad_norm": 0.47731930944616413,
- "learning_rate": 3.502416991066904e-06,
- "loss": 2.4862,
- "step": 222
- },
- {
- "epoch": 0.6385110952040086,
- "grad_norm": 0.46672416010636814,
- "learning_rate": 3.4549150281252635e-06,
- "loss": 2.4746,
- "step": 223
- },
- {
- "epoch": 0.6413743736578382,
- "grad_norm": 0.47432031556982895,
- "learning_rate": 3.4075667487415785e-06,
- "loss": 2.4461,
- "step": 224
- },
- {
- "epoch": 0.6442376521116678,
- "grad_norm": 0.4613340476214188,
- "learning_rate": 3.3603768624639786e-06,
- "loss": 2.4734,
- "step": 225
- },
- {
- "epoch": 0.6471009305654974,
- "grad_norm": 0.4879808421776038,
- "learning_rate": 3.3133500630858507e-06,
- "loss": 2.4458,
- "step": 226
- },
- {
- "epoch": 0.6499642090193272,
- "grad_norm": 0.49504385746210705,
- "learning_rate": 3.266491028178964e-06,
- "loss": 2.4931,
- "step": 227
- },
- {
- "epoch": 0.6528274874731568,
- "grad_norm": 0.46696567892600954,
- "learning_rate": 3.219804418628216e-06,
- "loss": 2.4582,
- "step": 228
- },
- {
- "epoch": 0.6556907659269864,
- "grad_norm": 0.46371322697466877,
- "learning_rate": 3.173294878168025e-06,
- "loss": 2.4688,
- "step": 229
- },
- {
- "epoch": 0.658554044380816,
- "grad_norm": 0.45817247273224015,
- "learning_rate": 3.12696703292044e-06,
- "loss": 2.4942,
- "step": 230
- },
- {
- "epoch": 0.6614173228346457,
- "grad_norm": 0.47446326987653453,
- "learning_rate": 3.0808254909349987e-06,
- "loss": 2.4717,
- "step": 231
- },
- {
- "epoch": 0.6642806012884753,
- "grad_norm": 0.46895233817717097,
- "learning_rate": 3.0348748417303826e-06,
- "loss": 2.4737,
- "step": 232
- },
- {
- "epoch": 0.6671438797423049,
- "grad_norm": 0.47218853082979256,
- "learning_rate": 2.989119655837913e-06,
- "loss": 2.4635,
- "step": 233
- },
- {
- "epoch": 0.6700071581961345,
- "grad_norm": 0.4704459933176104,
- "learning_rate": 2.9435644843469434e-06,
- "loss": 2.4932,
- "step": 234
- },
- {
- "epoch": 0.6728704366499642,
- "grad_norm": 0.4649089431955579,
- "learning_rate": 2.8982138584521734e-06,
- "loss": 2.4827,
- "step": 235
- },
- {
- "epoch": 0.6757337151037939,
- "grad_norm": 0.47709482926494806,
- "learning_rate": 2.853072289002954e-06,
- "loss": 2.465,
- "step": 236
- },
- {
- "epoch": 0.6785969935576235,
- "grad_norm": 0.4637487649689432,
- "learning_rate": 2.8081442660546126e-06,
- "loss": 2.4768,
- "step": 237
- },
- {
- "epoch": 0.6814602720114531,
- "grad_norm": 0.46866785131238103,
- "learning_rate": 2.7634342584218364e-06,
- "loss": 2.4784,
- "step": 238
- },
- {
- "epoch": 0.6843235504652827,
- "grad_norm": 0.4894597393103176,
- "learning_rate": 2.718946713234185e-06,
- "loss": 2.5016,
- "step": 239
- },
- {
- "epoch": 0.6871868289191124,
- "grad_norm": 0.4667475078124399,
- "learning_rate": 2.674686055493748e-06,
- "loss": 2.5086,
- "step": 240
- },
- {
- "epoch": 0.690050107372942,
- "grad_norm": 0.4696416005150825,
- "learning_rate": 2.6306566876350072e-06,
- "loss": 2.4469,
- "step": 241
- },
- {
- "epoch": 0.6929133858267716,
- "grad_norm": 0.46824017819944164,
- "learning_rate": 2.5868629890869467e-06,
- "loss": 2.4876,
- "step": 242
- },
- {
- "epoch": 0.6957766642806013,
- "grad_norm": 0.46557559754046707,
- "learning_rate": 2.543309315837444e-06,
- "loss": 2.4287,
- "step": 243
- },
- {
- "epoch": 0.698639942734431,
- "grad_norm": 0.4762974026954119,
- "learning_rate": 2.5000000000000015e-06,
- "loss": 2.5047,
- "step": 244
- },
- {
- "epoch": 0.7015032211882606,
- "grad_norm": 0.4644049626901058,
- "learning_rate": 2.4569393493828433e-06,
- "loss": 2.457,
- "step": 245
- },
- {
- "epoch": 0.7043664996420902,
- "grad_norm": 0.4731570583945912,
- "learning_rate": 2.4141316470604362e-06,
- "loss": 2.4875,
- "step": 246
- },
- {
- "epoch": 0.7072297780959198,
- "grad_norm": 0.4652373549525223,
- "learning_rate": 2.371581150947476e-06,
- "loss": 2.501,
- "step": 247
- },
- {
- "epoch": 0.7100930565497494,
- "grad_norm": 0.46149990291747417,
- "learning_rate": 2.3292920933753566e-06,
- "loss": 2.4767,
- "step": 248
- },
- {
- "epoch": 0.7129563350035791,
- "grad_norm": 0.45894170076752305,
- "learning_rate": 2.2872686806712037e-06,
- "loss": 2.466,
- "step": 249
- },
- {
- "epoch": 0.7158196134574087,
- "grad_norm": 0.4587340338845331,
- "learning_rate": 2.245515092739488e-06,
- "loss": 2.4843,
- "step": 250
- },
- {
- "epoch": 0.7186828919112384,
- "grad_norm": 0.4568306102089786,
- "learning_rate": 2.204035482646267e-06,
- "loss": 2.4452,
- "step": 251
- },
- {
- "epoch": 0.721546170365068,
- "grad_norm": 0.45503692900873527,
- "learning_rate": 2.162833976206092e-06,
- "loss": 2.4854,
- "step": 252
- },
- {
- "epoch": 0.7244094488188977,
- "grad_norm": 0.45864934324739526,
- "learning_rate": 2.1219146715716332e-06,
- "loss": 2.5048,
- "step": 253
- },
- {
- "epoch": 0.7272727272727273,
- "grad_norm": 0.4616476773972489,
- "learning_rate": 2.081281638826052e-06,
- "loss": 2.4602,
- "step": 254
- },
- {
- "epoch": 0.7301360057265569,
- "grad_norm": 0.4691408422165569,
- "learning_rate": 2.0409389195781627e-06,
- "loss": 2.4943,
- "step": 255
- },
- {
- "epoch": 0.7329992841803865,
- "grad_norm": 0.46727999266440634,
- "learning_rate": 2.0008905265604316e-06,
- "loss": 2.4899,
- "step": 256
- },
- {
- "epoch": 0.7358625626342162,
- "grad_norm": 0.4751400894854323,
- "learning_rate": 1.9611404432298505e-06,
- "loss": 2.4554,
- "step": 257
- },
- {
- "epoch": 0.7387258410880458,
- "grad_norm": 0.47283617833966596,
- "learning_rate": 1.9216926233717087e-06,
- "loss": 2.4631,
- "step": 258
- },
- {
- "epoch": 0.7415891195418755,
- "grad_norm": 0.4579533439995314,
- "learning_rate": 1.8825509907063328e-06,
- "loss": 2.4794,
- "step": 259
- },
- {
- "epoch": 0.7444523979957051,
- "grad_norm": 0.46285462884634804,
- "learning_rate": 1.843719438498806e-06,
- "loss": 2.4447,
- "step": 260
- },
- {
- "epoch": 0.7473156764495347,
- "grad_norm": 0.45609200392812344,
- "learning_rate": 1.8052018291717216e-06,
- "loss": 2.5097,
- "step": 261
- },
- {
- "epoch": 0.7501789549033644,
- "grad_norm": 0.4736752845679603,
- "learning_rate": 1.7670019939210025e-06,
- "loss": 2.4632,
- "step": 262
- },
- {
- "epoch": 0.753042233357194,
- "grad_norm": 0.47156563387390643,
- "learning_rate": 1.7291237323348287e-06,
- "loss": 2.4749,
- "step": 263
- },
- {
- "epoch": 0.7559055118110236,
- "grad_norm": 0.46047471823042235,
- "learning_rate": 1.6915708120157042e-06,
- "loss": 2.4313,
- "step": 264
- },
- {
- "epoch": 0.7559055118110236,
- "eval_loss": 2.936035633087158,
- "eval_runtime": 15.4546,
- "eval_samples_per_second": 14.106,
- "eval_steps_per_second": 2.394,
- "step": 264
- },
- {
- "epoch": 0.7587687902648532,
- "grad_norm": 0.46741816845331546,
- "learning_rate": 1.6543469682057105e-06,
- "loss": 2.4685,
- "step": 265
- },
- {
- "epoch": 0.7616320687186829,
- "grad_norm": 0.44870053167803253,
- "learning_rate": 1.617455903414974e-06,
- "loss": 2.4712,
- "step": 266
- },
- {
- "epoch": 0.7644953471725126,
- "grad_norm": 0.4561531748512701,
- "learning_rate": 1.5809012870533996e-06,
- "loss": 2.4874,
- "step": 267
- },
- {
- "epoch": 0.7673586256263422,
- "grad_norm": 0.45480554069330664,
- "learning_rate": 1.544686755065677e-06,
- "loss": 2.443,
- "step": 268
- },
- {
- "epoch": 0.7702219040801718,
- "grad_norm": 0.46600032777395256,
- "learning_rate": 1.5088159095696365e-06,
- "loss": 2.5145,
- "step": 269
- },
- {
- "epoch": 0.7730851825340014,
- "grad_norm": 0.48866030251102854,
- "learning_rate": 1.4732923184979563e-06,
- "loss": 2.4231,
- "step": 270
- },
- {
- "epoch": 0.7759484609878311,
- "grad_norm": 0.48186026098585555,
- "learning_rate": 1.438119515243277e-06,
- "loss": 2.4738,
- "step": 271
- },
- {
- "epoch": 0.7788117394416607,
- "grad_norm": 0.4589872555488135,
- "learning_rate": 1.4033009983067454e-06,
- "loss": 2.4859,
- "step": 272
- },
- {
- "epoch": 0.7816750178954903,
- "grad_norm": 0.4459291232495709,
- "learning_rate": 1.3688402309500353e-06,
- "loss": 2.5005,
- "step": 273
- },
- {
- "epoch": 0.7845382963493199,
- "grad_norm": 0.4513699250772011,
- "learning_rate": 1.3347406408508695e-06,
- "loss": 2.487,
- "step": 274
- },
- {
- "epoch": 0.7874015748031497,
- "grad_norm": 0.4567378969589174,
- "learning_rate": 1.3010056197620813e-06,
- "loss": 2.4573,
- "step": 275
- },
- {
- "epoch": 0.7902648532569793,
- "grad_norm": 0.4692124594286597,
- "learning_rate": 1.2676385231742493e-06,
- "loss": 2.4628,
- "step": 276
- },
- {
- "epoch": 0.7931281317108089,
- "grad_norm": 0.457853009198463,
- "learning_rate": 1.234642669981946e-06,
- "loss": 2.4425,
- "step": 277
- },
- {
- "epoch": 0.7959914101646385,
- "grad_norm": 0.46355613438320215,
- "learning_rate": 1.2020213421536103e-06,
- "loss": 2.5078,
- "step": 278
- },
- {
- "epoch": 0.7988546886184682,
- "grad_norm": 0.4560209731740047,
- "learning_rate": 1.1697777844051105e-06,
- "loss": 2.4658,
- "step": 279
- },
- {
- "epoch": 0.8017179670722978,
- "grad_norm": 0.45676904584836525,
- "learning_rate": 1.137915203877003e-06,
- "loss": 2.456,
- "step": 280
- },
- {
- "epoch": 0.8045812455261274,
- "grad_norm": 0.454564657506919,
- "learning_rate": 1.1064367698155303e-06,
- "loss": 2.4476,
- "step": 281
- },
- {
- "epoch": 0.807444523979957,
- "grad_norm": 0.46526091929632524,
- "learning_rate": 1.0753456132573886e-06,
- "loss": 2.478,
- "step": 282
- },
- {
- "epoch": 0.8103078024337866,
- "grad_norm": 0.4552190309373214,
- "learning_rate": 1.044644826718295e-06,
- "loss": 2.4653,
- "step": 283
- },
- {
- "epoch": 0.8131710808876164,
- "grad_norm": 0.46555915439648266,
- "learning_rate": 1.0143374638853892e-06,
- "loss": 2.4766,
- "step": 284
- },
- {
- "epoch": 0.816034359341446,
- "grad_norm": 0.4572802006505305,
- "learning_rate": 9.844265393134927e-07,
- "loss": 2.4569,
- "step": 285
- },
- {
- "epoch": 0.8188976377952756,
- "grad_norm": 0.4458605895386478,
- "learning_rate": 9.549150281252633e-07,
- "loss": 2.4695,
- "step": 286
- },
- {
- "epoch": 0.8217609162491052,
- "grad_norm": 0.4591574303861129,
- "learning_rate": 9.258058657152763e-07,
- "loss": 2.4416,
- "step": 287
- },
- {
- "epoch": 0.8246241947029349,
- "grad_norm": 0.45928170428426124,
- "learning_rate": 8.971019474580428e-07,
- "loss": 2.4722,
- "step": 288
- },
- {
- "epoch": 0.8274874731567645,
- "grad_norm": 0.4546308467235846,
- "learning_rate": 8.688061284200266e-07,
- "loss": 2.4638,
- "step": 289
- },
- {
- "epoch": 0.8303507516105941,
- "grad_norm": 0.46135706461175,
- "learning_rate": 8.409212230756564e-07,
- "loss": 2.4896,
- "step": 290
- },
- {
- "epoch": 0.8332140300644237,
- "grad_norm": 0.4357442522497734,
- "learning_rate": 8.134500050273841e-07,
- "loss": 2.4886,
- "step": 291
- },
- {
- "epoch": 0.8360773085182533,
- "grad_norm": 0.4563872305836051,
- "learning_rate": 7.863952067298042e-07,
- "loss": 2.4891,
- "step": 292
- },
- {
- "epoch": 0.8389405869720831,
- "grad_norm": 0.4598435875124808,
- "learning_rate": 7.597595192178702e-07,
- "loss": 2.4513,
- "step": 293
- },
- {
- "epoch": 0.8418038654259127,
- "grad_norm": 0.44658046634222204,
- "learning_rate": 7.33545591839222e-07,
- "loss": 2.4831,
- "step": 294
- },
- {
- "epoch": 0.8446671438797423,
- "grad_norm": 0.44490014330370886,
- "learning_rate": 7.077560319906696e-07,
- "loss": 2.4633,
- "step": 295
- },
- {
- "epoch": 0.8475304223335719,
- "grad_norm": 0.45416636123029175,
- "learning_rate": 6.82393404858846e-07,
- "loss": 2.4548,
- "step": 296
- },
- {
- "epoch": 0.8503937007874016,
- "grad_norm": 0.49083956893955283,
- "learning_rate": 6.574602331650559e-07,
- "loss": 2.4829,
- "step": 297
- },
- {
- "epoch": 0.8532569792412312,
- "grad_norm": 0.45761237430081714,
- "learning_rate": 6.329589969143518e-07,
- "loss": 2.4761,
- "step": 298
- },
- {
- "epoch": 0.8561202576950608,
- "grad_norm": 0.46149826051853654,
- "learning_rate": 6.088921331488568e-07,
- "loss": 2.4669,
- "step": 299
- },
- {
- "epoch": 0.8589835361488904,
- "grad_norm": 0.44852500170386056,
- "learning_rate": 5.852620357053651e-07,
- "loss": 2.4889,
- "step": 300
- },
- {
- "epoch": 0.8618468146027202,
- "grad_norm": 0.44535055340755936,
- "learning_rate": 5.620710549772295e-07,
- "loss": 2.454,
- "step": 301
- },
- {
- "epoch": 0.8647100930565498,
- "grad_norm": 0.4523724462946578,
- "learning_rate": 5.393214976805833e-07,
- "loss": 2.4829,
- "step": 302
- },
- {
- "epoch": 0.8675733715103794,
- "grad_norm": 0.4512432935032381,
- "learning_rate": 5.17015626624896e-07,
- "loss": 2.4964,
- "step": 303
- },
- {
- "epoch": 0.870436649964209,
- "grad_norm": 0.4502752764610012,
- "learning_rate": 4.951556604879049e-07,
- "loss": 2.4652,
- "step": 304
- },
- {
- "epoch": 0.8732999284180386,
- "grad_norm": 0.4418666890355134,
- "learning_rate": 4.737437735949263e-07,
- "loss": 2.4771,
- "step": 305
- },
- {
- "epoch": 0.8761632068718683,
- "grad_norm": 0.4661160898935762,
- "learning_rate": 4.5278209570258914e-07,
- "loss": 2.4867,
- "step": 306
- },
- {
- "epoch": 0.8790264853256979,
- "grad_norm": 0.4611073292882843,
- "learning_rate": 4.322727117869951e-07,
- "loss": 2.4746,
- "step": 307
- },
- {
- "epoch": 0.8818897637795275,
- "grad_norm": 0.4446102922832806,
- "learning_rate": 4.122176618363305e-07,
- "loss": 2.4695,
- "step": 308
- },
- {
- "epoch": 0.8847530422333572,
- "grad_norm": 0.44729778668314735,
- "learning_rate": 3.9261894064796136e-07,
- "loss": 2.4678,
- "step": 309
- },
- {
- "epoch": 0.8876163206871869,
- "grad_norm": 0.45295141361260305,
- "learning_rate": 3.734784976300165e-07,
- "loss": 2.4926,
- "step": 310
- },
- {
- "epoch": 0.8904795991410165,
- "grad_norm": 0.46930631413078255,
- "learning_rate": 3.5479823660748703e-07,
- "loss": 2.4514,
- "step": 311
- },
- {
- "epoch": 0.8933428775948461,
- "grad_norm": 0.4417853093677557,
- "learning_rate": 3.365800156328619e-07,
- "loss": 2.4905,
- "step": 312
- },
- {
- "epoch": 0.8962061560486757,
- "grad_norm": 0.4440433821954867,
- "learning_rate": 3.18825646801314e-07,
- "loss": 2.4864,
- "step": 313
- },
- {
- "epoch": 0.8990694345025053,
- "grad_norm": 0.45330282639589614,
- "learning_rate": 3.015368960704584e-07,
- "loss": 2.4328,
- "step": 314
- },
- {
- "epoch": 0.901932712956335,
- "grad_norm": 0.44590581999226264,
- "learning_rate": 2.847154830846971e-07,
- "loss": 2.501,
- "step": 315
- },
- {
- "epoch": 0.9047959914101646,
- "grad_norm": 0.4534475837301284,
- "learning_rate": 2.6836308100417874e-07,
- "loss": 2.4455,
- "step": 316
- },
- {
- "epoch": 0.9076592698639943,
- "grad_norm": 0.4448096353367094,
- "learning_rate": 2.524813163383683e-07,
- "loss": 2.4311,
- "step": 317
- },
- {
- "epoch": 0.9105225483178239,
- "grad_norm": 0.44389847976222563,
- "learning_rate": 2.3707176878426886e-07,
- "loss": 2.4872,
- "step": 318
- },
- {
- "epoch": 0.9133858267716536,
- "grad_norm": 0.44380665859676643,
- "learning_rate": 2.2213597106929608e-07,
- "loss": 2.4491,
- "step": 319
- },
- {
- "epoch": 0.9162491052254832,
- "grad_norm": 0.44824967084915274,
- "learning_rate": 2.0767540879882143e-07,
- "loss": 2.4336,
- "step": 320
- },
- {
- "epoch": 0.9191123836793128,
- "grad_norm": 0.4615916047912525,
- "learning_rate": 1.9369152030840553e-07,
- "loss": 2.4893,
- "step": 321
- },
- {
- "epoch": 0.9219756621331424,
- "grad_norm": 0.44637662830765845,
- "learning_rate": 1.801856965207338e-07,
- "loss": 2.4488,
- "step": 322
- },
- {
- "epoch": 0.9248389405869721,
- "grad_norm": 0.446240721408501,
- "learning_rate": 1.6715928080726417e-07,
- "loss": 2.4527,
- "step": 323
- },
- {
- "epoch": 0.9277022190408017,
- "grad_norm": 0.4560572613231896,
- "learning_rate": 1.5461356885461077e-07,
- "loss": 2.4604,
- "step": 324
- },
- {
- "epoch": 0.9305654974946314,
- "grad_norm": 0.4314293357419993,
- "learning_rate": 1.4254980853566248e-07,
- "loss": 2.479,
- "step": 325
- },
- {
- "epoch": 0.933428775948461,
- "grad_norm": 0.44433384067807596,
- "learning_rate": 1.3096919978546842e-07,
- "loss": 2.4698,
- "step": 326
- },
- {
- "epoch": 0.9362920544022906,
- "grad_norm": 0.43730852720830754,
- "learning_rate": 1.1987289448187777e-07,
- "loss": 2.4865,
- "step": 327
- },
- {
- "epoch": 0.9391553328561203,
- "grad_norm": 0.4284159960564044,
- "learning_rate": 1.0926199633097156e-07,
- "loss": 2.4316,
- "step": 328
- },
- {
- "epoch": 0.9420186113099499,
- "grad_norm": 0.44571916504011744,
- "learning_rate": 9.913756075728088e-08,
- "loss": 2.5041,
- "step": 329
- },
- {
- "epoch": 0.9448818897637795,
- "grad_norm": 0.45304237672597053,
- "learning_rate": 8.950059479880591e-08,
- "loss": 2.5114,
- "step": 330
- },
- {
- "epoch": 0.9477451682176091,
- "grad_norm": 0.43598348530628495,
- "learning_rate": 8.035205700685167e-08,
- "loss": 2.5127,
- "step": 331
- },
- {
- "epoch": 0.9506084466714388,
- "grad_norm": 0.4356879074001767,
- "learning_rate": 7.169285735068531e-08,
- "loss": 2.4692,
- "step": 332
- },
- {
- "epoch": 0.9534717251252685,
- "grad_norm": 0.44682377084462166,
- "learning_rate": 6.352385712702191e-08,
- "loss": 2.4778,
- "step": 333
- },
- {
- "epoch": 0.9563350035790981,
- "grad_norm": 0.4474749989010966,
- "learning_rate": 5.584586887435739e-08,
- "loss": 2.4738,
- "step": 334
- },
- {
- "epoch": 0.9591982820329277,
- "grad_norm": 0.44254145584508675,
- "learning_rate": 4.865965629214819e-08,
- "loss": 2.4445,
- "step": 335
- },
- {
- "epoch": 0.9620615604867573,
- "grad_norm": 0.4814270534171316,
- "learning_rate": 4.196593416484873e-08,
- "loss": 2.5038,
- "step": 336
- },
- {
- "epoch": 0.964924838940587,
- "grad_norm": 0.4449207768038768,
- "learning_rate": 3.576536829081323e-08,
- "loss": 2.4704,
- "step": 337
- },
- {
- "epoch": 0.9677881173944166,
- "grad_norm": 0.44401542703062347,
- "learning_rate": 3.005857541607371e-08,
- "loss": 2.5191,
- "step": 338
- },
- {
- "epoch": 0.9706513958482462,
- "grad_norm": 0.43424375987912595,
- "learning_rate": 2.4846123172992953e-08,
- "loss": 2.4885,
- "step": 339
- },
- {
- "epoch": 0.9735146743020758,
- "grad_norm": 0.4473592208860157,
- "learning_rate": 2.012853002380466e-08,
- "loss": 2.4681,
- "step": 340
- },
- {
- "epoch": 0.9763779527559056,
- "grad_norm": 0.45638058238294077,
- "learning_rate": 1.590626520904526e-08,
- "loss": 2.4722,
- "step": 341
- },
- {
- "epoch": 0.9792412312097352,
- "grad_norm": 0.4442471751823356,
- "learning_rate": 1.2179748700879013e-08,
- "loss": 2.4685,
- "step": 342
- },
- {
- "epoch": 0.9821045096635648,
- "grad_norm": 0.44699422552952645,
- "learning_rate": 8.949351161324227e-09,
- "loss": 2.4736,
- "step": 343
- },
- {
- "epoch": 0.9849677881173944,
- "grad_norm": 0.44843427084177795,
- "learning_rate": 6.215393905388278e-09,
- "loss": 2.4758,
- "step": 344
- },
- {
- "epoch": 0.9878310665712241,
- "grad_norm": 0.4430207903952749,
- "learning_rate": 3.978148869103748e-09,
- "loss": 2.4823,
- "step": 345
- },
- {
- "epoch": 0.9906943450250537,
- "grad_norm": 0.44535966484744366,
- "learning_rate": 2.237838582483387e-09,
- "loss": 2.4529,
- "step": 346
- },
- {
- "epoch": 0.9935576234788833,
- "grad_norm": 0.43129364144349974,
- "learning_rate": 9.946361473822664e-10,
- "loss": 2.4717,
- "step": 347
- },
- {
- "epoch": 0.9964209019327129,
- "grad_norm": 0.44688887849751735,
- "learning_rate": 2.486652202848827e-10,
- "loss": 2.4743,
- "step": 348
- },
- {
- "epoch": 0.9992841803865425,
- "grad_norm": 0.44578455043995546,
- "learning_rate": 0.0,
- "loss": 2.5218,
- "step": 349
- }
- ],
- "logging_steps": 1,
- "max_steps": 349,
- "num_input_tokens_seen": 0,
- "num_train_epochs": 1,
- "save_steps": 500,
- "stateful_callbacks": {
- "TrainerControl": {
- "args": {
- "should_epoch_stop": false,
- "should_evaluate": false,
- "should_log": false,
- "should_save": true,
- "should_training_stop": true
- },
- "attributes": {}
- }
- },
- "total_flos": 4.4632318176290406e+17,
- "train_batch_size": 3,
- "trial_name": null,
- "trial_params": null
- }
 
checkpoint-349/training_args.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:592fa413ce36c00588c414891e8d44bffe7432823a442a999ac046d72a5df0aa
- size 8120
 
checkpoint-349/zero_to_fp32.py DELETED
@@ -1,604 +0,0 @@
1
- #!/usr/bin/env python
2
-
3
- # Copyright (c) Microsoft Corporation.
4
- # SPDX-License-Identifier: Apache-2.0
5
-
6
- # DeepSpeed Team
7
-
8
- # This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
9
- # copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
10
- # the future. Once extracted, the weights don't require DeepSpeed and can be used in any
11
- # application.
12
- #
13
- # example: python zero_to_fp32.py . pytorch_model.bin
14
-
15
- import argparse
16
- import torch
17
- import glob
18
- import math
19
- import os
20
- import re
21
- from collections import OrderedDict
22
- from dataclasses import dataclass
23
-
24
- # while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
25
- # DeepSpeed data structures it has to be available in the current python environment.
26
- from deepspeed.utils import logger
27
- from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
28
- FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
29
- FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
30
-
31
-
32
- @dataclass
33
- class zero_model_state:
34
- buffers: dict()
35
- param_shapes: dict()
36
- shared_params: list
37
- ds_version: int
38
- frozen_param_shapes: dict()
39
- frozen_param_fragments: dict()
40
-
41
-
42
- debug = 0
43
-
44
- # load to cpu
45
- device = torch.device('cpu')
46
-
47
-
48
- def atoi(text):
49
- return int(text) if text.isdigit() else text
50
-
51
-
52
- def natural_keys(text):
53
- '''
54
- alist.sort(key=natural_keys) sorts in human order
55
- http://nedbatchelder.com/blog/200712/human_sorting.html
56
- (See Toothy's implementation in the comments)
57
- '''
58
- return [atoi(c) for c in re.split(r'(\d+)', text)]
59
-
60
-
61
- def get_model_state_file(checkpoint_dir, zero_stage):
62
- if not os.path.isdir(checkpoint_dir):
63
- raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
64
-
65
- # there should be only one file
66
- if zero_stage <= 2:
67
- file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
68
- elif zero_stage == 3:
69
- file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
70
-
71
- if not os.path.exists(file):
72
- raise FileNotFoundError(f"can't find model states file at '{file}'")
73
-
74
- return file
75
-
76
-
77
- def get_checkpoint_files(checkpoint_dir, glob_pattern):
78
- # XXX: need to test that this simple glob rule works for multi-node setup too
79
- ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
80
-
81
- if len(ckpt_files) == 0:
82
- raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
83
-
84
- return ckpt_files
85
-
86
-
87
- def get_optim_files(checkpoint_dir):
88
- return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
89
-
90
-
91
- def get_model_state_files(checkpoint_dir):
92
- return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
93
-
94
-
95
- def parse_model_states(files):
96
- zero_model_states = []
97
- for file in files:
98
- state_dict = torch.load(file, map_location=device)
99
-
100
- if BUFFER_NAMES not in state_dict:
101
- raise ValueError(f"{file} is not a model state checkpoint")
102
- buffer_names = state_dict[BUFFER_NAMES]
103
- if debug:
104
- print("Found buffers:", buffer_names)
105
-
106
- # recover just the buffers while restoring them to fp32 if they were saved in fp16
107
- buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
108
- param_shapes = state_dict[PARAM_SHAPES]
109
-
110
- # collect parameters that are included in param_shapes
111
- param_names = []
112
- for s in param_shapes:
113
- for name in s.keys():
114
- param_names.append(name)
115
-
116
- # update with frozen parameters
117
- frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
118
- if frozen_param_shapes is not None:
119
- if debug:
120
- print(f"Found frozen_param_shapes: {frozen_param_shapes}")
121
- param_names += list(frozen_param_shapes.keys())
122
-
123
- # handle shared params
124
- shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
125
-
126
- ds_version = state_dict.get(DS_VERSION, None)
127
-
128
- frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
129
-
130
- z_model_state = zero_model_state(buffers=buffers,
131
- param_shapes=param_shapes,
132
- shared_params=shared_params,
133
- ds_version=ds_version,
134
- frozen_param_shapes=frozen_param_shapes,
135
- frozen_param_fragments=frozen_param_fragments)
136
- zero_model_states.append(z_model_state)
137
-
138
- return zero_model_states
139
-
140
-
141
- def parse_optim_states(files, ds_checkpoint_dir):
142
-
143
- total_files = len(files)
144
- state_dicts = []
145
- for f in files:
146
- state_dict = torch.load(f, map_location=device)
147
- # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
148
- # and also handle the case where it was already removed by another helper script
149
- state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
150
- state_dicts.append(state_dict)
151
-
152
- if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
153
- raise ValueError(f"{files[0]} is not a zero checkpoint")
154
- zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
155
- world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
156
-
157
- # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
158
- # parameters can be different from data parallelism for non-expert parameters. So we can just
159
- # use the max of the partition_count to get the dp world_size.
160
-
161
- if type(world_size) is list:
162
- world_size = max(world_size)
163
-
164
- if world_size != total_files:
165
- raise ValueError(
166
- f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
167
- "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
168
- )
169
-
170
- # the groups are named differently in each stage
171
- if zero_stage <= 2:
172
- fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
173
- elif zero_stage == 3:
174
- fp32_groups_key = FP32_FLAT_GROUPS
175
- else:
176
- raise ValueError(f"unknown zero stage {zero_stage}")
177
-
178
- if zero_stage <= 2:
179
- fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
180
- elif zero_stage == 3:
181
- # if there is more than one param group, there will be multiple flattened tensors - one
182
- # flattened tensor per group - for simplicity merge them into a single tensor
183
- #
184
- # XXX: could make the script more memory efficient for when there are multiple groups - it
185
- # will require matching the sub-lists of param_shapes for each param group flattened tensor
186
-
187
- fp32_flat_groups = [
188
- torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
189
- ]
190
-
191
- return zero_stage, world_size, fp32_flat_groups
192
-
193
-
194
- def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
195
- """
196
- Returns fp32 state_dict reconstructed from ds checkpoint
197
-
198
- Args:
199
- - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
200
-
201
- """
202
- print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
203
-
204
- optim_files = get_optim_files(ds_checkpoint_dir)
205
- zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
206
- print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
207
-
208
- model_files = get_model_state_files(ds_checkpoint_dir)
209
-
210
- zero_model_states = parse_model_states(model_files)
211
- print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
212
-
213
- if zero_stage <= 2:
214
- return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
215
- exclude_frozen_parameters)
216
- elif zero_stage == 3:
217
- return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
218
- exclude_frozen_parameters)
219
-
220
-
221
- def _zero2_merge_frozen_params(state_dict, zero_model_states):
222
- if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
223
- return
224
-
225
- frozen_param_shapes = zero_model_states[0].frozen_param_shapes
226
- frozen_param_fragments = zero_model_states[0].frozen_param_fragments
227
-
228
- if debug:
229
- num_elem = sum(s.numel() for s in frozen_param_shapes.values())
230
- print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
231
-
232
- wanted_params = len(frozen_param_shapes)
233
- wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
234
- avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
235
- print(f'Frozen params: Have {avail_numel} numels to process.')
236
- print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
237
-
238
- total_params = 0
239
- total_numel = 0
240
- for name, shape in frozen_param_shapes.items():
241
- total_params += 1
242
- unpartitioned_numel = shape.numel()
243
- total_numel += unpartitioned_numel
244
-
245
- state_dict[name] = frozen_param_fragments[name]
246
-
247
- if debug:
248
- print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
249
-
250
- print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
251
-
252
-
253
- def _has_callable(obj, fn):
254
- attr = getattr(obj, fn, None)
255
- return callable(attr)
256
-
257
-
258
- def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
259
- param_shapes = zero_model_states[0].param_shapes
260
-
261
- # Reconstruction protocol:
262
- #
263
- # XXX: document this
264
-
265
- if debug:
266
- for i in range(world_size):
267
- for j in range(len(fp32_flat_groups[0])):
268
- print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
269
-
270
- # XXX: memory usage doubles here (zero2)
271
- num_param_groups = len(fp32_flat_groups[0])
272
- merged_single_partition_of_fp32_groups = []
273
- for i in range(num_param_groups):
274
- merged_partitions = [sd[i] for sd in fp32_flat_groups]
275
- full_single_fp32_vector = torch.cat(merged_partitions, 0)
276
- merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
277
- avail_numel = sum(
278
- [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
279
-
280
- if debug:
281
- wanted_params = sum([len(shapes) for shapes in param_shapes])
282
- wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
283
- # not asserting if there is a mismatch due to possible padding
284
- print(f"Have {avail_numel} numels to process.")
285
- print(f"Need {wanted_numel} numels in {wanted_params} params.")
286
-
287
- # params
288
- # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
289
- # out-of-core computing solution
290
- total_numel = 0
291
- total_params = 0
292
- for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
293
- offset = 0
294
- avail_numel = full_single_fp32_vector.numel()
295
- for name, shape in shapes.items():
296
-
297
- unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
298
- total_numel += unpartitioned_numel
299
- total_params += 1
300
-
301
- if debug:
302
- print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
303
- state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
304
- offset += unpartitioned_numel
305
-
306
- # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
307
- # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
308
- # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
309
- # live optimizer object, so we are checking that the numbers are within the right range
310
- align_to = 2 * world_size
311
-
312
- def zero2_align(x):
313
- return align_to * math.ceil(x / align_to)
314
-
315
- if debug:
316
- print(f"original offset={offset}, avail_numel={avail_numel}")
317
-
318
- offset = zero2_align(offset)
319
- avail_numel = zero2_align(avail_numel)
320
-
321
- if debug:
322
- print(f"aligned offset={offset}, avail_numel={avail_numel}")
323
-
324
- # Sanity check
325
- if offset != avail_numel:
326
- raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
327
-
328
- print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
329
-
330
-
331
- def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
332
- exclude_frozen_parameters):
333
- state_dict = OrderedDict()
334
-
335
- # buffers
336
- buffers = zero_model_states[0].buffers
337
- state_dict.update(buffers)
338
- if debug:
339
- print(f"added {len(buffers)} buffers")
340
-
341
- if not exclude_frozen_parameters:
342
- _zero2_merge_frozen_params(state_dict, zero_model_states)
343
-
344
- _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
345
-
346
- # recover shared parameters
347
- for pair in zero_model_states[0].shared_params:
348
- if pair[1] in state_dict:
349
- state_dict[pair[0]] = state_dict[pair[1]]
350
-
351
- return state_dict
352
-
353
-
354
- def zero3_partitioned_param_info(unpartitioned_numel, world_size):
355
- remainder = unpartitioned_numel % world_size
356
- padding_numel = (world_size - remainder) if remainder else 0
357
- partitioned_numel = math.ceil(unpartitioned_numel / world_size)
358
- return partitioned_numel, padding_numel
359
-
360
-
361
- def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
362
- if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
363
- return
364
-
365
- if debug:
366
- for i in range(world_size):
367
- num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
368
- print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
369
-
370
- frozen_param_shapes = zero_model_states[0].frozen_param_shapes
371
- wanted_params = len(frozen_param_shapes)
372
- wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
373
- avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
374
- print(f'Frozen params: Have {avail_numel} numels to process.')
375
- print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
376
-
377
- total_params = 0
378
- total_numel = 0
379
- for name, shape in zero_model_states[0].frozen_param_shapes.items():
380
- total_params += 1
381
- unpartitioned_numel = shape.numel()
382
- total_numel += unpartitioned_numel
383
-
384
- param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
385
- state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
386
-
387
- partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
388
-
389
- if debug:
390
- print(
391
- f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
392
- )
393
-
394
- print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
395
-
396
-
397
- def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
398
- param_shapes = zero_model_states[0].param_shapes
399
- avail_numel = fp32_flat_groups[0].numel() * world_size
400
- # Reconstruction protocol: for ZeRO-3 we need to zip the partitions together at the boundary of
401
- # each param, re-consolidating every param while dealing with any padding.
402
-
403
- # merge list of dicts, preserving order
404
- param_shapes = {k: v for d in param_shapes for k, v in d.items()}
405
-
406
- if debug:
407
- for i in range(world_size):
408
- print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
409
-
410
- wanted_params = len(param_shapes)
411
- wanted_numel = sum(shape.numel() for shape in param_shapes.values())
412
- # not asserting if there is a mismatch due to possible padding
413
- avail_numel = fp32_flat_groups[0].numel() * world_size
414
- print(f"Trainable params: Have {avail_numel} numels to process.")
415
- print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
416
-
417
- # params
418
- # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
419
- # an out-of-core computing solution
420
- offset = 0
421
- total_numel = 0
422
- total_params = 0
423
- for name, shape in param_shapes.items():
424
-
425
- unpartitioned_numel = shape.numel()
426
- total_numel += unpartitioned_numel
427
- total_params += 1
428
-
429
- partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
430
-
431
- if debug:
432
- print(
433
- f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
434
- )
435
-
436
- # XXX: memory usage doubles here
437
- state_dict[name] = torch.cat(
438
- tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
439
- 0).narrow(0, 0, unpartitioned_numel).view(shape)
440
- offset += partitioned_numel
441
-
442
- offset *= world_size
443
-
444
- # Sanity check
445
- if offset != avail_numel:
446
- raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
447
-
448
- print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
449
-
450
-
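# --- Editor's illustrative sketch (not part of the deleted script): a toy
# demonstration of the zip-and-trim reconstruction above. A 5-element fp32
# param sharded across 2 ranks (partition size ceil(5/2)=3, one padding
# element) is re-consolidated with cat + narrow + view; all values are
# hypothetical.
import torch

world_size = 2
partitioned_numel, unpartitioned_numel = 3, 5
flat_groups = [torch.tensor([0., 1., 2.]),     # rank 0 partition
               torch.tensor([3., 4., 0.])]     # rank 1 partition (1 pad element)

rebuilt = torch.cat(
    tuple(flat_groups[i].narrow(0, 0, partitioned_numel) for i in range(world_size)),
    0).narrow(0, 0, unpartitioned_numel).view(unpartitioned_numel)
assert torch.equal(rebuilt, torch.arange(5, dtype=torch.float32))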
451
- def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
452
- exclude_frozen_parameters):
453
- state_dict = OrderedDict()
454
-
455
- # buffers
456
- buffers = zero_model_states[0].buffers
457
- state_dict.update(buffers)
458
- if debug:
459
- print(f"added {len(buffers)} buffers")
460
-
461
- if not exclude_frozen_parameters:
462
- _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
463
-
464
- _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
465
-
466
- # recover shared parameters
467
- for pair in zero_model_states[0].shared_params:
468
- if pair[1] in state_dict:
469
- state_dict[pair[0]] = state_dict[pair[1]]
470
-
471
- return state_dict
472
-
473
-
474
- def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
475
- """
476
- Convert a ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
477
- ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
478
- via a model hub.
479
-
480
- Args:
481
- - ``checkpoint_dir``: path to the desired checkpoint folder
482
- - ``tag``: checkpoint tag used as a unique identifier for the checkpoint. If not provided, will attempt to load the tag from the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
483
- - ``exclude_frozen_parameters``: exclude frozen parameters
484
-
485
- Returns:
486
- - pytorch ``state_dict``
487
-
488
- Note: this approach may not work if your application doesn't have sufficient free CPU memory, in
489
- which case you may need to use the offline approach via the ``zero_to_fp32.py`` script that is
490
- saved with the checkpoint.
491
-
492
- A typical usage might be ::
493
-
494
- from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
495
- # do the training and checkpoint saving
496
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
497
- model = model.cpu() # move to cpu
498
- model.load_state_dict(state_dict)
499
- # submit to model hub or save the model to share with others
500
-
501
- In this example the ``model`` will no longer be usable in the deepspeed context of the same
502
- application, i.e. you will need to re-initialize the deepspeed engine, since
503
- ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
504
-
505
- If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
506
-
507
- """
508
- if tag is None:
509
- latest_path = os.path.join(checkpoint_dir, 'latest')
510
- if os.path.isfile(latest_path):
511
- with open(latest_path, 'r') as fd:
512
- tag = fd.read().strip()
513
- else:
514
- raise ValueError(f"Unable to find 'latest' file at {latest_path}")
515
-
516
- ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
517
-
518
- if not os.path.isdir(ds_checkpoint_dir):
519
- raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
520
-
521
- return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
522
-
523
-
524
- def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
525
- """
526
- Convert a ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
527
- loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
528
-
529
- Args:
530
- - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
531
- - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
532
- - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
533
- - ``exclude_frozen_parameters``: exclude frozen parameters
534
- """
535
-
536
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
537
- print(f"Saving fp32 state dict to {output_file}")
538
- torch.save(state_dict, output_file)
539
-
540
-
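# --- Editor's illustrative usage sketch (not part of the deleted script); the
# paths and tag are hypothetical:
#
#   convert_zero_checkpoint_to_fp32_state_dict("path/checkpoint-12",
#                                              "path/checkpoint-12/pytorch_model.bin",
#                                              tag="global_step14")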
541
- def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
542
- """
543
- 1. Put the provided model to cpu
544
- 2. Convert a ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
545
- 3. Load it into the provided model
546
-
547
- Args:
548
- - ``model``: the model object to update
549
- - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
550
- - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
551
-
552
- Returns:
553
- - ``model``: modified model
554
-
555
- Make sure you have plenty of CPU memory available before you call this function. If you don't
556
- have enough, use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
557
- conveniently placed for you in the checkpoint folder.
558
-
559
- A typical usage might be ::
560
-
561
- from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
562
- model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
563
- # submit to model hub or save the model to share with others
564
-
565
- Note that once this has been run, the ``model`` will no longer be usable in the deepspeed context
567
- of the same application, i.e. you will need to re-initialize the deepspeed engine, since
567
- ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
568
-
569
- """
570
- logger.info(f"Extracting fp32 weights")
571
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
572
-
573
- logger.info(f"Overwriting model with fp32 weights")
574
- model = model.cpu()
575
- model.load_state_dict(state_dict, strict=False)
576
-
577
- return model
578
-
579
-
580
- if __name__ == "__main__":
581
-
582
- parser = argparse.ArgumentParser()
583
- parser.add_argument("checkpoint_dir",
584
- type=str,
585
- help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
586
- parser.add_argument(
587
- "output_file",
588
- type=str,
589
- help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
590
- parser.add_argument("-t",
591
- "--tag",
592
- type=str,
593
- default=None,
594
- help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
595
- parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
596
- parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
597
- args = parser.parse_args()
598
-
599
- debug = args.debug
600
-
601
- convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
602
- args.output_file,
603
- tag=args.tag,
604
- exclude_frozen_parameters=args.exclude_frozen_parameters)
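# --- Editor's note (not part of the deleted script): a typical CLI invocation
# matching the arguments parsed above, with hypothetical paths:
#
#   python zero_to_fp32.py path/checkpoint-12 path/checkpoint-12/pytorch_model.bin -t global_step14
#
# Add --exclude_frozen_parameters to drop frozen weights and -d for debug prints.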