Lingyue1 committed
Commit c66737e · verified · 1 Parent(s): 6c2b9d7

Upload folder using huggingface_hub

Files changed (36)
  1. .gitattributes +4 -0
  2. .ipynb_checkpoints/README-checkpoint.md +61 -0
  3. .ipynb_checkpoints/tokenizer-checkpoint.json +3 -0
  4. .ipynb_checkpoints/tokenizer_config-checkpoint.json +295 -0
  5. .ipynb_checkpoints/tokenizer_config_ol-checkpoint.json +295 -0
  6. .ipynb_checkpoints/tokenizer_old-checkpoint.json +3 -0
  7. .ipynb_checkpoints/trainer_log-checkpoint.jsonl +502 -0
  8. .ipynb_checkpoints/training_loss-checkpoint.png +0 -0
  9. README.md +61 -0
  10. all_results.json +8 -0
  11. config.json +30 -0
  12. generation_config.json +9 -0
  13. model-00001-of-00004.safetensors +3 -0
  14. model-00002-of-00004.safetensors +3 -0
  15. model-00003-of-00004.safetensors +3 -0
  16. model-00004-of-00004.safetensors +3 -0
  17. model.safetensors.index.json +346 -0
  18. runs/Apr11_15-49-42_hx-rs4804g/events.out.tfevents.1744358013.hx-rs4804g +3 -0
  19. runs/Apr11_15-57-09_hx-rs4804g/events.out.tfevents.1744358435.hx-rs4804g +3 -0
  20. runs/Apr11_16-24-16_hx-rs4804g/events.out.tfevents.1744360084.hx-rs4804g +3 -0
  21. runs/Apr17_15-49-32_hx-rs4804g/events.out.tfevents.1744876389.hx-rs4804g +3 -0
  22. runs/Apr17_16-15-17_hx-rs4804g/events.out.tfevents.1744877945.hx-rs4804g +3 -0
  23. runs/Apr17_16-34-35_hx-rs4804g/events.out.tfevents.1744879059.hx-rs4804g +3 -0
  24. runs/Apr17_16-47-38_hx-rs4804g/events.out.tfevents.1744879849.hx-rs4804g +3 -0
  25. special_tokens_map.json +26 -0
  26. special_tokens_map_old.json +32 -0
  27. tokenizer.json +3 -0
  28. tokenizer_config.json +295 -0
  29. tokenizer_config_ol.json +295 -0
  30. tokenizer_config_old.json +199 -0
  31. tokenizer_old.json +3 -0
  32. train_results.json +8 -0
  33. trainer_log.jsonl +0 -0
  34. trainer_state.json +0 -0
  35. training_args.bin +3 -0
  36. training_loss.png +0 -0
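
The commit message above indicates the folder was pushed with `huggingface_hub`'s upload API. A minimal sketch of what that call looks like; the local folder path and target repo id are assumptions, not taken from this page:

```python
from huggingface_hub import HfApi

api = HfApi()

# Hypothetical paths/ids; the call shape matches the default
# "Upload folder using huggingface_hub" commit shown on this page.
api.upload_folder(
    folder_path="./Light-R1-ly",      # local training output directory (assumed)
    repo_id="Lingyue1/Light-R1-ly",   # assumed target model repo
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```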
.gitattributes CHANGED
@@ -33,3 +33,7 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ .ipynb_checkpoints/tokenizer-checkpoint.json filter=lfs diff=lfs merge=lfs -text
+ .ipynb_checkpoints/tokenizer_old-checkpoint.json filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
+ tokenizer_old.json filter=lfs diff=lfs merge=lfs -text
.ipynb_checkpoints/README-checkpoint.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: deepseek-ai/DeepSeek-R1-Distill-Qwen-7B
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: Light-R1-ly
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # Light-R1
+
+ This model is a fine-tuned version of [deepseek-ai/DeepSeek-R1-Distill-Qwen-7B](https://huggingface.co//lamda12/zhouz/models/DeepSeek-R1-Distill-Qwen-7B) on the light_r1_3k dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 5e-06
+ - train_batch_size: 2
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 2
+ - gradient_accumulation_steps: 16
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 16
+ - optimizer: Adam with betas=(0.9,0.95) and epsilon=1e-08
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.01
+ - num_epochs: 10.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.45.2
+ - Pytorch 2.6.0+cu124
+ - Datasets 3.1.0
+ - Tokenizers 0.20.3
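
The model card above leaves usage unspecified ("More information needed"). A minimal, hedged inference sketch for a checkpoint like this one; the repo id is hypothetical and should be replaced with the actual one:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical repo id for this upload; substitute the real one.
repo_id = "Lingyue1/Light-R1-ly"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "Prove that sqrt(2) is irrational."}]
inputs = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

outputs = model.generate(inputs, max_new_tokens=1024)
print(tokenizer.decode(outputs[0][inputs.shape[-1]:], skip_special_tokens=True))
```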
.ipynb_checkpoints/tokenizer-checkpoint.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7b1665d34943ed836d7c1277a72b52345855a4f7e295d475d00f9cd90af2891
+ size 11425024
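
The three lines above are a Git LFS pointer stub, not the tokenizer itself: per the new .gitattributes rules, the ~11 MB tokenizer JSON is stored in LFS and addressed by its SHA-256. A small sketch parsing that pointer format (key/value lines per https://git-lfs.github.com/spec/v1):

```python
# Parse a Git LFS pointer stub like the one shown in this diff.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")  # each line is "key value"
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:c7b1665d34943ed836d7c1277a72b52345855a4f7e295d475d00f9cd90af2891
size 11425024"""

info = parse_lfs_pointer(pointer)
print(info["oid"])        # sha256:c7b1665d...
print(int(info["size"]))  # 11425024 bytes (~11 MB tokenizer file)
```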
.ipynb_checkpoints/tokenizer_config-checkpoint.json ADDED
@@ -0,0 +1,295 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": null,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|end▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|User|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151645": {
+       "content": "<|Assistant|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151646": {
+       "content": "<|begin▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|EOT|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151648": {
+       "content": "<think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151649": {
+       "content": "</think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151650": {
+       "content": "<|quad_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|quad_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|vision_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|vision_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|vision_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|image_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|video_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151658": {
+       "content": "</tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151659": {
+       "content": "<|fim_prefix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151660": {
+       "content": "<|fim_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151661": {
+       "content": "<|fim_suffix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151662": {
+       "content": "<|fim_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151663": {
+       "content": "<|repo_name|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151664": {
+       "content": "<|file_sep|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151665": {
+       "content": "<rephrase>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151666": {
+       "content": "</rephrase>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151667": {
+       "content": "<decompose>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151668": {
+       "content": "</decompose>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151669": {
+       "content": "<thought>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151670": {
+       "content": "</thought>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151671": {
+       "content": "<answer>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151672": {
+       "content": "</answer>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151673": {
+       "content": "<reflect>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151674": {
+       "content": "</reflect>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151675": {
+       "content": "<summarize>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151676": {
+       "content": "</summarize>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|end▁of▁sentence|>"
+   ],
+   "bos_token": "<|begin▁of▁sentence|>",
+   "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|begin▁of▁sentence|><|User|>' + content + '<|Assistant|>' }}{% elif message['role'] == 'assistant' %}{{ content + '<|end▁of▁sentence|>' + '\n' }}{% endif %}{% endfor %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|end▁of▁sentence|>",
+   "legacy": true,
+   "model_max_length": 32768,
+   "pad_token": "<|end▁of▁sentence|>",
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "split_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": null,
+   "use_default_system_prompt": false
+ }
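
The `chat_template` in this config wraps each user turn in `<|begin▁of▁sentence|><|User|> ... <|Assistant|>` and terminates assistant turns with `<|end▁of▁sentence|>`. A short sketch of how it renders via the standard transformers API; the local path is an assumption:

```python
from transformers import AutoTokenizer

# Hypothetical local directory containing the tokenizer_config.json above.
tokenizer = AutoTokenizer.from_pretrained("./Light-R1-ly")

messages = [
    {"role": "system", "content": "You are a careful math assistant."},
    {"role": "user", "content": "What is 17 * 24?"},
]
text = tokenizer.apply_chat_template(messages, tokenize=False)
print(text)
# Expected shape, per the Jinja template (system text first, unmarked,
# then the wrapped user turn ending in the assistant marker):
# You are a careful math assistant.<|begin▁of▁sentence|><|User|>What is 17 * 24?<|Assistant|>
```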
.ipynb_checkpoints/tokenizer_config_ol-checkpoint.json ADDED
@@ -0,0 +1,295 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": null,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|end▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|User|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151645": {
+       "content": "<|Assistant|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151646": {
+       "content": "<|begin▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|EOT|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151648": {
+       "content": "<think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151649": {
+       "content": "</think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151650": {
+       "content": "<|quad_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|quad_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|vision_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|vision_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|vision_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|image_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|video_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151658": {
+       "content": "</tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151659": {
+       "content": "<|fim_prefix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151660": {
+       "content": "<|fim_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151661": {
+       "content": "<|fim_suffix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151662": {
+       "content": "<|fim_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151663": {
+       "content": "<|repo_name|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151664": {
+       "content": "<|file_sep|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151665": {
+       "content": "<rephrase>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151666": {
+       "content": "</rephrase>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151667": {
+       "content": "<decompose>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151668": {
+       "content": "</decompose>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151669": {
+       "content": "<thought>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151670": {
+       "content": "</thought>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151671": {
+       "content": "<answer>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151672": {
+       "content": "</answer>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151673": {
+       "content": "<reflect>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151674": {
+       "content": "</reflect>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151675": {
+       "content": "<summarize>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151676": {
+       "content": "</summarize>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|end▁of▁sentence|>"
+   ],
+   "bos_token": "<|begin▁of▁sentence|>",
+   "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|begin▁of▁sentence|><|User|>' + content + '<|Assistant|>' }}{% elif message['role'] == 'assistant' %}{{ content + '<|end▁of▁sentence|>' + '\n' }}{% endif %}{% endfor %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|end▁of▁sentence|>",
+   "legacy": true,
+   "model_max_length": 32768,
+   "pad_token": "<|end▁of▁sentence|>",
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "split_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": null,
+   "use_default_system_prompt": false
+ }
.ipynb_checkpoints/tokenizer_old-checkpoint.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e20ddafc659ba90242154b55275402edeca0715e5dbb30f56815a4ce081f4893
+ size 11422778
.ipynb_checkpoints/trainer_log-checkpoint.jsonl ADDED
@@ -0,0 +1,502 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {"current_steps": 1, "total_steps": 550, "loss": 0.392, "lr": 8.333333333333333e-07, "epoch": 0.01809954751131222, "percentage": 0.18, "elapsed_time": "0:03:24", "remaining_time": "1 day, 7:06:53"}
2
+ {"current_steps": 2, "total_steps": 550, "loss": 0.3479, "lr": 1.6666666666666667e-06, "epoch": 0.03619909502262444, "percentage": 0.36, "elapsed_time": "0:06:13", "remaining_time": "1 day, 4:24:06"}
3
+ {"current_steps": 3, "total_steps": 550, "loss": 0.363, "lr": 2.5e-06, "epoch": 0.05429864253393665, "percentage": 0.55, "elapsed_time": "0:09:03", "remaining_time": "1 day, 3:31:24"}
4
+ {"current_steps": 4, "total_steps": 550, "loss": 0.3513, "lr": 3.3333333333333333e-06, "epoch": 0.07239819004524888, "percentage": 0.73, "elapsed_time": "0:13:40", "remaining_time": "1 day, 7:06:04"}
5
+ {"current_steps": 5, "total_steps": 550, "loss": 0.3759, "lr": 4.166666666666667e-06, "epoch": 0.09049773755656108, "percentage": 0.91, "elapsed_time": "0:18:09", "remaining_time": "1 day, 8:58:34"}
6
+ {"current_steps": 6, "total_steps": 550, "loss": 0.3525, "lr": 5e-06, "epoch": 0.1085972850678733, "percentage": 1.09, "elapsed_time": "0:22:38", "remaining_time": "1 day, 10:12:31"}
7
+ {"current_steps": 7, "total_steps": 550, "loss": 0.3904, "lr": 4.99995831202958e-06, "epoch": 0.12669683257918551, "percentage": 1.27, "elapsed_time": "0:27:19", "remaining_time": "1 day, 11:19:28"}
8
+ {"current_steps": 8, "total_steps": 550, "loss": 0.3924, "lr": 4.999833249508629e-06, "epoch": 0.14479638009049775, "percentage": 1.45, "elapsed_time": "0:32:00", "remaining_time": "1 day, 12:09:06"}
9
+ {"current_steps": 9, "total_steps": 550, "loss": 0.375, "lr": 4.999624816608027e-06, "epoch": 0.16289592760180996, "percentage": 1.64, "elapsed_time": "0:36:26", "remaining_time": "1 day, 12:30:06"}
10
+ {"current_steps": 10, "total_steps": 550, "loss": 0.356, "lr": 4.999333020279094e-06, "epoch": 0.18099547511312217, "percentage": 1.82, "elapsed_time": "0:40:36", "remaining_time": "1 day, 12:32:58"}
11
+ {"current_steps": 11, "total_steps": 550, "loss": 0.3598, "lr": 4.998957870253344e-06, "epoch": 0.19909502262443438, "percentage": 2.0, "elapsed_time": "0:45:09", "remaining_time": "1 day, 12:52:39"}
12
+ {"current_steps": 12, "total_steps": 550, "loss": 0.3392, "lr": 4.998499379042172e-06, "epoch": 0.2171945701357466, "percentage": 2.18, "elapsed_time": "0:49:38", "remaining_time": "1 day, 13:05:20"}
13
+ {"current_steps": 13, "total_steps": 550, "loss": 0.3691, "lr": 4.997957561936433e-06, "epoch": 0.23529411764705882, "percentage": 2.36, "elapsed_time": "0:52:29", "remaining_time": "1 day, 12:08:09"}
14
+ {"current_steps": 14, "total_steps": 550, "loss": 0.352, "lr": 4.997332437005932e-06, "epoch": 0.25339366515837103, "percentage": 2.55, "elapsed_time": "0:55:06", "remaining_time": "1 day, 11:09:45"}
15
+ {"current_steps": 15, "total_steps": 550, "loss": 0.3449, "lr": 4.996624025098819e-06, "epoch": 0.27149321266968324, "percentage": 2.73, "elapsed_time": "0:57:40", "remaining_time": "1 day, 10:17:15"}
16
+ {"current_steps": 16, "total_steps": 550, "loss": 0.3401, "lr": 4.9958323498409e-06, "epoch": 0.2895927601809955, "percentage": 2.91, "elapsed_time": "1:00:51", "remaining_time": "1 day, 9:51:25"}
17
+ {"current_steps": 17, "total_steps": 550, "loss": 0.3567, "lr": 4.99495743763484e-06, "epoch": 0.3076923076923077, "percentage": 3.09, "elapsed_time": "1:05:14", "remaining_time": "1 day, 10:05:37"}
18
+ {"current_steps": 18, "total_steps": 550, "loss": 0.3585, "lr": 4.993999317659293e-06, "epoch": 0.3257918552036199, "percentage": 3.27, "elapsed_time": "1:09:51", "remaining_time": "1 day, 10:24:45"}
19
+ {"current_steps": 19, "total_steps": 550, "loss": 0.3293, "lr": 4.9929580218679195e-06, "epoch": 0.3438914027149321, "percentage": 3.45, "elapsed_time": "1:14:20", "remaining_time": "1 day, 10:37:32"}
20
+ {"current_steps": 20, "total_steps": 550, "loss": 0.3437, "lr": 4.991833584988326e-06, "epoch": 0.36199095022624433, "percentage": 3.64, "elapsed_time": "1:18:25", "remaining_time": "1 day, 10:38:26"}
21
+ {"current_steps": 21, "total_steps": 550, "loss": 0.3249, "lr": 4.990626044520905e-06, "epoch": 0.38009049773755654, "percentage": 3.82, "elapsed_time": "1:23:07", "remaining_time": "1 day, 10:54:08"}
22
+ {"current_steps": 22, "total_steps": 550, "loss": 0.3532, "lr": 4.989335440737587e-06, "epoch": 0.39819004524886875, "percentage": 4.0, "elapsed_time": "1:27:40", "remaining_time": "1 day, 11:04:18"}
23
+ {"current_steps": 23, "total_steps": 550, "loss": 0.3533, "lr": 4.987961816680493e-06, "epoch": 0.416289592760181, "percentage": 4.18, "elapsed_time": "1:32:23", "remaining_time": "1 day, 11:17:03"}
24
+ {"current_steps": 24, "total_steps": 550, "loss": 0.3268, "lr": 4.986505218160502e-06, "epoch": 0.4343891402714932, "percentage": 4.36, "elapsed_time": "1:37:03", "remaining_time": "1 day, 11:27:07"}
25
+ {"current_steps": 25, "total_steps": 550, "loss": 0.3332, "lr": 4.984965693755723e-06, "epoch": 0.45248868778280543, "percentage": 4.55, "elapsed_time": "1:41:50", "remaining_time": "1 day, 11:38:34"}
26
+ {"current_steps": 26, "total_steps": 550, "loss": 0.3245, "lr": 4.983343294809875e-06, "epoch": 0.47058823529411764, "percentage": 4.73, "elapsed_time": "1:45:59", "remaining_time": "1 day, 11:36:13"}
27
+ {"current_steps": 27, "total_steps": 550, "loss": 0.3199, "lr": 4.981638075430572e-06, "epoch": 0.48868778280542985, "percentage": 4.91, "elapsed_time": "1:50:35", "remaining_time": "1 day, 11:42:09"}
28
+ {"current_steps": 28, "total_steps": 550, "loss": 0.3282, "lr": 4.979850092487525e-06, "epoch": 0.5067873303167421, "percentage": 5.09, "elapsed_time": "1:54:56", "remaining_time": "1 day, 11:42:42"}
29
+ {"current_steps": 29, "total_steps": 550, "loss": 0.337, "lr": 4.977979405610635e-06, "epoch": 0.5248868778280543, "percentage": 5.27, "elapsed_time": "1:59:04", "remaining_time": "1 day, 11:39:07"}
30
+ {"current_steps": 30, "total_steps": 550, "loss": 0.3265, "lr": 4.976026077188013e-06, "epoch": 0.5429864253393665, "percentage": 5.45, "elapsed_time": "2:01:58", "remaining_time": "1 day, 11:14:15"}
31
+ {"current_steps": 31, "total_steps": 550, "loss": 0.3568, "lr": 4.973990172363899e-06, "epoch": 0.5610859728506787, "percentage": 5.64, "elapsed_time": "2:04:47", "remaining_time": "1 day, 10:49:19"}
32
+ {"current_steps": 32, "total_steps": 550, "loss": 0.3287, "lr": 4.9718717590364855e-06, "epoch": 0.579185520361991, "percentage": 5.82, "elapsed_time": "2:07:12", "remaining_time": "1 day, 10:19:06"}
33
+ {"current_steps": 33, "total_steps": 550, "loss": 0.3267, "lr": 4.969670907855651e-06, "epoch": 0.5972850678733032, "percentage": 6.0, "elapsed_time": "2:09:48", "remaining_time": "1 day, 9:53:41"}
34
+ {"current_steps": 34, "total_steps": 550, "loss": 0.3367, "lr": 4.967387692220615e-06, "epoch": 0.6153846153846154, "percentage": 6.18, "elapsed_time": "2:12:31", "remaining_time": "1 day, 9:31:12"}
35
+ {"current_steps": 35, "total_steps": 550, "loss": 0.3236, "lr": 4.965022188277474e-06, "epoch": 0.6334841628959276, "percentage": 6.36, "elapsed_time": "2:14:55", "remaining_time": "1 day, 9:05:21"}
36
+ {"current_steps": 36, "total_steps": 550, "loss": 0.3236, "lr": 4.962574474916678e-06, "epoch": 0.6515837104072398, "percentage": 6.55, "elapsed_time": "2:17:27", "remaining_time": "1 day, 8:42:29"}
37
+ {"current_steps": 37, "total_steps": 550, "loss": 0.3295, "lr": 4.960044633770387e-06, "epoch": 0.669683257918552, "percentage": 6.73, "elapsed_time": "2:20:07", "remaining_time": "1 day, 8:22:50"}
38
+ {"current_steps": 38, "total_steps": 550, "loss": 0.3453, "lr": 4.957432749209755e-06, "epoch": 0.6877828054298643, "percentage": 6.91, "elapsed_time": "2:22:41", "remaining_time": "1 day, 8:02:33"}
39
+ {"current_steps": 39, "total_steps": 550, "loss": 0.3645, "lr": 4.954738908342116e-06, "epoch": 0.7058823529411765, "percentage": 7.09, "elapsed_time": "2:25:21", "remaining_time": "1 day, 7:44:29"}
40
+ {"current_steps": 40, "total_steps": 550, "loss": 0.3372, "lr": 4.9519632010080765e-06, "epoch": 0.7239819004524887, "percentage": 7.27, "elapsed_time": "2:27:55", "remaining_time": "1 day, 7:26:06"}
41
+ {"current_steps": 41, "total_steps": 550, "loss": 0.3349, "lr": 4.9491057197785205e-06, "epoch": 0.7420814479638009, "percentage": 7.45, "elapsed_time": "2:30:40", "remaining_time": "1 day, 7:10:33"}
42
+ {"current_steps": 42, "total_steps": 550, "loss": 0.3174, "lr": 4.946166559951523e-06, "epoch": 0.7601809954751131, "percentage": 7.64, "elapsed_time": "2:33:04", "remaining_time": "1 day, 6:51:28"}
43
+ {"current_steps": 43, "total_steps": 550, "loss": 0.3464, "lr": 4.943145819549169e-06, "epoch": 0.7782805429864253, "percentage": 7.82, "elapsed_time": "2:35:36", "remaining_time": "1 day, 6:34:39"}
44
+ {"current_steps": 44, "total_steps": 550, "loss": 0.3277, "lr": 4.9400435993142895e-06, "epoch": 0.7963800904977375, "percentage": 8.0, "elapsed_time": "2:38:07", "remaining_time": "1 day, 6:18:28"}
45
+ {"current_steps": 45, "total_steps": 550, "loss": 0.343, "lr": 4.936860002707096e-06, "epoch": 0.8144796380090498, "percentage": 8.18, "elapsed_time": "2:40:36", "remaining_time": "1 day, 6:02:27"}
46
+ {"current_steps": 46, "total_steps": 550, "loss": 0.3425, "lr": 4.933595135901733e-06, "epoch": 0.832579185520362, "percentage": 8.36, "elapsed_time": "2:43:23", "remaining_time": "1 day, 5:50:06"}
47
+ {"current_steps": 47, "total_steps": 550, "loss": 0.3345, "lr": 4.9302491077827366e-06, "epoch": 0.8506787330316742, "percentage": 8.55, "elapsed_time": "2:45:50", "remaining_time": "1 day, 5:34:50"}
48
+ {"current_steps": 48, "total_steps": 550, "loss": 0.3599, "lr": 4.926822029941406e-06, "epoch": 0.8687782805429864, "percentage": 8.73, "elapsed_time": "2:48:39", "remaining_time": "1 day, 5:23:48"}
49
+ {"current_steps": 49, "total_steps": 550, "loss": 0.3293, "lr": 4.923314016672075e-06, "epoch": 0.8868778280542986, "percentage": 8.91, "elapsed_time": "2:51:24", "remaining_time": "1 day, 5:12:37"}
50
+ {"current_steps": 50, "total_steps": 550, "loss": 0.3231, "lr": 4.919725184968307e-06, "epoch": 0.9049773755656109, "percentage": 9.09, "elapsed_time": "2:53:57", "remaining_time": "1 day, 4:59:36"}
51
+ {"current_steps": 51, "total_steps": 550, "loss": 0.3248, "lr": 4.9160556545189895e-06, "epoch": 0.9230769230769231, "percentage": 9.27, "elapsed_time": "2:59:30", "remaining_time": "1 day, 5:16:18"}
52
+ {"current_steps": 52, "total_steps": 550, "loss": 0.3314, "lr": 4.9123055477043454e-06, "epoch": 0.9411764705882353, "percentage": 9.45, "elapsed_time": "3:01:57", "remaining_time": "1 day, 5:02:39"}
53
+ {"current_steps": 53, "total_steps": 550, "loss": 0.3341, "lr": 4.908474989591846e-06, "epoch": 0.9592760180995475, "percentage": 9.64, "elapsed_time": "3:04:33", "remaining_time": "1 day, 4:50:40"}
54
+ {"current_steps": 54, "total_steps": 550, "loss": 0.3189, "lr": 4.904564107932048e-06, "epoch": 0.9773755656108597, "percentage": 9.82, "elapsed_time": "3:06:44", "remaining_time": "1 day, 4:35:16"}
55
+ {"current_steps": 55, "total_steps": 550, "loss": 0.3198, "lr": 4.900573033154325e-06, "epoch": 0.995475113122172, "percentage": 10.0, "elapsed_time": "3:09:11", "remaining_time": "1 day, 4:22:41"}
56
+ {"current_steps": 56, "total_steps": 550, "loss": 0.3273, "lr": 4.8965018983625245e-06, "epoch": 1.0135746606334841, "percentage": 10.18, "elapsed_time": "3:11:44", "remaining_time": "1 day, 4:11:27"}
57
+ {"current_steps": 57, "total_steps": 550, "loss": 0.3058, "lr": 4.8923508393305224e-06, "epoch": 1.0316742081447963, "percentage": 10.36, "elapsed_time": "3:14:13", "remaining_time": "1 day, 3:59:51"}
58
+ {"current_steps": 58, "total_steps": 550, "loss": 0.2949, "lr": 4.888119994497701e-06, "epoch": 1.0497737556561086, "percentage": 10.55, "elapsed_time": "3:16:39", "remaining_time": "1 day, 3:48:15"}
59
+ {"current_steps": 59, "total_steps": 550, "loss": 0.298, "lr": 4.883809504964325e-06, "epoch": 1.0678733031674208, "percentage": 10.73, "elapsed_time": "3:19:00", "remaining_time": "1 day, 3:36:12"}
60
+ {"current_steps": 60, "total_steps": 550, "loss": 0.3201, "lr": 4.879419514486846e-06, "epoch": 1.085972850678733, "percentage": 10.91, "elapsed_time": "3:21:26", "remaining_time": "1 day, 3:25:10"}
61
+ {"current_steps": 61, "total_steps": 550, "loss": 0.3338, "lr": 4.874950169473097e-06, "epoch": 1.1040723981900453, "percentage": 11.09, "elapsed_time": "3:24:12", "remaining_time": "1 day, 3:17:03"}
62
+ {"current_steps": 62, "total_steps": 550, "loss": 0.3053, "lr": 4.870401618977415e-06, "epoch": 1.1221719457013575, "percentage": 11.27, "elapsed_time": "3:26:52", "remaining_time": "1 day, 3:08:16"}
63
+ {"current_steps": 63, "total_steps": 550, "loss": 0.3346, "lr": 4.8657740146956724e-06, "epoch": 1.1402714932126696, "percentage": 11.45, "elapsed_time": "3:29:35", "remaining_time": "1 day, 3:00:07"}
64
+ {"current_steps": 64, "total_steps": 550, "loss": 0.3175, "lr": 4.8610675109602135e-06, "epoch": 1.1583710407239818, "percentage": 11.64, "elapsed_time": "3:32:04", "remaining_time": "1 day, 2:50:27"}
65
+ {"current_steps": 65, "total_steps": 550, "loss": 0.2973, "lr": 4.856282264734708e-06, "epoch": 1.1764705882352942, "percentage": 11.82, "elapsed_time": "3:34:27", "remaining_time": "1 day, 2:40:09"}
66
+ {"current_steps": 66, "total_steps": 550, "loss": 0.3328, "lr": 4.851418435608919e-06, "epoch": 1.1945701357466063, "percentage": 12.0, "elapsed_time": "3:36:52", "remaining_time": "1 day, 2:30:25"}
67
+ {"current_steps": 67, "total_steps": 550, "loss": 0.3233, "lr": 4.84647618579338e-06, "epoch": 1.2126696832579185, "percentage": 12.18, "elapsed_time": "3:39:28", "remaining_time": "1 day, 2:22:08"}
68
+ {"current_steps": 68, "total_steps": 550, "loss": 0.3401, "lr": 4.841455680113979e-06, "epoch": 1.2307692307692308, "percentage": 12.36, "elapsed_time": "3:41:55", "remaining_time": "1 day, 2:13:03"}
69
+ {"current_steps": 69, "total_steps": 550, "loss": 0.3199, "lr": 4.836357086006471e-06, "epoch": 1.248868778280543, "percentage": 12.55, "elapsed_time": "3:44:46", "remaining_time": "1 day, 2:06:56"}
70
+ {"current_steps": 70, "total_steps": 550, "loss": 0.3193, "lr": 4.83118057351089e-06, "epoch": 1.2669683257918551, "percentage": 12.73, "elapsed_time": "3:47:31", "remaining_time": "1 day, 2:00:10"}
71
+ {"current_steps": 71, "total_steps": 550, "loss": 0.3093, "lr": 4.825926315265874e-06, "epoch": 1.2850678733031673, "percentage": 12.91, "elapsed_time": "3:49:50", "remaining_time": "1 day, 1:50:38"}
72
+ {"current_steps": 72, "total_steps": 550, "loss": 0.3147, "lr": 4.820594486502913e-06, "epoch": 1.3031674208144797, "percentage": 13.09, "elapsed_time": "3:52:15", "remaining_time": "1 day, 1:41:53"}
73
+ {"current_steps": 73, "total_steps": 550, "loss": 0.3371, "lr": 4.815185265040504e-06, "epoch": 1.3212669683257918, "percentage": 13.27, "elapsed_time": "3:55:05", "remaining_time": "1 day, 1:36:10"}
74
+ {"current_steps": 74, "total_steps": 550, "loss": 0.3556, "lr": 4.809698831278217e-06, "epoch": 1.3393665158371042, "percentage": 13.45, "elapsed_time": "3:57:38", "remaining_time": "1 day, 1:28:34"}
75
+ {"current_steps": 75, "total_steps": 550, "loss": 0.3098, "lr": 4.804135368190684e-06, "epoch": 1.3574660633484164, "percentage": 13.64, "elapsed_time": "4:00:02", "remaining_time": "1 day, 1:20:14"}
76
+ {"current_steps": 76, "total_steps": 550, "loss": 0.3037, "lr": 4.798495061321492e-06, "epoch": 1.3755656108597285, "percentage": 13.82, "elapsed_time": "4:02:26", "remaining_time": "1 day, 1:12:03"}
77
+ {"current_steps": 77, "total_steps": 550, "loss": 0.3046, "lr": 4.792778098776997e-06, "epoch": 1.3936651583710407, "percentage": 14.0, "elapsed_time": "4:04:51", "remaining_time": "1 day, 1:04:10"}
78
+ {"current_steps": 78, "total_steps": 550, "loss": 0.3146, "lr": 4.786984671220053e-06, "epoch": 1.4117647058823528, "percentage": 14.18, "elapsed_time": "4:07:25", "remaining_time": "1 day, 0:57:14"}
79
+ {"current_steps": 79, "total_steps": 550, "loss": 0.3133, "lr": 4.7811149718636475e-06, "epoch": 1.4298642533936652, "percentage": 14.36, "elapsed_time": "4:09:53", "remaining_time": "1 day, 0:49:53"}
80
+ {"current_steps": 80, "total_steps": 550, "loss": 0.3181, "lr": 4.7751691964644655e-06, "epoch": 1.4479638009049773, "percentage": 14.55, "elapsed_time": "4:12:31", "remaining_time": "1 day, 0:43:36"}
81
+ {"current_steps": 81, "total_steps": 550, "loss": 0.3107, "lr": 4.7691475433163515e-06, "epoch": 1.4660633484162897, "percentage": 14.73, "elapsed_time": "4:15:03", "remaining_time": "1 day, 0:36:51"}
82
+ {"current_steps": 82, "total_steps": 550, "loss": 0.3193, "lr": 4.763050213243705e-06, "epoch": 1.4841628959276019, "percentage": 14.91, "elapsed_time": "4:17:36", "remaining_time": "1 day, 0:30:17"}
83
+ {"current_steps": 83, "total_steps": 550, "loss": 0.3372, "lr": 4.7568774095947804e-06, "epoch": 1.502262443438914, "percentage": 15.09, "elapsed_time": "4:20:15", "remaining_time": "1 day, 0:24:19"}
84
+ {"current_steps": 84, "total_steps": 550, "loss": 0.3058, "lr": 4.7506293382349e-06, "epoch": 1.5203619909502262, "percentage": 15.27, "elapsed_time": "4:22:43", "remaining_time": "1 day, 0:17:28"}
85
+ {"current_steps": 85, "total_steps": 550, "loss": 0.34, "lr": 4.744306207539595e-06, "epoch": 1.5384615384615383, "percentage": 15.45, "elapsed_time": "4:25:17", "remaining_time": "1 day, 0:11:15"}
86
+ {"current_steps": 86, "total_steps": 550, "loss": 0.3285, "lr": 4.737908228387656e-06, "epoch": 1.5565610859728507, "percentage": 15.64, "elapsed_time": "4:27:46", "remaining_time": "1 day, 0:04:46"}
87
+ {"current_steps": 87, "total_steps": 550, "loss": 0.3134, "lr": 4.731435614154094e-06, "epoch": 1.5746606334841629, "percentage": 15.82, "elapsed_time": "4:30:23", "remaining_time": "23:58:57"}
88
+ {"current_steps": 88, "total_steps": 550, "loss": 0.305, "lr": 4.72488858070303e-06, "epoch": 1.5927601809954752, "percentage": 16.0, "elapsed_time": "4:32:48", "remaining_time": "23:52:16"}
89
+ {"current_steps": 89, "total_steps": 550, "loss": 0.3157, "lr": 4.718267346380492e-06, "epoch": 1.6108597285067874, "percentage": 16.18, "elapsed_time": "4:35:15", "remaining_time": "23:45:45"}
90
+ {"current_steps": 90, "total_steps": 550, "loss": 0.3124, "lr": 4.711572132007139e-06, "epoch": 1.6289592760180995, "percentage": 16.36, "elapsed_time": "4:37:40", "remaining_time": "23:39:13"}
91
+ {"current_steps": 91, "total_steps": 550, "loss": 0.3306, "lr": 4.704803160870888e-06, "epoch": 1.6470588235294117, "percentage": 16.55, "elapsed_time": "4:40:15", "remaining_time": "23:33:37"}
92
+ {"current_steps": 92, "total_steps": 550, "loss": 0.3061, "lr": 4.697960658719475e-06, "epoch": 1.6651583710407238, "percentage": 16.73, "elapsed_time": "4:42:47", "remaining_time": "23:27:46"}
93
+ {"current_steps": 93, "total_steps": 550, "loss": 0.3098, "lr": 4.69104485375292e-06, "epoch": 1.6832579185520362, "percentage": 16.91, "elapsed_time": "4:45:17", "remaining_time": "23:21:53"}
94
+ {"current_steps": 94, "total_steps": 550, "loss": 0.3088, "lr": 4.684055976615924e-06, "epoch": 1.7013574660633484, "percentage": 17.09, "elapsed_time": "4:47:42", "remaining_time": "23:15:39"}
95
+ {"current_steps": 95, "total_steps": 550, "loss": 0.2912, "lr": 4.676994260390168e-06, "epoch": 1.7194570135746607, "percentage": 17.27, "elapsed_time": "4:50:02", "remaining_time": "23:09:09"}
96
+ {"current_steps": 96, "total_steps": 550, "loss": 0.303, "lr": 4.6698599405865465e-06, "epoch": 1.737556561085973, "percentage": 17.45, "elapsed_time": "4:52:18", "remaining_time": "23:02:22"}
97
+ {"current_steps": 97, "total_steps": 550, "loss": 0.3348, "lr": 4.662653255137308e-06, "epoch": 1.755656108597285, "percentage": 17.64, "elapsed_time": "4:54:51", "remaining_time": "22:56:59"}
98
+ {"current_steps": 98, "total_steps": 550, "loss": 0.327, "lr": 4.655374444388127e-06, "epoch": 1.7737556561085972, "percentage": 17.82, "elapsed_time": "4:57:27", "remaining_time": "22:51:59"}
99
+ {"current_steps": 99, "total_steps": 550, "loss": 0.3363, "lr": 4.648023751090079e-06, "epoch": 1.7918552036199094, "percentage": 18.0, "elapsed_time": "4:59:56", "remaining_time": "22:46:22"}
100
+ {"current_steps": 100, "total_steps": 550, "loss": 0.3113, "lr": 4.640601420391554e-06, "epoch": 1.8099547511312217, "percentage": 18.18, "elapsed_time": "5:02:21", "remaining_time": "22:40:38"}
101
+ {"current_steps": 101, "total_steps": 550, "loss": 0.3148, "lr": 4.633107699830073e-06, "epoch": 1.8280542986425339, "percentage": 18.36, "elapsed_time": "5:07:39", "remaining_time": "22:47:41"}
102
+ {"current_steps": 102, "total_steps": 550, "loss": 0.2967, "lr": 4.625542839324036e-06, "epoch": 1.8461538461538463, "percentage": 18.55, "elapsed_time": "5:10:04", "remaining_time": "22:41:53"}
103
+ {"current_steps": 103, "total_steps": 550, "loss": 0.3188, "lr": 4.617907091164389e-06, "epoch": 1.8642533936651584, "percentage": 18.73, "elapsed_time": "5:12:30", "remaining_time": "22:36:13"}
104
+ {"current_steps": 104, "total_steps": 550, "loss": 0.3121, "lr": 4.610200710006206e-06, "epoch": 1.8823529411764706, "percentage": 18.91, "elapsed_time": "5:14:52", "remaining_time": "22:30:19"}
105
+ {"current_steps": 105, "total_steps": 550, "loss": 0.3146, "lr": 4.602423952860199e-06, "epoch": 1.9004524886877827, "percentage": 19.09, "elapsed_time": "5:17:25", "remaining_time": "22:25:15"}
106
+ {"current_steps": 106, "total_steps": 550, "loss": 0.3405, "lr": 4.594577079084146e-06, "epoch": 1.9185520361990949, "percentage": 19.27, "elapsed_time": "5:19:52", "remaining_time": "22:19:52"}
107
+ {"current_steps": 107, "total_steps": 550, "loss": 0.3089, "lr": 4.58666035037424e-06, "epoch": 1.9366515837104072, "percentage": 19.45, "elapsed_time": "5:22:21", "remaining_time": "22:14:39"}
108
+ {"current_steps": 108, "total_steps": 550, "loss": 0.3229, "lr": 4.578674030756364e-06, "epoch": 1.9547511312217196, "percentage": 19.64, "elapsed_time": "5:24:50", "remaining_time": "22:09:27"}
109
+ {"current_steps": 109, "total_steps": 550, "loss": 0.3237, "lr": 4.57061838657728e-06, "epoch": 1.9728506787330318, "percentage": 19.82, "elapsed_time": "5:27:16", "remaining_time": "22:04:07"}
110
+ {"current_steps": 110, "total_steps": 550, "loss": 0.3255, "lr": 4.562493686495756e-06, "epoch": 1.990950226244344, "percentage": 20.0, "elapsed_time": "5:29:53", "remaining_time": "21:59:34"}
111
+ {"current_steps": 111, "total_steps": 550, "loss": 0.2988, "lr": 4.5543002014735955e-06, "epoch": 2.009049773755656, "percentage": 20.18, "elapsed_time": "5:32:20", "remaining_time": "21:54:23"}
112
+ {"current_steps": 112, "total_steps": 550, "loss": 0.3109, "lr": 4.546038204766609e-06, "epoch": 2.0271493212669682, "percentage": 20.36, "elapsed_time": "5:34:47", "remaining_time": "21:49:18"}
113
+ {"current_steps": 113, "total_steps": 550, "loss": 0.3066, "lr": 4.537707971915495e-06, "epoch": 2.0452488687782804, "percentage": 20.55, "elapsed_time": "5:37:22", "remaining_time": "21:44:43"}
114
+ {"current_steps": 114, "total_steps": 550, "loss": 0.2939, "lr": 4.529309780736654e-06, "epoch": 2.0633484162895925, "percentage": 20.73, "elapsed_time": "5:39:59", "remaining_time": "21:40:19"}
115
+ {"current_steps": 115, "total_steps": 550, "loss": 0.294, "lr": 4.520843911312922e-06, "epoch": 2.081447963800905, "percentage": 20.91, "elapsed_time": "5:42:14", "remaining_time": "21:34:35"}
116
+ {"current_steps": 116, "total_steps": 550, "loss": 0.2984, "lr": 4.512310645984231e-06, "epoch": 2.0995475113122173, "percentage": 21.09, "elapsed_time": "5:44:43", "remaining_time": "21:29:43"}
117
+ {"current_steps": 117, "total_steps": 550, "loss": 0.2694, "lr": 4.503710269338191e-06, "epoch": 2.1176470588235294, "percentage": 21.27, "elapsed_time": "5:46:54", "remaining_time": "21:23:50"}
118
+ {"current_steps": 118, "total_steps": 550, "loss": 0.2979, "lr": 4.4950430682005995e-06, "epoch": 2.1357466063348416, "percentage": 21.45, "elapsed_time": "5:49:16", "remaining_time": "21:18:41"}
119
+ {"current_steps": 119, "total_steps": 550, "loss": 0.2874, "lr": 4.486309331625877e-06, "epoch": 2.1538461538461537, "percentage": 21.64, "elapsed_time": "5:51:39", "remaining_time": "21:13:40"}
120
+ {"current_steps": 120, "total_steps": 550, "loss": 0.291, "lr": 4.477509350887424e-06, "epoch": 2.171945701357466, "percentage": 21.82, "elapsed_time": "5:53:57", "remaining_time": "21:08:20"}
121
+ {"current_steps": 121, "total_steps": 550, "loss": 0.2921, "lr": 4.468643419467909e-06, "epoch": 2.1900452488687785, "percentage": 22.0, "elapsed_time": "5:56:22", "remaining_time": "21:03:29"}
122
+ {"current_steps": 122, "total_steps": 550, "loss": 0.3061, "lr": 4.459711833049485e-06, "epoch": 2.2081447963800906, "percentage": 22.18, "elapsed_time": "5:58:47", "remaining_time": "20:58:43"}
123
+ {"current_steps": 123, "total_steps": 550, "loss": 0.2765, "lr": 4.4507148895039165e-06, "epoch": 2.226244343891403, "percentage": 22.36, "elapsed_time": "6:01:12", "remaining_time": "20:53:56"}
124
+ {"current_steps": 124, "total_steps": 550, "loss": 0.2969, "lr": 4.4416528888826595e-06, "epoch": 2.244343891402715, "percentage": 22.55, "elapsed_time": "6:03:41", "remaining_time": "20:49:25"}
125
+ {"current_steps": 125, "total_steps": 550, "loss": 0.3044, "lr": 4.432526133406843e-06, "epoch": 2.262443438914027, "percentage": 22.73, "elapsed_time": "6:06:08", "remaining_time": "20:44:51"}
126
+ {"current_steps": 126, "total_steps": 550, "loss": 0.3132, "lr": 4.423334927457198e-06, "epoch": 2.2805429864253393, "percentage": 22.91, "elapsed_time": "6:08:46", "remaining_time": "20:40:58"}
127
+ {"current_steps": 127, "total_steps": 550, "loss": 0.3032, "lr": 4.414079577563901e-06, "epoch": 2.2986425339366514, "percentage": 23.09, "elapsed_time": "6:11:17", "remaining_time": "20:36:40"}
128
+ {"current_steps": 128, "total_steps": 550, "loss": 0.3033, "lr": 4.404760392396355e-06, "epoch": 2.3167420814479636, "percentage": 23.27, "elapsed_time": "6:13:35", "remaining_time": "20:31:40"}
129
+ {"current_steps": 129, "total_steps": 550, "loss": 0.3039, "lr": 4.3953776827528925e-06, "epoch": 2.334841628959276, "percentage": 23.45, "elapsed_time": "6:16:03", "remaining_time": "20:27:16"}
130
+ {"current_steps": 130, "total_steps": 550, "loss": 0.2793, "lr": 4.385931761550411e-06, "epoch": 2.3529411764705883, "percentage": 23.64, "elapsed_time": "6:18:21", "remaining_time": "20:22:22"}
131
+ {"current_steps": 131, "total_steps": 550, "loss": 0.2849, "lr": 4.376422943813936e-06, "epoch": 2.3710407239819005, "percentage": 23.82, "elapsed_time": "6:20:41", "remaining_time": "20:17:39"}
132
+ {"current_steps": 132, "total_steps": 550, "loss": 0.3129, "lr": 4.366851546666118e-06, "epoch": 2.3891402714932126, "percentage": 24.0, "elapsed_time": "6:23:20", "remaining_time": "20:13:55"}
133
+ {"current_steps": 133, "total_steps": 550, "loss": 0.3041, "lr": 4.357217889316657e-06, "epoch": 2.4072398190045248, "percentage": 24.18, "elapsed_time": "6:25:57", "remaining_time": "20:10:06"}
134
+ {"current_steps": 134, "total_steps": 550, "loss": 0.2839, "lr": 4.3475222930516484e-06, "epoch": 2.425339366515837, "percentage": 24.36, "elapsed_time": "6:28:18", "remaining_time": "20:05:30"}
135
+ {"current_steps": 135, "total_steps": 550, "loss": 0.3014, "lr": 4.3377650812228765e-06, "epoch": 2.4434389140271495, "percentage": 24.55, "elapsed_time": "6:30:43", "remaining_time": "20:01:06"}
136
+ {"current_steps": 136, "total_steps": 550, "loss": 0.2834, "lr": 4.327946579237028e-06, "epoch": 2.4615384615384617, "percentage": 24.73, "elapsed_time": "6:33:10", "remaining_time": "19:56:52"}
137
+ {"current_steps": 137, "total_steps": 550, "loss": 0.2796, "lr": 4.318067114544838e-06, "epoch": 2.479638009049774, "percentage": 24.91, "elapsed_time": "6:35:41", "remaining_time": "19:52:51"}
138
+ {"current_steps": 138, "total_steps": 550, "loss": 0.2972, "lr": 4.308127016630176e-06, "epoch": 2.497737556561086, "percentage": 25.09, "elapsed_time": "6:38:04", "remaining_time": "19:48:27"}
139
+ {"current_steps": 139, "total_steps": 550, "loss": 0.3196, "lr": 4.2981266169990436e-06, "epoch": 2.515837104072398, "percentage": 25.27, "elapsed_time": "6:40:45", "remaining_time": "19:44:58"}
140
+ {"current_steps": 140, "total_steps": 550, "loss": 0.3003, "lr": 4.2880662491685345e-06, "epoch": 2.5339366515837103, "percentage": 25.45, "elapsed_time": "6:43:10", "remaining_time": "19:40:43"}
141
+ {"current_steps": 141, "total_steps": 550, "loss": 0.2947, "lr": 4.277946248655701e-06, "epoch": 2.5520361990950224, "percentage": 25.64, "elapsed_time": "6:45:38", "remaining_time": "19:36:38"}
142
+ {"current_steps": 142, "total_steps": 550, "loss": 0.2958, "lr": 4.267766952966369e-06, "epoch": 2.5701357466063346, "percentage": 25.82, "elapsed_time": "6:47:59", "remaining_time": "19:32:14"}
143
+ {"current_steps": 143, "total_steps": 550, "loss": 0.2998, "lr": 4.257528701583882e-06, "epoch": 2.588235294117647, "percentage": 26.0, "elapsed_time": "6:50:32", "remaining_time": "19:28:27"}
144
+ {"current_steps": 144, "total_steps": 550, "loss": 0.3408, "lr": 4.247231835957773e-06, "epoch": 2.6063348416289593, "percentage": 26.18, "elapsed_time": "6:52:56", "remaining_time": "19:24:14"}
145
+ {"current_steps": 145, "total_steps": 550, "loss": 0.3117, "lr": 4.236876699492391e-06, "epoch": 2.6244343891402715, "percentage": 26.36, "elapsed_time": "6:55:28", "remaining_time": "19:20:27"}
146
+ {"current_steps": 146, "total_steps": 550, "loss": 0.3152, "lr": 4.226463637535429e-06, "epoch": 2.6425339366515836, "percentage": 26.55, "elapsed_time": "6:57:56", "remaining_time": "19:16:30"}
147
+ {"current_steps": 147, "total_steps": 550, "loss": 0.3142, "lr": 4.215992997366425e-06, "epoch": 2.660633484162896, "percentage": 26.73, "elapsed_time": "7:00:34", "remaining_time": "19:12:59"}
148
+ {"current_steps": 148, "total_steps": 550, "loss": 0.3081, "lr": 4.2054651281851685e-06, "epoch": 2.6787330316742084, "percentage": 26.91, "elapsed_time": "7:03:22", "remaining_time": "19:09:58"}
149
+ {"current_steps": 149, "total_steps": 550, "loss": 0.2894, "lr": 4.1948803811000585e-06, "epoch": 2.6968325791855206, "percentage": 27.09, "elapsed_time": "7:05:50", "remaining_time": "19:06:02"}
150
+ {"current_steps": 150, "total_steps": 550, "loss": 0.2984, "lr": 4.184239109116393e-06, "epoch": 2.7149321266968327, "percentage": 27.27, "elapsed_time": "7:08:21", "remaining_time": "19:02:16"}
151
+ {"current_steps": 151, "total_steps": 550, "loss": 0.3097, "lr": 4.173541667124599e-06, "epoch": 2.733031674208145, "percentage": 27.45, "elapsed_time": "7:13:39", "remaining_time": "19:05:53"}
152
+ {"current_steps": 152, "total_steps": 550, "loss": 0.3177, "lr": 4.1627884118883925e-06, "epoch": 2.751131221719457, "percentage": 27.64, "elapsed_time": "7:16:14", "remaining_time": "19:02:15"}
153
+ {"current_steps": 153, "total_steps": 550, "loss": 0.3101, "lr": 4.1519797020328815e-06, "epoch": 2.769230769230769, "percentage": 27.82, "elapsed_time": "7:18:40", "remaining_time": "18:58:16"}
154
+ {"current_steps": 154, "total_steps": 550, "loss": 0.274, "lr": 4.141115898032607e-06, "epoch": 2.7873303167420813, "percentage": 28.0, "elapsed_time": "7:20:58", "remaining_time": "18:53:55"}
155
+ {"current_steps": 155, "total_steps": 550, "loss": 0.2926, "lr": 4.130197362199521e-06, "epoch": 2.8054298642533935, "percentage": 28.18, "elapsed_time": "7:23:35", "remaining_time": "18:50:25"}
156
+ {"current_steps": 156, "total_steps": 550, "loss": 0.2875, "lr": 4.119224458670905e-06, "epoch": 2.8235294117647056, "percentage": 28.36, "elapsed_time": "7:25:55", "remaining_time": "18:46:13"}
157
+ {"current_steps": 157, "total_steps": 550, "loss": 0.2947, "lr": 4.1081975533972185e-06, "epoch": 2.841628959276018, "percentage": 28.55, "elapsed_time": "7:28:30", "remaining_time": "18:42:41"}
158
+ {"current_steps": 158, "total_steps": 550, "loss": 0.296, "lr": 4.097117014129903e-06, "epoch": 2.8597285067873304, "percentage": 28.73, "elapsed_time": "7:31:06", "remaining_time": "18:39:13"}
159
+ {"current_steps": 159, "total_steps": 550, "loss": 0.2988, "lr": 4.085983210409114e-06, "epoch": 2.8778280542986425, "percentage": 28.91, "elapsed_time": "7:33:44", "remaining_time": "18:35:48"}
160
+ {"current_steps": 160, "total_steps": 550, "loss": 0.2952, "lr": 4.074796513551395e-06, "epoch": 2.8959276018099547, "percentage": 29.09, "elapsed_time": "7:36:24", "remaining_time": "18:32:29"}
161
+ {"current_steps": 161, "total_steps": 550, "loss": 0.3099, "lr": 4.063557296637295e-06, "epoch": 2.914027149321267, "percentage": 29.27, "elapsed_time": "7:38:43", "remaining_time": "18:28:20"}
162
+ {"current_steps": 162, "total_steps": 550, "loss": 0.2974, "lr": 4.052265934498929e-06, "epoch": 2.9321266968325794, "percentage": 29.45, "elapsed_time": "7:41:36", "remaining_time": "18:25:34"}
163
+ {"current_steps": 163, "total_steps": 550, "loss": 0.3065, "lr": 4.040922803707474e-06, "epoch": 2.9502262443438916, "percentage": 29.64, "elapsed_time": "7:44:03", "remaining_time": "18:21:46"}
164
+ {"current_steps": 164, "total_steps": 550, "loss": 0.2886, "lr": 4.029528282560609e-06, "epoch": 2.9683257918552037, "percentage": 29.82, "elapsed_time": "7:46:33", "remaining_time": "18:18:06"}
165
+ {"current_steps": 165, "total_steps": 550, "loss": 0.3076, "lr": 4.018082751069904e-06, "epoch": 2.986425339366516, "percentage": 30.0, "elapsed_time": "7:49:14", "remaining_time": "18:14:53"}
166
+ {"current_steps": 166, "total_steps": 550, "loss": 0.2985, "lr": 4.006586590948141e-06, "epoch": 3.004524886877828, "percentage": 30.18, "elapsed_time": "7:51:41", "remaining_time": "18:11:08"}
167
+ {"current_steps": 167, "total_steps": 550, "loss": 0.2754, "lr": 3.995040185596588e-06, "epoch": 3.02262443438914, "percentage": 30.36, "elapsed_time": "7:54:11", "remaining_time": "18:07:31"}
168
+ {"current_steps": 168, "total_steps": 550, "loss": 0.2854, "lr": 3.983443920092206e-06, "epoch": 3.0407239819004523, "percentage": 30.55, "elapsed_time": "7:56:41", "remaining_time": "18:03:54"}
169
+ {"current_steps": 169, "total_steps": 550, "loss": 0.2832, "lr": 3.971798181174816e-06, "epoch": 3.0588235294117645, "percentage": 30.73, "elapsed_time": "7:59:07", "remaining_time": "18:00:08"}
170
+ {"current_steps": 170, "total_steps": 550, "loss": 0.2986, "lr": 3.960103357234192e-06, "epoch": 3.076923076923077, "percentage": 30.91, "elapsed_time": "8:01:35", "remaining_time": "17:56:31"}
171
+ {"current_steps": 171, "total_steps": 550, "loss": 0.2876, "lr": 3.948359838297115e-06, "epoch": 3.0950226244343892, "percentage": 31.09, "elapsed_time": "8:03:57", "remaining_time": "17:52:38"}
172
+ {"current_steps": 172, "total_steps": 550, "loss": 0.2971, "lr": 3.9365680160143595e-06, "epoch": 3.1131221719457014, "percentage": 31.27, "elapsed_time": "8:06:24", "remaining_time": "17:48:58"}
173
+ {"current_steps": 173, "total_steps": 550, "loss": 0.279, "lr": 3.924728283647638e-06, "epoch": 3.1312217194570136, "percentage": 31.45, "elapsed_time": "8:09:03", "remaining_time": "17:45:44"}
+ {"current_steps": 174, "total_steps": 550, "loss": 0.2903, "lr": 3.91284103605648e-06, "epoch": 3.1493212669683257, "percentage": 31.64, "elapsed_time": "8:11:40", "remaining_time": "17:42:28"}
+ {"current_steps": 175, "total_steps": 550, "loss": 0.2964, "lr": 3.9009066696850664e-06, "epoch": 3.167420814479638, "percentage": 31.82, "elapsed_time": "8:14:13", "remaining_time": "17:39:02"}
+ {"current_steps": 176, "total_steps": 550, "loss": 0.2946, "lr": 3.888925582549006e-06, "epoch": 3.1855203619909505, "percentage": 32.0, "elapsed_time": "8:16:47", "remaining_time": "17:35:40"}
+ {"current_steps": 177, "total_steps": 550, "loss": 0.2811, "lr": 3.8768981742220646e-06, "epoch": 3.2036199095022626, "percentage": 32.18, "elapsed_time": "8:19:13", "remaining_time": "17:32:02"}
+ {"current_steps": 178, "total_steps": 550, "loss": 0.2825, "lr": 3.864824845822837e-06, "epoch": 3.2217194570135748, "percentage": 32.36, "elapsed_time": "8:21:45", "remaining_time": "17:28:37"}
+ {"current_steps": 179, "total_steps": 550, "loss": 0.2903, "lr": 3.852706000001367e-06, "epoch": 3.239819004524887, "percentage": 32.55, "elapsed_time": "8:24:24", "remaining_time": "17:25:26"}
+ {"current_steps": 180, "total_steps": 550, "loss": 0.2626, "lr": 3.840542040925725e-06, "epoch": 3.257918552036199, "percentage": 32.73, "elapsed_time": "8:26:33", "remaining_time": "17:21:15"}
+ {"current_steps": 181, "total_steps": 550, "loss": 0.2906, "lr": 3.828333374268523e-06, "epoch": 3.276018099547511, "percentage": 32.91, "elapsed_time": "8:29:08", "remaining_time": "17:17:57"}
+ {"current_steps": 182, "total_steps": 550, "loss": 0.2862, "lr": 3.81608040719339e-06, "epoch": 3.2941176470588234, "percentage": 33.09, "elapsed_time": "8:31:47", "remaining_time": "17:14:50"}
+ {"current_steps": 183, "total_steps": 550, "loss": 0.2855, "lr": 3.8037835483413877e-06, "epoch": 3.3122171945701355, "percentage": 33.27, "elapsed_time": "8:34:09", "remaining_time": "17:11:08"}
+ {"current_steps": 184, "total_steps": 550, "loss": 0.2795, "lr": 3.7914432078173867e-06, "epoch": 3.330316742081448, "percentage": 33.45, "elapsed_time": "8:36:40", "remaining_time": "17:07:44"}
+ {"current_steps": 185, "total_steps": 550, "loss": 0.2836, "lr": 3.7790597971763892e-06, "epoch": 3.3484162895927603, "percentage": 33.64, "elapsed_time": "8:39:29", "remaining_time": "17:04:56"}
+ {"current_steps": 186, "total_steps": 550, "loss": 0.288, "lr": 3.7666337294097987e-06, "epoch": 3.3665158371040724, "percentage": 33.82, "elapsed_time": "8:41:56", "remaining_time": "17:01:25"}
+ {"current_steps": 187, "total_steps": 550, "loss": 0.275, "lr": 3.7541654189316525e-06, "epoch": 3.3846153846153846, "percentage": 34.0, "elapsed_time": "8:44:23", "remaining_time": "16:57:56"}
+ {"current_steps": 188, "total_steps": 550, "loss": 0.2966, "lr": 3.741655281564796e-06, "epoch": 3.4027149321266967, "percentage": 34.18, "elapsed_time": "8:46:51", "remaining_time": "16:54:29"}
+ {"current_steps": 189, "total_steps": 550, "loss": 0.2702, "lr": 3.72910373452702e-06, "epoch": 3.420814479638009, "percentage": 34.36, "elapsed_time": "8:49:15", "remaining_time": "16:50:54"}
+ {"current_steps": 190, "total_steps": 550, "loss": 0.2718, "lr": 3.7165111964171407e-06, "epoch": 3.4389140271493215, "percentage": 34.55, "elapsed_time": "8:51:50", "remaining_time": "16:47:41"}
+ {"current_steps": 191, "total_steps": 550, "loss": 0.2785, "lr": 3.703878087201044e-06, "epoch": 3.4570135746606336, "percentage": 34.73, "elapsed_time": "8:54:31", "remaining_time": "16:44:41"}
+ {"current_steps": 192, "total_steps": 550, "loss": 0.2991, "lr": 3.6912048281976764e-06, "epoch": 3.475113122171946, "percentage": 34.91, "elapsed_time": "8:56:51", "remaining_time": "16:41:01"}
+ {"current_steps": 193, "total_steps": 550, "loss": 0.2814, "lr": 3.6784918420649952e-06, "epoch": 3.493212669683258, "percentage": 35.09, "elapsed_time": "8:59:26", "remaining_time": "16:37:49"}
+ {"current_steps": 194, "total_steps": 550, "loss": 0.2719, "lr": 3.66573955278587e-06, "epoch": 3.51131221719457, "percentage": 35.27, "elapsed_time": "9:01:53", "remaining_time": "16:34:23"}
+ {"current_steps": 195, "total_steps": 550, "loss": 0.2639, "lr": 3.6529483856539512e-06, "epoch": 3.5294117647058822, "percentage": 35.45, "elapsed_time": "9:04:21", "remaining_time": "16:31:01"}
+ {"current_steps": 196, "total_steps": 550, "loss": 0.2712, "lr": 3.640118767259474e-06, "epoch": 3.5475113122171944, "percentage": 35.64, "elapsed_time": "9:06:59", "remaining_time": "16:27:55"}
+ {"current_steps": 197, "total_steps": 550, "loss": 0.2825, "lr": 3.6272511254750403e-06, "epoch": 3.5656108597285066, "percentage": 35.82, "elapsed_time": "9:09:28", "remaining_time": "16:24:35"}
+ {"current_steps": 198, "total_steps": 550, "loss": 0.2977, "lr": 3.6143458894413463e-06, "epoch": 3.583710407239819, "percentage": 36.0, "elapsed_time": "9:11:51", "remaining_time": "16:21:04"}
+ {"current_steps": 199, "total_steps": 550, "loss": 0.284, "lr": 3.6014034895528705e-06, "epoch": 3.6018099547511313, "percentage": 36.18, "elapsed_time": "9:14:12", "remaining_time": "16:17:32"}
+ {"current_steps": 200, "total_steps": 550, "loss": 0.2782, "lr": 3.588424357443521e-06, "epoch": 3.6199095022624435, "percentage": 36.36, "elapsed_time": "9:16:58", "remaining_time": "16:14:42"}
+ {"current_steps": 201, "total_steps": 550, "loss": 0.2902, "lr": 3.5754089259722365e-06, "epoch": 3.6380090497737556, "percentage": 36.55, "elapsed_time": "9:21:54", "remaining_time": "16:15:39"}
+ {"current_steps": 202, "total_steps": 550, "loss": 0.294, "lr": 3.5623576292085555e-06, "epoch": 3.6561085972850678, "percentage": 36.73, "elapsed_time": "9:24:23", "remaining_time": "16:12:19"}
+ {"current_steps": 203, "total_steps": 550, "loss": 0.2715, "lr": 3.549270902418136e-06, "epoch": 3.6742081447963804, "percentage": 36.91, "elapsed_time": "9:26:54", "remaining_time": "16:09:03"}
+ {"current_steps": 204, "total_steps": 550, "loss": 0.2823, "lr": 3.536149182048243e-06, "epoch": 3.6923076923076925, "percentage": 37.09, "elapsed_time": "9:29:32", "remaining_time": "16:05:58"}
+ {"current_steps": 205, "total_steps": 550, "loss": 0.2881, "lr": 3.5229929057131877e-06, "epoch": 3.7104072398190047, "percentage": 37.27, "elapsed_time": "9:31:59", "remaining_time": "16:02:36"}
+ {"current_steps": 206, "total_steps": 550, "loss": 0.2999, "lr": 3.5098025121797375e-06, "epoch": 3.728506787330317, "percentage": 37.45, "elapsed_time": "9:34:34", "remaining_time": "15:59:28"}
+ {"current_steps": 207, "total_steps": 550, "loss": 0.2687, "lr": 3.496578441352481e-06, "epoch": 3.746606334841629, "percentage": 37.64, "elapsed_time": "9:37:12", "remaining_time": "15:56:26"}
+ {"current_steps": 208, "total_steps": 550, "loss": 0.2866, "lr": 3.4833211342591565e-06, "epoch": 3.764705882352941, "percentage": 37.82, "elapsed_time": "9:39:58", "remaining_time": "15:53:37"}
+ {"current_steps": 209, "total_steps": 550, "loss": 0.2805, "lr": 3.4700310330359456e-06, "epoch": 3.7828054298642533, "percentage": 38.0, "elapsed_time": "9:42:37", "remaining_time": "15:50:36"}
+ {"current_steps": 210, "total_steps": 550, "loss": 0.2864, "lr": 3.4567085809127247e-06, "epoch": 3.8009049773755654, "percentage": 38.18, "elapsed_time": "9:45:14", "remaining_time": "15:47:32"}
+ {"current_steps": 211, "total_steps": 550, "loss": 0.2847, "lr": 3.4433542221982863e-06, "epoch": 3.8190045248868776, "percentage": 38.36, "elapsed_time": "9:47:44", "remaining_time": "15:44:16"}
+ {"current_steps": 212, "total_steps": 550, "loss": 0.285, "lr": 3.4299684022655196e-06, "epoch": 3.83710407239819, "percentage": 38.55, "elapsed_time": "9:50:16", "remaining_time": "15:41:05"}
+ {"current_steps": 213, "total_steps": 550, "loss": 0.2862, "lr": 3.4165515675365558e-06, "epoch": 3.8552036199095023, "percentage": 38.73, "elapsed_time": "9:52:43", "remaining_time": "15:37:46"}
+ {"current_steps": 214, "total_steps": 550, "loss": 0.2748, "lr": 3.403104165467883e-06, "epoch": 3.8733031674208145, "percentage": 38.91, "elapsed_time": "9:55:16", "remaining_time": "15:34:38"}
+ {"current_steps": 215, "total_steps": 550, "loss": 0.2875, "lr": 3.3896266445354208e-06, "epoch": 3.8914027149321266, "percentage": 39.09, "elapsed_time": "9:57:39", "remaining_time": "15:31:14"}
+ {"current_steps": 216, "total_steps": 550, "loss": 0.2811, "lr": 3.376119454219565e-06, "epoch": 3.909502262443439, "percentage": 39.27, "elapsed_time": "10:00:14", "remaining_time": "15:28:08"}
+ {"current_steps": 217, "total_steps": 550, "loss": 0.2923, "lr": 3.362583044990195e-06, "epoch": 3.9276018099547514, "percentage": 39.45, "elapsed_time": "10:02:38", "remaining_time": "15:24:48"}
+ {"current_steps": 218, "total_steps": 550, "loss": 0.2784, "lr": 3.3490178682916534e-06, "epoch": 3.9457013574660635, "percentage": 39.64, "elapsed_time": "10:04:51", "remaining_time": "15:21:09"}
+ {"current_steps": 219, "total_steps": 550, "loss": 0.2796, "lr": 3.335424376527688e-06, "epoch": 3.9638009049773757, "percentage": 39.82, "elapsed_time": "10:07:31", "remaining_time": "15:18:13"}
+ {"current_steps": 220, "total_steps": 550, "loss": 0.2855, "lr": 3.321803023046366e-06, "epoch": 3.981900452488688, "percentage": 40.0, "elapsed_time": "10:09:56", "remaining_time": "15:14:54"}
+ {"current_steps": 221, "total_steps": 550, "loss": 0.2722, "lr": 3.3081542621249503e-06, "epoch": 4.0, "percentage": 40.18, "elapsed_time": "10:12:22", "remaining_time": "15:11:37"}
+ {"current_steps": 222, "total_steps": 550, "loss": 0.2769, "lr": 3.2944785489547544e-06, "epoch": 4.018099547511312, "percentage": 40.36, "elapsed_time": "10:14:52", "remaining_time": "15:08:28"}
+ {"current_steps": 223, "total_steps": 550, "loss": 0.2755, "lr": 3.2807763396259597e-06, "epoch": 4.036199095022624, "percentage": 40.55, "elapsed_time": "10:17:19", "remaining_time": "15:05:13"}
+ {"current_steps": 224, "total_steps": 550, "loss": 0.2457, "lr": 3.2670480911124045e-06, "epoch": 4.0542986425339365, "percentage": 40.73, "elapsed_time": "10:19:41", "remaining_time": "15:01:51"}
+ {"current_steps": 225, "total_steps": 550, "loss": 0.3084, "lr": 3.2532942612563436e-06, "epoch": 4.072398190045249, "percentage": 40.91, "elapsed_time": "10:22:15", "remaining_time": "14:58:49"}
+ {"current_steps": 226, "total_steps": 550, "loss": 0.2688, "lr": 3.2395153087531767e-06, "epoch": 4.090497737556561, "percentage": 41.09, "elapsed_time": "10:24:50", "remaining_time": "14:55:47"}
+ {"current_steps": 227, "total_steps": 550, "loss": 0.2678, "lr": 3.225711693136156e-06, "epoch": 4.108597285067873, "percentage": 41.27, "elapsed_time": "10:27:17", "remaining_time": "14:52:34"}
+ {"current_steps": 228, "total_steps": 550, "loss": 0.2636, "lr": 3.211883874761058e-06, "epoch": 4.126696832579185, "percentage": 41.45, "elapsed_time": "10:29:46", "remaining_time": "14:49:24"}
+ {"current_steps": 229, "total_steps": 550, "loss": 0.282, "lr": 3.19803231479083e-06, "epoch": 4.144796380090498, "percentage": 41.64, "elapsed_time": "10:32:21", "remaining_time": "14:46:23"}
+ {"current_steps": 230, "total_steps": 550, "loss": 0.2689, "lr": 3.184157475180208e-06, "epoch": 4.16289592760181, "percentage": 41.82, "elapsed_time": "10:34:53", "remaining_time": "14:43:19"}
+ {"current_steps": 231, "total_steps": 550, "loss": 0.2583, "lr": 3.1702598186603152e-06, "epoch": 4.180995475113122, "percentage": 42.0, "elapsed_time": "10:37:23", "remaining_time": "14:40:12"}
+ {"current_steps": 232, "total_steps": 550, "loss": 0.2795, "lr": 3.1563398087232265e-06, "epoch": 4.199095022624435, "percentage": 42.18, "elapsed_time": "10:39:50", "remaining_time": "14:37:00"}
+ {"current_steps": 233, "total_steps": 550, "loss": 0.2605, "lr": 3.1423979096065134e-06, "epoch": 4.217194570135747, "percentage": 42.36, "elapsed_time": "10:42:19", "remaining_time": "14:33:54"}
+ {"current_steps": 234, "total_steps": 550, "loss": 0.2592, "lr": 3.1284345862777572e-06, "epoch": 4.235294117647059, "percentage": 42.55, "elapsed_time": "10:44:53", "remaining_time": "14:30:52"}
+ {"current_steps": 235, "total_steps": 550, "loss": 0.2642, "lr": 3.1144503044190456e-06, "epoch": 4.253393665158371, "percentage": 42.73, "elapsed_time": "10:47:25", "remaining_time": "14:27:49"}
+ {"current_steps": 236, "total_steps": 550, "loss": 0.2376, "lr": 3.100445530411442e-06, "epoch": 4.271493212669683, "percentage": 42.91, "elapsed_time": "10:49:46", "remaining_time": "14:24:32"}
+ {"current_steps": 237, "total_steps": 550, "loss": 0.2708, "lr": 3.086420731319429e-06, "epoch": 4.289592760180995, "percentage": 43.09, "elapsed_time": "10:52:22", "remaining_time": "14:21:34"}
+ {"current_steps": 238, "total_steps": 550, "loss": 0.2844, "lr": 3.0723763748753354e-06, "epoch": 4.3076923076923075, "percentage": 43.27, "elapsed_time": "10:54:49", "remaining_time": "14:18:25"}
+ {"current_steps": 239, "total_steps": 550, "loss": 0.2487, "lr": 3.0583129294637342e-06, "epoch": 4.32579185520362, "percentage": 43.45, "elapsed_time": "10:57:16", "remaining_time": "14:15:17"}
+ {"current_steps": 240, "total_steps": 550, "loss": 0.256, "lr": 3.044230864105821e-06, "epoch": 4.343891402714932, "percentage": 43.64, "elapsed_time": "10:59:44", "remaining_time": "14:12:10"}
+ {"current_steps": 241, "total_steps": 550, "loss": 0.2788, "lr": 3.030130648443777e-06, "epoch": 4.361990950226244, "percentage": 43.82, "elapsed_time": "11:02:30", "remaining_time": "14:09:26"}
+ {"current_steps": 242, "total_steps": 550, "loss": 0.2808, "lr": 3.0160127527250993e-06, "epoch": 4.380090497737557, "percentage": 44.0, "elapsed_time": "11:05:04", "remaining_time": "14:06:27"}
+ {"current_steps": 243, "total_steps": 550, "loss": 0.2654, "lr": 3.0018776477869244e-06, "epoch": 4.398190045248869, "percentage": 44.18, "elapsed_time": "11:07:28", "remaining_time": "14:03:16"}
+ {"current_steps": 244, "total_steps": 550, "loss": 0.2753, "lr": 2.9877258050403214e-06, "epoch": 4.416289592760181, "percentage": 44.36, "elapsed_time": "11:09:46", "remaining_time": "13:59:57"}
+ {"current_steps": 245, "total_steps": 550, "loss": 0.2627, "lr": 2.973557696454571e-06, "epoch": 4.4343891402714934, "percentage": 44.55, "elapsed_time": "11:12:12", "remaining_time": "13:56:49"}
+ {"current_steps": 246, "total_steps": 550, "loss": 0.2779, "lr": 2.9593737945414264e-06, "epoch": 4.452488687782806, "percentage": 44.73, "elapsed_time": "11:14:41", "remaining_time": "13:53:46"}
+ {"current_steps": 247, "total_steps": 550, "loss": 0.2747, "lr": 2.9451745723393547e-06, "epoch": 4.470588235294118, "percentage": 44.91, "elapsed_time": "11:17:19", "remaining_time": "13:50:53"}
+ {"current_steps": 248, "total_steps": 550, "loss": 0.2726, "lr": 2.930960503397761e-06, "epoch": 4.48868778280543, "percentage": 45.09, "elapsed_time": "11:19:50", "remaining_time": "13:47:52"}
+ {"current_steps": 249, "total_steps": 550, "loss": 0.2646, "lr": 2.916732061761192e-06, "epoch": 4.506787330316742, "percentage": 45.27, "elapsed_time": "11:22:19", "remaining_time": "13:44:49"}
+ {"current_steps": 250, "total_steps": 550, "loss": 0.279, "lr": 2.9024897219535326e-06, "epoch": 4.524886877828054, "percentage": 45.45, "elapsed_time": "11:25:05", "remaining_time": "13:42:06"}
+ {"current_steps": 251, "total_steps": 550, "loss": 0.2795, "lr": 2.8882339589621742e-06, "epoch": 4.542986425339366, "percentage": 45.64, "elapsed_time": "11:30:47", "remaining_time": "13:42:54"}
+ {"current_steps": 252, "total_steps": 550, "loss": 0.2672, "lr": 2.873965248222178e-06, "epoch": 4.5610859728506785, "percentage": 45.82, "elapsed_time": "11:33:04", "remaining_time": "13:39:34"}
+ {"current_steps": 253, "total_steps": 550, "loss": 0.2478, "lr": 2.859684065600417e-06, "epoch": 4.579185520361991, "percentage": 46.0, "elapsed_time": "11:35:24", "remaining_time": "13:36:20"}
+ {"current_steps": 254, "total_steps": 550, "loss": 0.2639, "lr": 2.845390887379706e-06, "epoch": 4.597285067873303, "percentage": 46.18, "elapsed_time": "11:38:00", "remaining_time": "13:33:25"}
+ {"current_steps": 255, "total_steps": 550, "loss": 0.2725, "lr": 2.8310861902429176e-06, "epoch": 4.615384615384615, "percentage": 46.36, "elapsed_time": "11:40:36", "remaining_time": "13:30:30"}
+ {"current_steps": 256, "total_steps": 550, "loss": 0.2685, "lr": 2.816770451257085e-06, "epoch": 4.633484162895927, "percentage": 46.55, "elapsed_time": "11:42:54", "remaining_time": "13:27:14"}
+ {"current_steps": 257, "total_steps": 550, "loss": 0.2572, "lr": 2.80244414785749e-06, "epoch": 4.65158371040724, "percentage": 46.73, "elapsed_time": "11:45:18", "remaining_time": "13:24:06"}
+ {"current_steps": 258, "total_steps": 550, "loss": 0.2924, "lr": 2.7881077578317445e-06, "epoch": 4.669683257918552, "percentage": 46.91, "elapsed_time": "11:47:51", "remaining_time": "13:21:08"}
+ {"current_steps": 259, "total_steps": 550, "loss": 0.2714, "lr": 2.7737617593038493e-06, "epoch": 4.6877828054298645, "percentage": 47.09, "elapsed_time": "11:50:19", "remaining_time": "13:18:05"}
+ {"current_steps": 260, "total_steps": 550, "loss": 0.2609, "lr": 2.759406630718255e-06, "epoch": 4.705882352941177, "percentage": 47.27, "elapsed_time": "11:52:48", "remaining_time": "13:15:03"}
+ {"current_steps": 261, "total_steps": 550, "loss": 0.2662, "lr": 2.7450428508239024e-06, "epoch": 4.723981900452489, "percentage": 47.45, "elapsed_time": "11:55:12", "remaining_time": "13:11:56"}
+ {"current_steps": 262, "total_steps": 550, "loss": 0.2549, "lr": 2.730670898658255e-06, "epoch": 4.742081447963801, "percentage": 47.64, "elapsed_time": "11:57:41", "remaining_time": "13:08:54"}
+ {"current_steps": 263, "total_steps": 550, "loss": 0.2873, "lr": 2.716291253531329e-06, "epoch": 4.760180995475113, "percentage": 47.82, "elapsed_time": "12:00:13", "remaining_time": "13:05:57"}
+ {"current_steps": 264, "total_steps": 550, "loss": 0.2674, "lr": 2.7019043950096992e-06, "epoch": 4.778280542986425, "percentage": 48.0, "elapsed_time": "12:02:41", "remaining_time": "13:02:54"}
+ {"current_steps": 265, "total_steps": 550, "loss": 0.2724, "lr": 2.6875108029005113e-06, "epoch": 4.796380090497737, "percentage": 48.18, "elapsed_time": "12:05:11", "remaining_time": "12:59:55"}
+ {"current_steps": 266, "total_steps": 550, "loss": 0.2684, "lr": 2.6731109572354795e-06, "epoch": 4.8144796380090495, "percentage": 48.36, "elapsed_time": "12:07:40", "remaining_time": "12:56:55"}
+ {"current_steps": 267, "total_steps": 550, "loss": 0.271, "lr": 2.658705338254876e-06, "epoch": 4.832579185520362, "percentage": 48.55, "elapsed_time": "12:10:11", "remaining_time": "12:53:56"}
+ {"current_steps": 268, "total_steps": 550, "loss": 0.2719, "lr": 2.6442944263915153e-06, "epoch": 4.850678733031674, "percentage": 48.73, "elapsed_time": "12:12:52", "remaining_time": "12:51:09"}
+ {"current_steps": 269, "total_steps": 550, "loss": 0.2666, "lr": 2.6298787022547317e-06, "epoch": 4.868778280542987, "percentage": 48.91, "elapsed_time": "12:15:16", "remaining_time": "12:48:04"}
+ {"current_steps": 270, "total_steps": 550, "loss": 0.2755, "lr": 2.6154586466143495e-06, "epoch": 4.886877828054299, "percentage": 49.09, "elapsed_time": "12:17:51", "remaining_time": "12:45:10"}
+ {"current_steps": 271, "total_steps": 550, "loss": 0.2864, "lr": 2.6010347403846508e-06, "epoch": 4.904977375565611, "percentage": 49.27, "elapsed_time": "12:20:25", "remaining_time": "12:42:17"}
+ {"current_steps": 272, "total_steps": 550, "loss": 0.2694, "lr": 2.5866074646083385e-06, "epoch": 4.923076923076923, "percentage": 49.45, "elapsed_time": "12:22:46", "remaining_time": "12:39:09"}
+ {"current_steps": 273, "total_steps": 550, "loss": 0.2597, "lr": 2.572177300440487e-06, "epoch": 4.9411764705882355, "percentage": 49.64, "elapsed_time": "12:24:51", "remaining_time": "12:35:46"}
+ {"current_steps": 274, "total_steps": 550, "loss": 0.2825, "lr": 2.557744729132503e-06, "epoch": 4.959276018099548, "percentage": 49.82, "elapsed_time": "12:27:20", "remaining_time": "12:32:48"}
+ {"current_steps": 275, "total_steps": 550, "loss": 0.2893, "lr": 2.5433102320160713e-06, "epoch": 4.97737556561086, "percentage": 50.0, "elapsed_time": "12:30:03", "remaining_time": "12:30:03"}
+ {"current_steps": 276, "total_steps": 550, "loss": 0.2508, "lr": 2.528874290487102e-06, "epoch": 4.995475113122172, "percentage": 50.18, "elapsed_time": "12:32:38", "remaining_time": "12:27:11"}
+ {"current_steps": 277, "total_steps": 550, "loss": 0.2589, "lr": 2.5144373859896792e-06, "epoch": 5.013574660633484, "percentage": 50.36, "elapsed_time": "12:35:18", "remaining_time": "12:24:24"}
+ {"current_steps": 278, "total_steps": 550, "loss": 0.2621, "lr": 2.5e-06, "epoch": 5.031674208144796, "percentage": 50.55, "elapsed_time": "12:37:46", "remaining_time": "12:21:25"}
+ {"current_steps": 279, "total_steps": 550, "loss": 0.245, "lr": 2.4855626140103216e-06, "epoch": 5.049773755656108, "percentage": 50.73, "elapsed_time": "12:40:03", "remaining_time": "12:18:15"}
+ {"current_steps": 280, "total_steps": 550, "loss": 0.2428, "lr": 2.4711257095128987e-06, "epoch": 5.067873303167421, "percentage": 50.91, "elapsed_time": "12:42:13", "remaining_time": "12:15:00"}
+ {"current_steps": 281, "total_steps": 550, "loss": 0.2756, "lr": 2.4566897679839295e-06, "epoch": 5.085972850678733, "percentage": 51.09, "elapsed_time": "12:44:33", "remaining_time": "12:11:54"}
+ {"current_steps": 282, "total_steps": 550, "loss": 0.2626, "lr": 2.4422552708674977e-06, "epoch": 5.104072398190045, "percentage": 51.27, "elapsed_time": "12:46:57", "remaining_time": "12:08:52"}
+ {"current_steps": 283, "total_steps": 550, "loss": 0.2616, "lr": 2.427822699559514e-06, "epoch": 5.122171945701357, "percentage": 51.45, "elapsed_time": "12:49:31", "remaining_time": "12:06:01"}
+ {"current_steps": 284, "total_steps": 550, "loss": 0.2671, "lr": 2.413392535391663e-06, "epoch": 5.14027149321267, "percentage": 51.64, "elapsed_time": "12:52:04", "remaining_time": "12:03:08"}
+ {"current_steps": 285, "total_steps": 550, "loss": 0.2518, "lr": 2.3989652596153496e-06, "epoch": 5.158371040723982, "percentage": 51.82, "elapsed_time": "12:54:36", "remaining_time": "12:00:15"}
+ {"current_steps": 286, "total_steps": 550, "loss": 0.2691, "lr": 2.3845413533856517e-06, "epoch": 5.176470588235294, "percentage": 52.0, "elapsed_time": "12:57:09", "remaining_time": "11:57:22"}
+ {"current_steps": 287, "total_steps": 550, "loss": 0.2662, "lr": 2.3701212977452683e-06, "epoch": 5.1945701357466065, "percentage": 52.18, "elapsed_time": "12:59:49", "remaining_time": "11:54:36"}
+ {"current_steps": 288, "total_steps": 550, "loss": 0.2706, "lr": 2.3557055736084847e-06, "epoch": 5.212669683257919, "percentage": 52.36, "elapsed_time": "13:02:15", "remaining_time": "11:51:38"}
+ {"current_steps": 289, "total_steps": 550, "loss": 0.2651, "lr": 2.3412946617451242e-06, "epoch": 5.230769230769231, "percentage": 52.55, "elapsed_time": "13:04:43", "remaining_time": "11:48:41"}
+ {"current_steps": 290, "total_steps": 550, "loss": 0.2809, "lr": 2.3268890427645213e-06, "epoch": 5.248868778280543, "percentage": 52.73, "elapsed_time": "13:07:16", "remaining_time": "11:45:49"}
+ {"current_steps": 291, "total_steps": 550, "loss": 0.242, "lr": 2.312489197099489e-06, "epoch": 5.266968325791855, "percentage": 52.91, "elapsed_time": "13:09:39", "remaining_time": "11:42:49"}
+ {"current_steps": 292, "total_steps": 550, "loss": 0.252, "lr": 2.298095604990302e-06, "epoch": 5.285067873303167, "percentage": 53.09, "elapsed_time": "13:12:12", "remaining_time": "11:39:57"}
+ {"current_steps": 293, "total_steps": 550, "loss": 0.2687, "lr": 2.283708746468672e-06, "epoch": 5.3031674208144794, "percentage": 53.27, "elapsed_time": "13:14:42", "remaining_time": "11:37:03"}
+ {"current_steps": 294, "total_steps": 550, "loss": 0.2749, "lr": 2.269329101341745e-06, "epoch": 5.321266968325792, "percentage": 53.45, "elapsed_time": "13:17:14", "remaining_time": "11:34:11"}
+ {"current_steps": 295, "total_steps": 550, "loss": 0.2423, "lr": 2.2549571491760985e-06, "epoch": 5.339366515837104, "percentage": 53.64, "elapsed_time": "13:19:57", "remaining_time": "11:31:29"}
+ {"current_steps": 296, "total_steps": 550, "loss": 0.2582, "lr": 2.2405933692817458e-06, "epoch": 5.357466063348416, "percentage": 53.82, "elapsed_time": "13:22:22", "remaining_time": "11:28:31"}
+ {"current_steps": 297, "total_steps": 550, "loss": 0.2505, "lr": 2.226238240696151e-06, "epoch": 5.375565610859729, "percentage": 54.0, "elapsed_time": "13:24:57", "remaining_time": "11:25:42"}
+ {"current_steps": 298, "total_steps": 550, "loss": 0.2547, "lr": 2.2118922421682563e-06, "epoch": 5.393665158371041, "percentage": 54.18, "elapsed_time": "13:27:10", "remaining_time": "11:22:34"}
+ {"current_steps": 299, "total_steps": 550, "loss": 0.2541, "lr": 2.1975558521425106e-06, "epoch": 5.411764705882353, "percentage": 54.36, "elapsed_time": "13:29:47", "remaining_time": "11:19:47"}
+ {"current_steps": 300, "total_steps": 550, "loss": 0.2449, "lr": 2.183229548742916e-06, "epoch": 5.429864253393665, "percentage": 54.55, "elapsed_time": "13:32:14", "remaining_time": "11:16:51"}
+ {"current_steps": 301, "total_steps": 550, "loss": 0.2529, "lr": 2.1689138097570832e-06, "epoch": 5.447963800904978, "percentage": 54.73, "elapsed_time": "13:37:31", "remaining_time": "11:16:17"}
+ {"current_steps": 302, "total_steps": 550, "loss": 0.2549, "lr": 2.1546091126202955e-06, "epoch": 5.46606334841629, "percentage": 54.91, "elapsed_time": "13:40:02", "remaining_time": "11:13:25"}
+ {"current_steps": 303, "total_steps": 550, "loss": 0.2544, "lr": 2.1403159343995845e-06, "epoch": 5.484162895927602, "percentage": 55.09, "elapsed_time": "13:42:33", "remaining_time": "11:10:31"}
+ {"current_steps": 304, "total_steps": 550, "loss": 0.2472, "lr": 2.1260347517778223e-06, "epoch": 5.502262443438914, "percentage": 55.27, "elapsed_time": "13:45:12", "remaining_time": "11:07:46"}
+ {"current_steps": 305, "total_steps": 550, "loss": 0.2663, "lr": 2.111766041037826e-06, "epoch": 5.520361990950226, "percentage": 55.45, "elapsed_time": "13:47:39", "remaining_time": "11:04:50"}
+ {"current_steps": 306, "total_steps": 550, "loss": 0.2654, "lr": 2.0975102780464674e-06, "epoch": 5.538461538461538, "percentage": 55.64, "elapsed_time": "13:50:14", "remaining_time": "11:02:01"}
+ {"current_steps": 307, "total_steps": 550, "loss": 0.2521, "lr": 2.083267938238808e-06, "epoch": 5.5565610859728505, "percentage": 55.82, "elapsed_time": "13:52:44", "remaining_time": "10:59:08"}
+ {"current_steps": 308, "total_steps": 550, "loss": 0.2599, "lr": 2.0690394966022397e-06, "epoch": 5.574660633484163, "percentage": 56.0, "elapsed_time": "13:55:13", "remaining_time": "10:56:14"}
+ {"current_steps": 309, "total_steps": 550, "loss": 0.253, "lr": 2.0548254276606457e-06, "epoch": 5.592760180995475, "percentage": 56.18, "elapsed_time": "13:57:35", "remaining_time": "10:53:16"}
+ {"current_steps": 310, "total_steps": 550, "loss": 0.268, "lr": 2.040626205458574e-06, "epoch": 5.610859728506787, "percentage": 56.36, "elapsed_time": "14:00:02", "remaining_time": "10:50:21"}
+ {"current_steps": 311, "total_steps": 550, "loss": 0.2794, "lr": 2.02644230354543e-06, "epoch": 5.628959276018099, "percentage": 56.55, "elapsed_time": "14:02:43", "remaining_time": "10:47:37"}
+ {"current_steps": 312, "total_steps": 550, "loss": 0.2466, "lr": 2.01227419495968e-06, "epoch": 5.647058823529412, "percentage": 56.73, "elapsed_time": "14:05:14", "remaining_time": "10:44:46"}
+ {"current_steps": 313, "total_steps": 550, "loss": 0.2646, "lr": 1.9981223522130764e-06, "epoch": 5.665158371040724, "percentage": 56.91, "elapsed_time": "14:07:44", "remaining_time": "10:41:54"}
+ {"current_steps": 314, "total_steps": 550, "loss": 0.2524, "lr": 1.9839872472749016e-06, "epoch": 5.683257918552036, "percentage": 57.09, "elapsed_time": "14:10:22", "remaining_time": "10:39:08"}
+ {"current_steps": 315, "total_steps": 550, "loss": 0.2484, "lr": 1.9698693515562235e-06, "epoch": 5.701357466063349, "percentage": 57.27, "elapsed_time": "14:12:56", "remaining_time": "10:36:19"}
+ {"current_steps": 316, "total_steps": 550, "loss": 0.241, "lr": 1.9557691358941796e-06, "epoch": 5.719457013574661, "percentage": 57.45, "elapsed_time": "14:15:15", "remaining_time": "10:33:19"}
+ {"current_steps": 317, "total_steps": 550, "loss": 0.2834, "lr": 1.941687070536267e-06, "epoch": 5.737556561085973, "percentage": 57.64, "elapsed_time": "14:18:00", "remaining_time": "10:30:38"}
+ {"current_steps": 318, "total_steps": 550, "loss": 0.2617, "lr": 1.9276236251246655e-06, "epoch": 5.755656108597285, "percentage": 57.82, "elapsed_time": "14:20:28", "remaining_time": "10:27:46"}
+ {"current_steps": 319, "total_steps": 550, "loss": 0.251, "lr": 1.913579268680572e-06, "epoch": 5.773755656108597, "percentage": 58.0, "elapsed_time": "14:22:56", "remaining_time": "10:24:53"}
+ {"current_steps": 320, "total_steps": 550, "loss": 0.2528, "lr": 1.8995544695885593e-06, "epoch": 5.791855203619909, "percentage": 58.18, "elapsed_time": "14:25:40", "remaining_time": "10:22:12"}
+ {"current_steps": 321, "total_steps": 550, "loss": 0.2623, "lr": 1.8855496955809546e-06, "epoch": 5.8099547511312215, "percentage": 58.36, "elapsed_time": "14:28:08", "remaining_time": "10:19:19"}
+ {"current_steps": 322, "total_steps": 550, "loss": 0.2603, "lr": 1.8715654137222434e-06, "epoch": 5.828054298642534, "percentage": 58.55, "elapsed_time": "14:30:36", "remaining_time": "10:16:27"}
+ {"current_steps": 323, "total_steps": 550, "loss": 0.2461, "lr": 1.8576020903934872e-06, "epoch": 5.846153846153846, "percentage": 58.73, "elapsed_time": "14:33:21", "remaining_time": "10:13:46"}
+ {"current_steps": 324, "total_steps": 550, "loss": 0.2443, "lr": 1.8436601912767737e-06, "epoch": 5.864253393665159, "percentage": 58.91, "elapsed_time": "14:35:48", "remaining_time": "10:10:54"}
+ {"current_steps": 325, "total_steps": 550, "loss": 0.2606, "lr": 1.8297401813396854e-06, "epoch": 5.882352941176471, "percentage": 59.09, "elapsed_time": "14:38:20", "remaining_time": "10:08:04"}
+ {"current_steps": 326, "total_steps": 550, "loss": 0.2683, "lr": 1.8158425248197931e-06, "epoch": 5.900452488687783, "percentage": 59.27, "elapsed_time": "14:41:00", "remaining_time": "10:05:21"}
+ {"current_steps": 327, "total_steps": 550, "loss": 0.2674, "lr": 1.801967685209171e-06, "epoch": 5.918552036199095, "percentage": 59.45, "elapsed_time": "14:43:47", "remaining_time": "10:02:42"}
+ {"current_steps": 328, "total_steps": 550, "loss": 0.2518, "lr": 1.7881161252389423e-06, "epoch": 5.9366515837104075, "percentage": 59.64, "elapsed_time": "14:46:05", "remaining_time": "9:59:43"}
+ {"current_steps": 329, "total_steps": 550, "loss": 0.2332, "lr": 1.7742883068638447e-06, "epoch": 5.95475113122172, "percentage": 59.82, "elapsed_time": "14:48:26", "remaining_time": "9:56:47"}
+ {"current_steps": 330, "total_steps": 550, "loss": 0.2758, "lr": 1.7604846912468243e-06, "epoch": 5.972850678733032, "percentage": 60.0, "elapsed_time": "14:51:03", "remaining_time": "9:54:02"}
+ {"current_steps": 331, "total_steps": 550, "loss": 0.2722, "lr": 1.7467057387436577e-06, "epoch": 5.990950226244344, "percentage": 60.18, "elapsed_time": "14:53:35", "remaining_time": "9:51:13"}
+ {"current_steps": 332, "total_steps": 550, "loss": 0.2505, "lr": 1.7329519088875959e-06, "epoch": 6.009049773755656, "percentage": 60.36, "elapsed_time": "14:56:06", "remaining_time": "9:48:24"}
+ {"current_steps": 333, "total_steps": 550, "loss": 0.2406, "lr": 1.719223660374041e-06, "epoch": 6.027149321266968, "percentage": 60.55, "elapsed_time": "14:58:49", "remaining_time": "9:45:42"}
+ {"current_steps": 334, "total_steps": 550, "loss": 0.2459, "lr": 1.7055214510452462e-06, "epoch": 6.04524886877828, "percentage": 60.73, "elapsed_time": "15:01:25", "remaining_time": "9:42:57"}
+ {"current_steps": 335, "total_steps": 550, "loss": 0.256, "lr": 1.6918457378750511e-06, "epoch": 6.0633484162895925, "percentage": 60.91, "elapsed_time": "15:03:54", "remaining_time": "9:40:07"}
+ {"current_steps": 336, "total_steps": 550, "loss": 0.2606, "lr": 1.6781969769536356e-06, "epoch": 6.081447963800905, "percentage": 61.09, "elapsed_time": "15:06:31", "remaining_time": "9:37:22"}
+ {"current_steps": 337, "total_steps": 550, "loss": 0.2445, "lr": 1.6645756234723127e-06, "epoch": 6.099547511312217, "percentage": 61.27, "elapsed_time": "15:09:09", "remaining_time": "9:34:37"}
+ {"current_steps": 338, "total_steps": 550, "loss": 0.2346, "lr": 1.6509821317083466e-06, "epoch": 6.117647058823529, "percentage": 61.45, "elapsed_time": "15:11:32", "remaining_time": "9:31:44"}
+ {"current_steps": 339, "total_steps": 550, "loss": 0.2645, "lr": 1.6374169550098052e-06, "epoch": 6.135746606334842, "percentage": 61.64, "elapsed_time": "15:14:15", "remaining_time": "9:29:03"}
+ {"current_steps": 340, "total_steps": 550, "loss": 0.2409, "lr": 1.6238805457804353e-06, "epoch": 6.153846153846154, "percentage": 61.82, "elapsed_time": "15:16:46", "remaining_time": "9:26:14"}
+ {"current_steps": 341, "total_steps": 550, "loss": 0.2511, "lr": 1.6103733554645794e-06, "epoch": 6.171945701357466, "percentage": 62.0, "elapsed_time": "15:19:27", "remaining_time": "9:23:32"}
+ {"current_steps": 342, "total_steps": 550, "loss": 0.2562, "lr": 1.5968958345321178e-06, "epoch": 6.1900452488687785, "percentage": 62.18, "elapsed_time": "15:22:15", "remaining_time": "9:20:54"}
+ {"current_steps": 343, "total_steps": 550, "loss": 0.2558, "lr": 1.5834484324634453e-06, "epoch": 6.208144796380091, "percentage": 62.36, "elapsed_time": "15:24:54", "remaining_time": "9:18:10"}
+ {"current_steps": 344, "total_steps": 550, "loss": 0.2619, "lr": 1.5700315977344813e-06, "epoch": 6.226244343891403, "percentage": 62.55, "elapsed_time": "15:27:25", "remaining_time": "9:15:22"}
+ {"current_steps": 345, "total_steps": 550, "loss": 0.2357, "lr": 1.5566457778017141e-06, "epoch": 6.244343891402715, "percentage": 62.73, "elapsed_time": "15:29:56", "remaining_time": "9:12:34"}
+ {"current_steps": 346, "total_steps": 550, "loss": 0.2547, "lr": 1.5432914190872757e-06, "epoch": 6.262443438914027, "percentage": 62.91, "elapsed_time": "15:32:29", "remaining_time": "9:09:47"}
+ {"current_steps": 347, "total_steps": 550, "loss": 0.253, "lr": 1.529968966964055e-06, "epoch": 6.280542986425339, "percentage": 63.09, "elapsed_time": "15:34:57", "remaining_time": "9:06:58"}
+ {"current_steps": 348, "total_steps": 550, "loss": 0.2632, "lr": 1.5166788657408441e-06, "epoch": 6.298642533936651, "percentage": 63.27, "elapsed_time": "15:37:37", "remaining_time": "9:04:15"}
+ {"current_steps": 349, "total_steps": 550, "loss": 0.2647, "lr": 1.5034215586475194e-06, "epoch": 6.316742081447964, "percentage": 63.45, "elapsed_time": "15:40:15", "remaining_time": "9:01:31"}
+ {"current_steps": 350, "total_steps": 550, "loss": 0.2395, "lr": 1.490197487820263e-06, "epoch": 6.334841628959276, "percentage": 63.64, "elapsed_time": "15:42:45", "remaining_time": "8:58:43"}
+ {"current_steps": 351, "total_steps": 550, "loss": 0.2516, "lr": 1.477007094286813e-06, "epoch": 6.352941176470588, "percentage": 63.82, "elapsed_time": "15:48:14", "remaining_time": "8:57:36"}
+ {"current_steps": 352, "total_steps": 550, "loss": 0.2709, "lr": 1.4638508179517583e-06, "epoch": 6.371040723981901, "percentage": 64.0, "elapsed_time": "15:51:01", "remaining_time": "8:54:57"}
+ {"current_steps": 353, "total_steps": 550, "loss": 0.2497, "lr": 1.4507290975818648e-06, "epoch": 6.389140271493213, "percentage": 64.18, "elapsed_time": "15:53:24", "remaining_time": "8:52:04"}
+ {"current_steps": 354, "total_steps": 550, "loss": 0.2518, "lr": 1.4376423707914462e-06, "epoch": 6.407239819004525, "percentage": 64.36, "elapsed_time": "15:56:05", "remaining_time": "8:49:21"}
+ {"current_steps": 355, "total_steps": 550, "loss": 0.2464, "lr": 1.4245910740277642e-06, "epoch": 6.425339366515837, "percentage": 64.55, "elapsed_time": "15:58:24", "remaining_time": "8:46:27"}
+ {"current_steps": 356, "total_steps": 550, "loss": 0.2554, "lr": 1.4115756425564798e-06, "epoch": 6.4434389140271495, "percentage": 64.73, "elapsed_time": "16:01:03", "remaining_time": "8:43:43"}
+ {"current_steps": 357, "total_steps": 550, "loss": 0.2677, "lr": 1.39859651044713e-06, "epoch": 6.461538461538462, "percentage": 64.91, "elapsed_time": "16:03:44", "remaining_time": "8:41:00"}
+ {"current_steps": 358, "total_steps": 550, "loss": 0.2433, "lr": 1.3856541105586545e-06, "epoch": 6.479638009049774, "percentage": 65.09, "elapsed_time": "16:05:58", "remaining_time": "8:38:03"}
+ {"current_steps": 359, "total_steps": 550, "loss": 0.248, "lr": 1.372748874524961e-06, "epoch": 6.497737556561086, "percentage": 65.27, "elapsed_time": "16:08:27", "remaining_time": "8:35:15"}
+ {"current_steps": 360, "total_steps": 550, "loss": 0.2433, "lr": 1.3598812327405274e-06, "epoch": 6.515837104072398, "percentage": 65.45, "elapsed_time": "16:11:05", "remaining_time": "8:32:31"}
+ {"current_steps": 361, "total_steps": 550, "loss": 0.2419, "lr": 1.3470516143460494e-06, "epoch": 6.53393665158371, "percentage": 65.64, "elapsed_time": "16:13:30", "remaining_time": "8:29:40"}
+ {"current_steps": 362, "total_steps": 550, "loss": 0.2485, "lr": 1.3342604472141296e-06, "epoch": 6.552036199095022, "percentage": 65.82, "elapsed_time": "16:15:57", "remaining_time": "8:26:51"}
+ {"current_steps": 363, "total_steps": 550, "loss": 0.2514, "lr": 1.3215081579350058e-06, "epoch": 6.570135746606335, "percentage": 66.0, "elapsed_time": "16:18:44", "remaining_time": "8:24:11"}
+ {"current_steps": 364, "total_steps": 550, "loss": 0.2623, "lr": 1.308795171802324e-06, "epoch": 6.588235294117647, "percentage": 66.18, "elapsed_time": "16:21:22", "remaining_time": "8:21:28"}
+ {"current_steps": 365, "total_steps": 550, "loss": 0.2523, "lr": 1.2961219127989562e-06, "epoch": 6.606334841628959, "percentage": 66.36, "elapsed_time": "16:23:48", "remaining_time": "8:18:38"}
+ {"current_steps": 366, "total_steps": 550, "loss": 0.2434, "lr": 1.2834888035828597e-06, "epoch": 6.624434389140271, "percentage": 66.55, "elapsed_time": "16:26:20", "remaining_time": "8:15:51"}
+ {"current_steps": 367, "total_steps": 550, "loss": 0.2246, "lr": 1.2708962654729812e-06, "epoch": 6.642533936651584, "percentage": 66.73, "elapsed_time": "16:28:39", "remaining_time": "8:12:58"}
+ {"current_steps": 368, "total_steps": 550, "loss": 0.2548, "lr": 1.258344718435205e-06, "epoch": 6.660633484162896, "percentage": 66.91, "elapsed_time": "16:31:10", "remaining_time": "8:10:11"}
+ {"current_steps": 369, "total_steps": 550, "loss": 0.2517, "lr": 1.2458345810683492e-06, "epoch": 6.678733031674208, "percentage": 67.09, "elapsed_time": "16:33:52", "remaining_time": "8:07:30"}
+ {"current_steps": 370, "total_steps": 550, "loss": 0.2373, "lr": 1.233366270590202e-06, "epoch": 6.6968325791855206, "percentage": 67.27, "elapsed_time": "16:36:11", "remaining_time": "8:04:38"}
+ {"current_steps": 371, "total_steps": 550, "loss": 0.2444, "lr": 1.2209402028236114e-06, "epoch": 6.714932126696833, "percentage": 67.45, "elapsed_time": "16:38:56", "remaining_time": "8:01:58"}
+ {"current_steps": 372, "total_steps": 550, "loss": 0.2429, "lr": 1.2085567921826128e-06, "epoch": 6.733031674208145, "percentage": 67.64, "elapsed_time": "16:41:21", "remaining_time": "7:59:08"}
+ {"current_steps": 373, "total_steps": 550, "loss": 0.2408, "lr": 1.1962164516586123e-06, "epoch": 6.751131221719457, "percentage": 67.82, "elapsed_time": "16:43:46", "remaining_time": "7:56:19"}
+ {"current_steps": 374, "total_steps": 550, "loss": 0.2364, "lr": 1.1839195928066101e-06, "epoch": 6.769230769230769, "percentage": 68.0, "elapsed_time": "16:46:21", "remaining_time": "7:53:35"}
+ {"current_steps": 375, "total_steps": 550, "loss": 0.2502, "lr": 1.171666625731477e-06, "epoch": 6.787330316742081, "percentage": 68.18, "elapsed_time": "16:48:49", "remaining_time": "7:50:46"}
+ {"current_steps": 376, "total_steps": 550, "loss": 0.2495, "lr": 1.1594579590742758e-06, "epoch": 6.8054298642533935, "percentage": 68.36, "elapsed_time": "16:51:21", "remaining_time": "7:48:01"}
+ {"current_steps": 377, "total_steps": 550, "loss": 0.2444, "lr": 1.1472939999986338e-06, "epoch": 6.823529411764706, "percentage": 68.55, "elapsed_time": "16:53:37", "remaining_time": "7:45:08"}
+ {"current_steps": 378, "total_steps": 550, "loss": 0.2423, "lr": 1.1351751541771644e-06, "epoch": 6.841628959276018, "percentage": 68.73, "elapsed_time": "16:56:07", "remaining_time": "7:42:21"}
+ {"current_steps": 379, "total_steps": 550, "loss": 0.2641, "lr": 1.1231018257779363e-06, "epoch": 6.859728506787331, "percentage": 68.91, "elapsed_time": "16:58:41", "remaining_time": "7:39:37"}
+ {"current_steps": 380, "total_steps": 550, "loss": 0.2463, "lr": 1.1110744174509952e-06, "epoch": 6.877828054298643, "percentage": 69.09, "elapsed_time": "17:01:05", "remaining_time": "7:36:48"}
+ {"current_steps": 381, "total_steps": 550, "loss": 0.2631, "lr": 1.0990933303149342e-06, "epoch": 6.895927601809955, "percentage": 69.27, "elapsed_time": "17:03:49", "remaining_time": "7:34:08"}
+ {"current_steps": 382, "total_steps": 550, "loss": 0.2481, "lr": 1.0871589639435204e-06, "epoch": 6.914027149321267, "percentage": 69.45, "elapsed_time": "17:06:21", "remaining_time": "7:31:22"}
+ {"current_steps": 383, "total_steps": 550, "loss": 0.241, "lr": 1.0752717163523623e-06, "epoch": 6.932126696832579, "percentage": 69.64, "elapsed_time": "17:08:59", "remaining_time": "7:28:40"}
+ {"current_steps": 384, "total_steps": 550, "loss": 0.2527, "lr": 1.0634319839856407e-06, "epoch": 6.950226244343892, "percentage": 69.82, "elapsed_time": "17:11:47", "remaining_time": "7:26:02"}
+ {"current_steps": 385, "total_steps": 550, "loss": 0.2322, "lr": 1.0516401617028863e-06, "epoch": 6.968325791855204, "percentage": 70.0, "elapsed_time": "17:14:26", "remaining_time": "7:23:19"}
+ {"current_steps": 386, "total_steps": 550, "loss": 0.2357, "lr": 1.0398966427658091e-06, "epoch": 6.986425339366516, "percentage": 70.18, "elapsed_time": "17:17:05", "remaining_time": "7:20:37"}
+ {"current_steps": 387, "total_steps": 550, "loss": 0.2568, "lr": 1.0282018188251854e-06, "epoch": 7.004524886877828, "percentage": 70.36, "elapsed_time": "17:19:36", "remaining_time": "7:17:52"}
+ {"current_steps": 388, "total_steps": 550, "loss": 0.2387, "lr": 1.0165560799077952e-06, "epoch": 7.02262443438914, "percentage": 70.55, "elapsed_time": "17:22:05", "remaining_time": "7:15:05"}
+ {"current_steps": 389, "total_steps": 550, "loss": 0.262, "lr": 1.004959814403413e-06, "epoch": 7.040723981900452, "percentage": 70.73, "elapsed_time": "17:24:51", "remaining_time": "7:12:26"}
+ {"current_steps": 390, "total_steps": 550, "loss": 0.2374, "lr": 9.934134090518593e-07, "epoch": 7.0588235294117645, "percentage": 70.91, "elapsed_time": "17:27:18", "remaining_time": "7:09:39"}
+ {"current_steps": 391, "total_steps": 550, "loss": 0.2162, "lr": 9.81917248930096e-07, "epoch": 7.076923076923077, "percentage": 71.09, "elapsed_time": "17:29:34", "remaining_time": "7:06:48"}
+ {"current_steps": 392, "total_steps": 550, "loss": 0.2495, "lr": 9.704717174393912e-07, "epoch": 7.095022624434389, "percentage": 71.27, "elapsed_time": "17:32:01", "remaining_time": "7:04:01"}
+ {"current_steps": 393, "total_steps": 550, "loss": 0.2596, "lr": 9.590771962925272e-07, "epoch": 7.113122171945701, "percentage": 71.45, "elapsed_time": "17:34:28", "remaining_time": "7:01:15"}
+ {"current_steps": 394, "total_steps": 550, "loss": 0.2465, "lr": 9.477340655010717e-07, "epoch": 7.131221719457014, "percentage": 71.64, "elapsed_time": "17:37:12", "remaining_time": "6:58:35"}
+ {"current_steps": 395, "total_steps": 550, "loss": 0.2395, "lr": 9.36442703362706e-07, "epoch": 7.149321266968326, "percentage": 71.82, "elapsed_time": "17:39:35", "remaining_time": "6:55:47"}
+ {"current_steps": 396, "total_steps": 550, "loss": 0.2425, "lr": 9.252034864486062e-07, "epoch": 7.167420814479638, "percentage": 72.0, "elapsed_time": "17:42:16", "remaining_time": "6:53:06"}
+ {"current_steps": 397, "total_steps": 550, "loss": 0.2257, "lr": 9.140167895908867e-07, "epoch": 7.1855203619909505, "percentage": 72.18, "elapsed_time": "17:44:44", "remaining_time": "6:50:20"}
+ {"current_steps": 398, "total_steps": 550, "loss": 0.2313, "lr": 9.028829858700974e-07, "epoch": 7.203619909502263, "percentage": 72.36, "elapsed_time": "17:47:16", "remaining_time": "6:47:36"}
+ {"current_steps": 399, "total_steps": 550, "loss": 0.2462, "lr": 8.918024466027822e-07, "epoch": 7.221719457013575, "percentage": 72.55, "elapsed_time": "17:49:36", "remaining_time": "6:44:47"}
+ {"current_steps": 400, "total_steps": 550, "loss": 0.2502, "lr": 8.807755413290953e-07, "epoch": 7.239819004524887, "percentage": 72.73, "elapsed_time": "17:52:15", "remaining_time": "6:42:05"}
+ {"current_steps": 401, "total_steps": 550, "loss": 0.2433, "lr": 8.698026378004787e-07, "epoch": 7.257918552036199, "percentage": 72.91, "elapsed_time": "17:58:19", "remaining_time": "6:40:40"}
+ {"current_steps": 402, "total_steps": 550, "loss": 0.2604, "lr": 8.588841019673938e-07, "epoch": 7.276018099547511, "percentage": 73.09, "elapsed_time": "18:00:53", "remaining_time": "6:37:56"}
+ {"current_steps": 403, "total_steps": 550, "loss": 0.2327, "lr": 8.480202979671201e-07, "epoch": 7.294117647058823, "percentage": 73.27, "elapsed_time": "18:03:24", "remaining_time": "6:35:11"}
+ {"current_steps": 404, "total_steps": 550, "loss": 0.2409, "lr": 8.372115881116089e-07, "epoch": 7.3122171945701355, "percentage": 73.45, "elapsed_time": "18:05:47", "remaining_time": "6:32:23"}
+ {"current_steps": 405, "total_steps": 550, "loss": 0.2393, "lr": 8.264583328754017e-07, "epoch": 7.330316742081448, "percentage": 73.64, "elapsed_time": "18:08:15", "remaining_time": "6:29:37"}
+ {"current_steps": 406, "total_steps": 550, "loss": 0.2312, "lr": 8.157608908836071e-07, "epoch": 7.34841628959276, "percentage": 73.82, "elapsed_time": "18:10:57", "remaining_time": "6:26:56"}
+ {"current_steps": 407, "total_steps": 550, "loss": 0.2503, "lr": 8.051196188999425e-07, "epoch": 7.366515837104072, "percentage": 74.0, "elapsed_time": "18:13:37", "remaining_time": "6:24:14"}
+ {"current_steps": 408, "total_steps": 550, "loss": 0.2419, "lr": 7.945348718148324e-07, "epoch": 7.384615384615385, "percentage": 74.18, "elapsed_time": "18:16:07", "remaining_time": "6:21:29"}
+ {"current_steps": 409, "total_steps": 550, "loss": 0.2332, "lr": 7.840070026335758e-07, "epoch": 7.402714932126697, "percentage": 74.36, "elapsed_time": "18:18:42", "remaining_time": "6:18:46"}
+ {"current_steps": 410, "total_steps": 550, "loss": 0.2484, "lr": 7.735363624645712e-07, "epoch": 7.420814479638009, "percentage": 74.55, "elapsed_time": "18:21:10", "remaining_time": "6:16:00"}
+ {"current_steps": 411, "total_steps": 550, "loss": 0.2404, "lr": 7.6312330050761e-07, "epoch": 7.4389140271493215, "percentage": 74.73, "elapsed_time": "18:23:45", "remaining_time": "6:13:17"}
+ {"current_steps": 412, "total_steps": 550, "loss": 0.2526, "lr": 7.527681640422265e-07, "epoch": 7.457013574660634, "percentage": 74.91, "elapsed_time": "18:26:12", "remaining_time": "6:10:31"}
+ {"current_steps": 413, "total_steps": 550, "loss": 0.2688, "lr": 7.424712984161192e-07, "epoch": 7.475113122171946, "percentage": 75.09, "elapsed_time": "18:28:47", "remaining_time": "6:07:48"}
+ {"current_steps": 414, "total_steps": 550, "loss": 0.2508, "lr": 7.322330470336314e-07, "epoch": 7.493212669683258, "percentage": 75.27, "elapsed_time": "18:31:30", "remaining_time": "6:05:07"}
+ {"current_steps": 415, "total_steps": 550, "loss": 0.2486, "lr": 7.220537513442999e-07, "epoch": 7.51131221719457, "percentage": 75.45, "elapsed_time": "18:34:12", "remaining_time": "6:02:27"}
+ {"current_steps": 416, "total_steps": 550, "loss": 0.2618, "lr": 7.11933750831467e-07, "epoch": 7.529411764705882, "percentage": 75.64, "elapsed_time": "18:36:39", "remaining_time": "5:59:41"}
+ {"current_steps": 417, "total_steps": 550, "loss": 0.2745, "lr": 7.018733830009578e-07, "epoch": 7.547511312217194, "percentage": 75.82, "elapsed_time": "18:39:19", "remaining_time": "5:57:00"}
+ {"current_steps": 418, "total_steps": 550, "loss": 0.2575, "lr": 6.91872983369826e-07, "epoch": 7.5656108597285066, "percentage": 76.0, "elapsed_time": "18:41:58", "remaining_time": "5:54:18"}
+ {"current_steps": 419, "total_steps": 550, "loss": 0.2431, "lr": 6.819328854551619e-07, "epoch": 7.583710407239819, "percentage": 76.18, "elapsed_time": "18:44:36", "remaining_time": "5:51:36"}
+ {"current_steps": 420, "total_steps": 550, "loss": 0.2612, "lr": 6.720534207629731e-07, "epoch": 7.601809954751131, "percentage": 76.36, "elapsed_time": "18:47:15", "remaining_time": "5:48:54"}
+ {"current_steps": 421, "total_steps": 550, "loss": 0.2363, "lr": 6.622349187771246e-07, "epoch": 7.619909502262443, "percentage": 76.55, "elapsed_time": "18:49:41", "remaining_time": "5:46:09"}
+ {"current_steps": 422, "total_steps": 550, "loss": 0.2165, "lr": 6.524777069483526e-07, "epoch": 7.638009049773755, "percentage": 76.73, "elapsed_time": "18:52:06", "remaining_time": "5:43:23"}
+ {"current_steps": 423, "total_steps": 550, "loss": 0.2518, "lr": 6.427821106833429e-07, "epoch": 7.656108597285068, "percentage": 76.91, "elapsed_time": "18:54:33", "remaining_time": "5:40:38"}
+ {"current_steps": 424, "total_steps": 550, "loss": 0.2497, "lr": 6.33148453333881e-07, "epoch": 7.67420814479638, "percentage": 77.09, "elapsed_time": "18:57:03", "remaining_time": "5:37:54"}
+ {"current_steps": 425, "total_steps": 550, "loss": 0.2735, "lr": 6.235770561860646e-07, "epoch": 7.6923076923076925, "percentage": 77.27, "elapsed_time": "18:59:44", "remaining_time": "5:35:13"}
+ {"current_steps": 426, "total_steps": 550, "loss": 0.2638, "lr": 6.140682384495902e-07, "epoch": 7.710407239819005, "percentage": 77.45, "elapsed_time": "19:02:27", "remaining_time": "5:32:32"}
+ {"current_steps": 427, "total_steps": 550, "loss": 0.2511, "lr": 6.046223172471083e-07, "epoch": 7.728506787330317, "percentage": 77.64, "elapsed_time": "19:04:54", "remaining_time": "5:29:47"}
+ {"current_steps": 428, "total_steps": 550, "loss": 0.2411, "lr": 5.952396076036457e-07, "epoch": 7.746606334841629, "percentage": 77.82, "elapsed_time": "19:07:12", "remaining_time": "5:27:00"}
+ {"current_steps": 429, "total_steps": 550, "loss": 0.2337, "lr": 5.85920422436099e-07, "epoch": 7.764705882352941, "percentage": 78.0, "elapsed_time": "19:09:45", "remaining_time": "5:24:17"}
+ {"current_steps": 430, "total_steps": 550, "loss": 0.2304, "lr": 5.766650725428027e-07, "epoch": 7.782805429864253, "percentage": 78.18, "elapsed_time": "19:12:15", "remaining_time": "5:21:33"}
+ {"current_steps": 431, "total_steps": 550, "loss": 0.2302, "lr": 5.674738665931575e-07, "epoch": 7.800904977375565, "percentage": 78.36, "elapsed_time": "19:14:38", "remaining_time": "5:18:48"}
+ {"current_steps": 432, "total_steps": 550, "loss": 0.2415, "lr": 5.583471111173414e-07, "epoch": 7.819004524886878, "percentage": 78.55, "elapsed_time": "19:17:12", "remaining_time": "5:16:05"}
+ {"current_steps": 433, "total_steps": 550, "loss": 0.2347, "lr": 5.492851104960839e-07, "epoch": 7.83710407239819, "percentage": 78.73, "elapsed_time": "19:19:40", "remaining_time": "5:13:21"}
+ {"current_steps": 434, "total_steps": 550, "loss": 0.2433, "lr": 5.402881669505164e-07, "epoch": 7.855203619909502, "percentage": 78.91, "elapsed_time": "19:22:19", "remaining_time": "5:10:40"}
+ {"current_steps": 435, "total_steps": 550, "loss": 0.2392, "lr": 5.313565805320914e-07, "epoch": 7.873303167420815, "percentage": 79.09, "elapsed_time": "19:24:53", "remaining_time": "5:07:57"}
+ {"current_steps": 436, "total_steps": 550, "loss": 0.2491, "lr": 5.224906491125778e-07, "epoch": 7.891402714932127, "percentage": 79.27, "elapsed_time": "19:27:28", "remaining_time": "5:05:15"}
+ {"current_steps": 437, "total_steps": 550, "loss": 0.2374, "lr": 5.13690668374125e-07, "epoch": 7.909502262443439, "percentage": 79.45, "elapsed_time": "19:29:59", "remaining_time": "5:02:32"}
+ {"current_steps": 438, "total_steps": 550, "loss": 0.2222, "lr": 5.049569317994013e-07, "epoch": 7.927601809954751, "percentage": 79.64, "elapsed_time": "19:32:30", "remaining_time": "4:59:49"}
+ {"current_steps": 439, "total_steps": 550, "loss": 0.2413, "lr": 4.962897306618101e-07, "epoch": 7.9457013574660635, "percentage": 79.82, "elapsed_time": "19:35:05", "remaining_time": "4:57:07"}
+ {"current_steps": 440, "total_steps": 550, "loss": 0.2526, "lr": 4.876893540157692e-07, "epoch": 7.963800904977376, "percentage": 80.0, "elapsed_time": "19:37:45", "remaining_time": "4:54:26"}
+ {"current_steps": 441, "total_steps": 550, "loss": 0.2505, "lr": 4.791560886870786e-07, "epoch": 7.981900452488688, "percentage": 80.18, "elapsed_time": "19:40:30", "remaining_time": "4:51:46"}
+ {"current_steps": 442, "total_steps": 550, "loss": 0.2397, "lr": 4.70690219263347e-07, "epoch": 8.0, "percentage": 80.36, "elapsed_time": "19:43:12", "remaining_time": "4:49:06"}
+ {"current_steps": 443, "total_steps": 550, "loss": 0.2384, "lr": 4.6229202808450587e-07, "epoch": 8.018099547511312, "percentage": 80.55, "elapsed_time": "19:45:40", "remaining_time": "4:46:22"}
+ {"current_steps": 444, "total_steps": 550, "loss": 0.2396, "lr": 4.539617952333913e-07, "epoch": 8.036199095022624, "percentage": 80.73, "elapsed_time": "19:48:06", "remaining_time": "4:43:38"}
+ {"current_steps": 445, "total_steps": 550, "loss": 0.2481, "lr": 4.4569979852640444e-07, "epoch": 8.054298642533936, "percentage": 80.91, "elapsed_time": "19:50:37", "remaining_time": "4:40:56"}
+ {"current_steps": 446, "total_steps": 550, "loss": 0.2331, "lr": 4.3750631350424456e-07, "epoch": 8.072398190045249, "percentage": 81.09, "elapsed_time": "19:53:05", "remaining_time": "4:38:12"}
+ {"current_steps": 447, "total_steps": 550, "loss": 0.2398, "lr": 4.2938161342272024e-07, "epoch": 8.09049773755656, "percentage": 81.27, "elapsed_time": "19:55:36", "remaining_time": "4:35:29"}
+ {"current_steps": 448, "total_steps": 550, "loss": 0.2396, "lr": 4.2132596924363666e-07, "epoch": 8.108597285067873, "percentage": 81.45, "elapsed_time": "19:58:03", "remaining_time": "4:32:46"}
+ {"current_steps": 449, "total_steps": 550, "loss": 0.2457, "lr": 4.1333964962575995e-07, "epoch": 8.126696832579185, "percentage": 81.64, "elapsed_time": "20:00:49", "remaining_time": "4:30:07"}
+ {"current_steps": 450, "total_steps": 550, "loss": 0.2557, "lr": 4.0542292091585447e-07, "epoch": 8.144796380090497, "percentage": 81.82, "elapsed_time": "20:03:21", "remaining_time": "4:27:24"}
+ {"current_steps": 451, "total_steps": 550, "loss": 0.2346, "lr": 3.975760471398013e-07, "epoch": 8.16289592760181, "percentage": 82.0, "elapsed_time": "20:08:49", "remaining_time": "4:25:21"}
+ {"current_steps": 452, "total_steps": 550, "loss": 0.2176, "lr": 3.89799289993795e-07, "epoch": 8.180995475113122, "percentage": 82.18, "elapsed_time": "20:11:18", "remaining_time": "4:22:37"}
+ {"current_steps": 453, "total_steps": 550, "loss": 0.247, "lr": 3.8209290883561205e-07, "epoch": 8.199095022624434, "percentage": 82.36, "elapsed_time": "20:13:44", "remaining_time": "4:19:53"}
+ {"current_steps": 454, "total_steps": 550, "loss": 0.2298, "lr": 3.7445716067596506e-07, "epoch": 8.217194570135746, "percentage": 82.55, "elapsed_time": "20:16:08", "remaining_time": "4:17:09"}
+ {"current_steps": 455, "total_steps": 550, "loss": 0.2385, "lr": 3.668923001699284e-07, "epoch": 8.235294117647058, "percentage": 82.73, "elapsed_time": "20:18:37", "remaining_time": "4:14:26"}
+ {"current_steps": 456, "total_steps": 550, "loss": 0.2519, "lr": 3.593985796084468e-07, "epoch": 8.25339366515837, "percentage": 82.91, "elapsed_time": "20:21:11", "remaining_time": "4:11:44"}
+ {"current_steps": 457, "total_steps": 550, "loss": 0.2602, "lr": 3.519762489099207e-07, "epoch": 8.271493212669684, "percentage": 83.09, "elapsed_time": "20:23:57", "remaining_time": "4:09:04"}
458
+ {"current_steps": 458, "total_steps": 550, "loss": 0.2567, "lr": 3.446255556118736e-07, "epoch": 8.289592760180996, "percentage": 83.27, "elapsed_time": "20:26:52", "remaining_time": "4:06:26"}
459
+ {"current_steps": 459, "total_steps": 550, "loss": 0.2332, "lr": 3.373467448626916e-07, "epoch": 8.307692307692308, "percentage": 83.45, "elapsed_time": "20:29:41", "remaining_time": "4:03:47"}
460
+ {"current_steps": 460, "total_steps": 550, "loss": 0.2357, "lr": 3.3014005941345406e-07, "epoch": 8.32579185520362, "percentage": 83.64, "elapsed_time": "20:32:15", "remaining_time": "4:01:05"}
461
+ {"current_steps": 461, "total_steps": 550, "loss": 0.2188, "lr": 3.230057396098321e-07, "epoch": 8.343891402714933, "percentage": 83.82, "elapsed_time": "20:34:45", "remaining_time": "3:58:22"}
462
+ {"current_steps": 462, "total_steps": 550, "loss": 0.2595, "lr": 3.1594402338407633e-07, "epoch": 8.361990950226245, "percentage": 84.0, "elapsed_time": "20:37:26", "remaining_time": "3:55:42"}
463
+ {"current_steps": 463, "total_steps": 550, "loss": 0.2293, "lr": 3.0895514624707994e-07, "epoch": 8.380090497737557, "percentage": 84.18, "elapsed_time": "20:40:08", "remaining_time": "3:53:01"}
464
+ {"current_steps": 464, "total_steps": 550, "loss": 0.2305, "lr": 3.020393412805259e-07, "epoch": 8.39819004524887, "percentage": 84.36, "elapsed_time": "20:42:46", "remaining_time": "3:50:20"}
465
+ {"current_steps": 465, "total_steps": 550, "loss": 0.2596, "lr": 2.9519683912911267e-07, "epoch": 8.416289592760181, "percentage": 84.55, "elapsed_time": "20:45:20", "remaining_time": "3:47:38"}
466
+ {"current_steps": 466, "total_steps": 550, "loss": 0.2435, "lr": 2.8842786799286204e-07, "epoch": 8.434389140271493, "percentage": 84.73, "elapsed_time": "20:48:08", "remaining_time": "3:44:59"}
467
+ {"current_steps": 467, "total_steps": 550, "loss": 0.2386, "lr": 2.8173265361950837e-07, "epoch": 8.452488687782806, "percentage": 84.91, "elapsed_time": "20:50:44", "remaining_time": "3:42:17"}
468
+ {"current_steps": 468, "total_steps": 550, "loss": 0.231, "lr": 2.751114192969709e-07, "epoch": 8.470588235294118, "percentage": 85.09, "elapsed_time": "20:53:38", "remaining_time": "3:39:39"}
469
+ {"current_steps": 469, "total_steps": 550, "loss": 0.2477, "lr": 2.685643858459064e-07, "epoch": 8.48868778280543, "percentage": 85.27, "elapsed_time": "20:56:19", "remaining_time": "3:36:58"}
470
+ {"current_steps": 470, "total_steps": 550, "loss": 0.2504, "lr": 2.620917716123444e-07, "epoch": 8.506787330316742, "percentage": 85.45, "elapsed_time": "20:59:10", "remaining_time": "3:34:19"}
471
+ {"current_steps": 471, "total_steps": 550, "loss": 0.2545, "lr": 2.55693792460405e-07, "epoch": 8.524886877828054, "percentage": 85.64, "elapsed_time": "21:01:54", "remaining_time": "3:31:39"}
472
+ {"current_steps": 472, "total_steps": 550, "loss": 0.2462, "lr": 2.4937066176510123e-07, "epoch": 8.542986425339366, "percentage": 85.82, "elapsed_time": "21:04:35", "remaining_time": "3:28:58"}
473
+ {"current_steps": 473, "total_steps": 550, "loss": 0.2449, "lr": 2.4312259040522093e-07, "epoch": 8.561085972850679, "percentage": 86.0, "elapsed_time": "21:07:29", "remaining_time": "3:26:20"}
474
+ {"current_steps": 474, "total_steps": 550, "loss": 0.2422, "lr": 2.3694978675629476e-07, "epoch": 8.57918552036199, "percentage": 86.18, "elapsed_time": "21:10:20", "remaining_time": "3:23:41"}
475
+ {"current_steps": 475, "total_steps": 550, "loss": 0.2492, "lr": 2.3085245668364897e-07, "epoch": 8.597285067873303, "percentage": 86.36, "elapsed_time": "21:13:05", "remaining_time": "3:21:00"}
476
+ {"current_steps": 476, "total_steps": 550, "loss": 0.2435, "lr": 2.2483080353553537e-07, "epoch": 8.615384615384615, "percentage": 86.55, "elapsed_time": "21:16:03", "remaining_time": "3:18:22"}
477
+ {"current_steps": 477, "total_steps": 550, "loss": 0.2471, "lr": 2.1888502813635276e-07, "epoch": 8.633484162895927, "percentage": 86.73, "elapsed_time": "21:18:49", "remaining_time": "3:15:42"}
478
+ {"current_steps": 478, "total_steps": 550, "loss": 0.2367, "lr": 2.1301532877994747e-07, "epoch": 8.65158371040724, "percentage": 86.91, "elapsed_time": "21:21:30", "remaining_time": "3:13:01"}
479
+ {"current_steps": 479, "total_steps": 550, "loss": 0.2344, "lr": 2.0722190122300311e-07, "epoch": 8.669683257918551, "percentage": 87.09, "elapsed_time": "21:24:14", "remaining_time": "3:10:21"}
480
+ {"current_steps": 480, "total_steps": 550, "loss": 0.2394, "lr": 2.0150493867850867e-07, "epoch": 8.687782805429864, "percentage": 87.27, "elapsed_time": "21:26:55", "remaining_time": "3:07:40"}
481
+ {"current_steps": 481, "total_steps": 550, "loss": 0.242, "lr": 1.9586463180931658e-07, "epoch": 8.705882352941176, "percentage": 87.45, "elapsed_time": "21:29:58", "remaining_time": "3:05:02"}
482
+ {"current_steps": 482, "total_steps": 550, "loss": 0.2571, "lr": 1.9030116872178317e-07, "epoch": 8.723981900452488, "percentage": 87.64, "elapsed_time": "21:32:48", "remaining_time": "3:02:23"}
483
+ {"current_steps": 483, "total_steps": 550, "loss": 0.2457, "lr": 1.848147349594967e-07, "epoch": 8.742081447963802, "percentage": 87.82, "elapsed_time": "21:35:35", "remaining_time": "2:59:43"}
484
+ {"current_steps": 484, "total_steps": 550, "loss": 0.2351, "lr": 1.7940551349708734e-07, "epoch": 8.760180995475114, "percentage": 88.0, "elapsed_time": "21:38:12", "remaining_time": "2:57:01"}
485
+ {"current_steps": 485, "total_steps": 550, "loss": 0.2355, "lr": 1.7407368473412678e-07, "epoch": 8.778280542986426, "percentage": 88.18, "elapsed_time": "21:40:52", "remaining_time": "2:54:20"}
486
+ {"current_steps": 486, "total_steps": 550, "loss": 0.2287, "lr": 1.6881942648911077e-07, "epoch": 8.796380090497738, "percentage": 88.36, "elapsed_time": "21:43:50", "remaining_time": "2:51:42"}
487
+ {"current_steps": 487, "total_steps": 550, "loss": 0.2447, "lr": 1.6364291399352916e-07, "epoch": 8.81447963800905, "percentage": 88.55, "elapsed_time": "21:46:33", "remaining_time": "2:49:01"}
488
+ {"current_steps": 488, "total_steps": 550, "loss": 0.2431, "lr": 1.5854431988602175e-07, "epoch": 8.832579185520363, "percentage": 88.73, "elapsed_time": "21:49:30", "remaining_time": "2:46:22"}
489
+ {"current_steps": 489, "total_steps": 550, "loss": 0.2397, "lr": 1.5352381420662144e-07, "epoch": 8.850678733031675, "percentage": 88.91, "elapsed_time": "21:52:19", "remaining_time": "2:43:42"}
490
+ {"current_steps": 490, "total_steps": 550, "loss": 0.2291, "lr": 1.4858156439108097e-07, "epoch": 8.868778280542987, "percentage": 89.09, "elapsed_time": "21:54:53", "remaining_time": "2:41:00"}
491
+ {"current_steps": 491, "total_steps": 550, "loss": 0.2138, "lr": 1.4371773526529216e-07, "epoch": 8.886877828054299, "percentage": 89.27, "elapsed_time": "21:57:26", "remaining_time": "2:38:18"}
492
+ {"current_steps": 492, "total_steps": 550, "loss": 0.2248, "lr": 1.3893248903978695e-07, "epoch": 8.904977375565611, "percentage": 89.45, "elapsed_time": "22:00:09", "remaining_time": "2:35:37"}
493
+ {"current_steps": 493, "total_steps": 550, "loss": 0.2628, "lr": 1.342259853043279e-07, "epoch": 8.923076923076923, "percentage": 89.64, "elapsed_time": "22:03:09", "remaining_time": "2:32:58"}
494
+ {"current_steps": 494, "total_steps": 550, "loss": 0.2369, "lr": 1.2959838102258537e-07, "epoch": 8.941176470588236, "percentage": 89.82, "elapsed_time": "22:05:45", "remaining_time": "2:30:17"}
495
+ {"current_steps": 495, "total_steps": 550, "loss": 0.2371, "lr": 1.2504983052690406e-07, "epoch": 8.959276018099548, "percentage": 90.0, "elapsed_time": "22:08:27", "remaining_time": "2:27:36"}
496
+ {"current_steps": 496, "total_steps": 550, "loss": 0.2364, "lr": 1.2058048551315455e-07, "epoch": 8.97737556561086, "percentage": 90.18, "elapsed_time": "22:11:17", "remaining_time": "2:24:56"}
497
+ {"current_steps": 497, "total_steps": 550, "loss": 0.2473, "lr": 1.1619049503567486e-07, "epoch": 8.995475113122172, "percentage": 90.36, "elapsed_time": "22:14:07", "remaining_time": "2:22:16"}
498
+ {"current_steps": 498, "total_steps": 550, "loss": 0.2352, "lr": 1.1188000550230005e-07, "epoch": 9.013574660633484, "percentage": 90.55, "elapsed_time": "22:16:55", "remaining_time": "2:19:35"}
499
+ {"current_steps": 499, "total_steps": 550, "loss": 0.2641, "lr": 1.0764916066947795e-07, "epoch": 9.031674208144796, "percentage": 90.73, "elapsed_time": "22:19:57", "remaining_time": "2:16:56"}
500
+ {"current_steps": 500, "total_steps": 550, "loss": 0.2324, "lr": 1.0349810163747587e-07, "epoch": 9.049773755656108, "percentage": 90.91, "elapsed_time": "22:22:51", "remaining_time": "2:14:17"}
501
+ {"current_steps": 501, "total_steps": 550, "loss": 0.2433, "lr": 9.942696684567488e-08, "epoch": 9.06787330316742, "percentage": 91.09, "elapsed_time": "22:29:08", "remaining_time": "2:11:57"}
502
+ {"current_steps": 502, "total_steps": 550, "loss": 0.2374, "lr": 9.54358920679524e-08, "epoch": 9.085972850678733, "percentage": 91.27, "elapsed_time": "22:31:54", "remaining_time": "2:09:15"}
.ipynb_checkpoints/training_loss-checkpoint.png ADDED
README.md ADDED
@@ -0,0 +1,61 @@
+ ---
+ library_name: transformers
+ license: other
+ base_model: deepseek-ai/DeepSeek-R1-Distill-Qwen-7B
+ tags:
+ - llama-factory
+ - full
+ - generated_from_trainer
+ model-index:
+ - name: Light-R1-ly
+ results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # Light-R1
+
+ This model is a fine-tuned version of [deepseek-ai/DeepSeek-R1-Distill-Qwen-7B](https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-7B) on the light_r1_3k dataset.
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 5e-06
+ - train_batch_size: 2
+ - eval_batch_size: 8
+ - seed: 42
+ - distributed_type: multi-GPU
+ - num_devices: 2
+ - gradient_accumulation_steps: 16
+ - total_train_batch_size: 64
+ - total_eval_batch_size: 16
+ - optimizer: Adam with betas=(0.9,0.95) and epsilon=1e-08
+ - lr_scheduler_type: cosine
+ - lr_scheduler_warmup_ratio: 0.01
+ - num_epochs: 10.0
+
+ ### Training results
+
+
+
+ ### Framework versions
+
+ - Transformers 4.45.2
+ - Pytorch 2.6.0+cu124
+ - Datasets 3.1.0
+ - Tokenizers 0.20.3
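A minimal loading sketch to accompany the card above (not part of the commit). The repo id `Lingyue1/Light-R1-ly` is an assumption inferred from the model-index name; substitute a local path if needed:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Lingyue1/Light-R1-ly"  # assumed repo id; replace with your own path
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in config.json
    device_map="auto",           # requires accelerate
)
```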
all_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+ "epoch": 9.95475113122172,
+ "total_flos": 9.907464757911224e+17,
+ "train_loss": 0.2748682842471383,
+ "train_runtime": 89439.6385,
+ "train_samples_per_second": 0.395,
+ "train_steps_per_second": 0.006
+ }
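These summary numbers are mutually consistent: 0.395 samples/s × 89439.6 s ≈ 35,329 samples processed, which matches the trainer log's 550 optimizer steps × the effective batch size of 64 from the README (35,200), i.e. roughly 10 epochs over a ~3.5k-example dataset, as the light_r1_3k name suggests.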
config.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "_name_or_path": "/lamda12/zhouz/models/DeepSeek-R1-Distill-Qwen-7B",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151643,
+ "hidden_act": "silu",
+ "hidden_size": 3584,
+ "initializer_range": 0.02,
+ "intermediate_size": 18944,
+ "max_position_embeddings": 131072,
+ "max_window_layers": 28,
+ "model_type": "qwen2",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 10000,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.45.2",
+ "use_cache": false,
+ "use_mrope": false,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
generation_config.json ADDED
@@ -0,0 +1,9 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 151646,
+ "do_sample": true,
+ "eos_token_id": 151643,
+ "temperature": 0.6,
+ "top_p": 0.95,
+ "transformers_version": "4.45.2"
+ }
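A hedged generation sketch (not part of the commit) showing these defaults in use; `model.generate()` picks them up from `generation_config.json` automatically, but they are passed explicitly here for clarity. The repo id is the same assumption as above:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Lingyue1/Light-R1-ly"  # assumed repo id
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.bfloat16, device_map="auto"
)

messages = [{"role": "user", "content": "Solve: 12 * 13 = ?"}]
input_ids = tokenizer.apply_chat_template(messages, return_tensors="pt").to(model.device)
output = model.generate(
    input_ids,
    max_new_tokens=1024,
    do_sample=True,   # mirrors "do_sample": true
    temperature=0.6,  # mirrors "temperature": 0.6
    top_p=0.95,       # mirrors "top_p": 0.95
)
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```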
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14b0e0510ae5d1d9665ae6eeb4b397f1080a58a3f696e05a82e49163bad73e23
+ size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb20a381d87f3e7e7b419733cee855cd65e5c484c2d00480143f103ddbc5b482
+ size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2628b6cd837c291e1297d824dc4f3dfffb4cf371d7d0db46368e028309a842e0
+ size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a4198fe1b8fc8166040d3887dd7879a125cb000fbe6416b4f0753e091259e3f4
+ size 1089994880
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
+ {
+ "metadata": {
+ "total_size": 15231233024
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00004-of-00004.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
+ "model.norm.weight": "model-00003-of-00004.safetensors"
+ }
+ }
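A small sketch (not part of the commit) of how this index is typically consumed: `metadata.total_size` is the total tensor byte count, and `weight_map` tells a loader which shard file holds each tensor:

```python
import json

with open("model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])           # 15231233024 bytes of tensors
print(index["weight_map"]["model.norm.weight"])  # model-00003-of-00004.safetensors
```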
runs/Apr11_15-49-42_hx-rs4804g/events.out.tfevents.1744358013.hx-rs4804g ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c962556a10d64b48594de5fcc2d8c41816803baacb72d937909a989bca3e427d
+ size 5493
runs/Apr11_15-57-09_hx-rs4804g/events.out.tfevents.1744358435.hx-rs4804g ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd3e02017bd4af6fad3e52183fdfcac2d12ee06db8714ec5436521e38d6eff12
+ size 8389
runs/Apr11_16-24-16_hx-rs4804g/events.out.tfevents.1744360084.hx-rs4804g ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5c5f6201dc8629c9a89734dbb5ef68e2c50e5c7b206bf9356650aa358b50773f
+ size 541070
runs/Apr17_15-49-32_hx-rs4804g/events.out.tfevents.1744876389.hx-rs4804g ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4277121bff218bb169b2605322a847b52626db443d0acaff31520c144f23afb5
+ size 5699
runs/Apr17_16-15-17_hx-rs4804g/events.out.tfevents.1744877945.hx-rs4804g ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d63df54f51bc37810a5ede236e34ba4ba6262469c7502511d795bcaa69f02c8
+ size 5698
runs/Apr17_16-34-35_hx-rs4804g/events.out.tfevents.1744879059.hx-rs4804g ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f984a07f2c7f69790bc8397fdcde1035d11ac93893301c51c39df355adf0b34c
+ size 5492
runs/Apr17_16-47-38_hx-rs4804g/events.out.tfevents.1744879849.hx-rs4804g ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:160c7464019a9566baca8e198cac14ea42f824c8d459932b55fe7a4ac9f7efee
+ size 121181
special_tokens_map.json ADDED
@@ -0,0 +1,26 @@
+ {
+ "additional_special_tokens": [
+ "<|end▁of▁sentence|>"
+ ],
+ "bos_token": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
special_tokens_map_old.json ADDED
@@ -0,0 +1,32 @@
+ {
+ "additional_special_tokens": [
+ {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ ],
+ "bos_token": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7b1665d34943ed836d7c1277a72b52345855a4f7e295d475d00f9cd90af2891
+ size 11425024
tokenizer_config.json ADDED
@@ -0,0 +1,295 @@
+ {
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "add_prefix_space": null,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|end▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|User|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151645": {
+ "content": "<|Assistant|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151646": {
+ "content": "<|begin▁of▁sentence|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|EOT|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151648": {
+ "content": "<think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151649": {
+ "content": "</think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151665": {
+ "content": "<rephrase>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151666": {
+ "content": "</rephrase>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151667": {
+ "content": "<decompose>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151668": {
+ "content": "</decompose>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151669": {
+ "content": "<thought>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151670": {
+ "content": "</thought>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151671": {
+ "content": "<answer>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151672": {
+ "content": "</answer>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151673": {
+ "content": "<reflect>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151674": {
+ "content": "</reflect>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151675": {
+ "content": "<summarize>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151676": {
+ "content": "</summarize>",
+ "lstrip": false,
+ "normalized": true,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|end▁of▁sentence|>"
+ ],
+ "bos_token": "<|begin▁of▁sentence|>",
+ "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{{ '<|begin▁of▁sentence|>' }}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|User|>' + content + '<|Assistant|>' }}{% elif message['role'] == 'assistant' %}{{ content + '<|end▁of▁sentence|>' + '\n' }}{% endif %}{% endfor %}",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|end▁of▁sentence|>",
+ "legacy": true,
+ "model_max_length": 32768,
+ "pad_token": "<|end▁of▁sentence|>",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "split_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": null,
+ "use_default_system_prompt": false
+ }
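To make the `chat_template` above concrete, a minimal rendering sketch (not part of the commit; repo id assumed as before):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Lingyue1/Light-R1-ly")  # assumed repo id
messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Hi"},
]
text = tokenizer.apply_chat_template(messages, tokenize=False)
# Per the template, this yields:
# '<|begin▁of▁sentence|>You are a helpful assistant.<|User|>Hi<|Assistant|>'
print(text)
```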
tokenizer_config_ol.json ADDED
@@ -0,0 +1,295 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": null,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|end▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|User|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151645": {
+       "content": "<|Assistant|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151646": {
+       "content": "<|begin▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|EOT|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151648": {
+       "content": "<think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151649": {
+       "content": "</think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151650": {
+       "content": "<|quad_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|quad_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|vision_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|vision_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|vision_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|image_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|video_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151658": {
+       "content": "</tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151659": {
+       "content": "<|fim_prefix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151660": {
+       "content": "<|fim_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151661": {
+       "content": "<|fim_suffix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151662": {
+       "content": "<|fim_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151663": {
+       "content": "<|repo_name|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151664": {
+       "content": "<|file_sep|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151665": {
+       "content": "<rephrase>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151666": {
+       "content": "</rephrase>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151667": {
+       "content": "<decompose>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151668": {
+       "content": "</decompose>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151669": {
+       "content": "<thought>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151670": {
+       "content": "</thought>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151671": {
+       "content": "<answer>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151672": {
+       "content": "</answer>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151673": {
+       "content": "<reflect>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151674": {
+       "content": "</reflect>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151675": {
+       "content": "<summarize>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151676": {
+       "content": "</summarize>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|end▁of▁sentence|>"
+   ],
+   "bos_token": "<|begin▁of▁sentence|>",
+   "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|begin▁of▁sentence|><|User|>' + content + '<|Assistant|>' }}{% elif message['role'] == 'assistant' %}{{ content + '<|end▁of▁sentence|>' + '\n' }}{% endif %}{% endfor %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|end▁of▁sentence|>",
+   "legacy": true,
+   "model_max_length": 32768,
+   "pad_token": "<|end▁of▁sentence|>",
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "split_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": null,
+   "use_default_system_prompt": false
+ }
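For reference, below is a minimal sketch of how the `chat_template` in this config renders a conversation. It assumes the `transformers` library; the repo id `Lingyue1/Light-R1-ly` is an assumption inferred from this upload, so substitute a local path if it differs. Note that the template itself ends each user turn with `<|Assistant|>`, which acts as the generation prompt.

```python
# Minimal sketch, not the training code: render a prompt using the
# chat_template from tokenizer_config_ol.json above.
# The repo id below is an assumption inferred from this upload.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Lingyue1/Light-R1-ly")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What is 2 + 2?"},
]

# tokenize=False returns the raw prompt string; the template already appends
# '<|Assistant|>' after the user turn, so no extra generation prompt is needed.
prompt = tokenizer.apply_chat_template(messages, tokenize=False)
print(prompt)
# -> You are a helpful assistant.<|begin▁of▁sentence|><|User|>What is 2 + 2?<|Assistant|>
```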
tokenizer_config_old.json ADDED
@@ -0,0 +1,199 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": null,
+   "added_tokens_decoder": {
+     "151643": {
+       "content": "<|end▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151644": {
+       "content": "<|User|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151645": {
+       "content": "<|Assistant|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151646": {
+       "content": "<|begin▁of▁sentence|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151647": {
+       "content": "<|EOT|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151648": {
+       "content": "<think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151649": {
+       "content": "</think>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151650": {
+       "content": "<|quad_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151651": {
+       "content": "<|quad_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151652": {
+       "content": "<|vision_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151653": {
+       "content": "<|vision_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151654": {
+       "content": "<|vision_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151655": {
+       "content": "<|image_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151656": {
+       "content": "<|video_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "151657": {
+       "content": "<tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151658": {
+       "content": "</tool_call>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151659": {
+       "content": "<|fim_prefix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151660": {
+       "content": "<|fim_middle|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151661": {
+       "content": "<|fim_suffix|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151662": {
+       "content": "<|fim_pad|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151663": {
+       "content": "<|repo_name|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     },
+     "151664": {
+       "content": "<|file_sep|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [
+     "<|end▁of▁sentence|>"
+   ],
+   "bos_token": "<|begin▁of▁sentence|>",
+   "chat_template": "{% if messages[0]['role'] == 'system' %}{% set loop_messages = messages[1:] %}{% set system_message = messages[0]['content'] %}{% else %}{% set loop_messages = messages %}{% endif %}{% if system_message is defined %}{{ system_message }}{% endif %}{% for message in loop_messages %}{% set content = message['content'] %}{% if message['role'] == 'user' %}{{ '<|begin▁of▁sentence|><|User|>' + content + '<|Assistant|>' }}{% elif message['role'] == 'assistant' %}{{ content + '<|end▁of▁sentence|>' + '\n' }}{% endif %}{% endfor %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|end▁of▁sentence|>",
+   "legacy": true,
+   "model_max_length": 32768,
+   "pad_token": "<|end▁of▁sentence|>",
+   "padding_side": "right",
+   "sp_model_kwargs": {},
+   "split_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": null,
+   "use_default_system_prompt": false
+ }
tokenizer_old.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e20ddafc659ba90242154b55275402edeca0715e5dbb30f56815a4ce081f4893
+ size 11422778
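The three lines above are a Git LFS pointer: the repository stores only the SHA-256 digest and byte size, while the actual `tokenizer_old.json` blob lives in LFS storage. A minimal sketch (the local path is an assumption) for verifying a downloaded blob against this pointer:

```python
# Minimal sketch: recompute the SHA-256 digest and byte size of the
# downloaded file and compare them to the LFS pointer above.
import hashlib

def lfs_digest(path: str) -> tuple[str, int]:
    h = hashlib.sha256()
    size = 0
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # 1 MiB chunks
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest(), size

digest, size = lfs_digest("tokenizer_old.json")  # assumed local path
assert digest == "e20ddafc659ba90242154b55275402edeca0715e5dbb30f56815a4ce081f4893"
assert size == 11422778
```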
train_results.json ADDED
@@ -0,0 +1,8 @@
+ {
+   "epoch": 9.95475113122172,
+   "total_flos": 9.907464757911224e+17,
+   "train_loss": 0.2748682842471383,
+   "train_runtime": 89439.6385,
+   "train_samples_per_second": 0.395,
+   "train_steps_per_second": 0.006
+ }
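As a quick sanity check, the throughput figures above are consistent with the dataset size named in the model card. A back-of-envelope sketch (the per-second rates are rounded in the JSON, so the derived totals are approximate):

```python
# Back-of-envelope check of train_results.json: derive totals from the
# logged (rounded) per-second rates.
runtime_s = 89439.6385       # "train_runtime" in seconds (~24.8 h)
samples_per_s = 0.395        # "train_samples_per_second"
epochs = 9.95475113122172    # "epoch"

total_samples = samples_per_s * runtime_s   # ~35,329 samples processed
per_epoch = total_samples / epochs          # ~3,549 -> in line with the
                                            # ~3k examples of light_r1_3k
print(f"{total_samples:.0f} samples total, {per_epoch:.0f} per epoch")
```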
trainer_log.jsonl ADDED
The diff for this file is too large to render. See raw diff
 
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1cbcc4452a6ecea0347c57af406bccdfac56e5046f8f0b0e8d70d2cf677500fb
+ size 7352
training_loss.png ADDED