Commit 4b2d673 · verified · committed by andrew-healey
Parent(s): fd53f19

Upload folder using huggingface_hub
attention_kindselective_n_heads4_seed1344/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1344", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1344, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 3.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3.5e-5_61440_4_1344", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1344", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1344, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 4.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "4.5e-5_61440_4_1344", "n_embd": 256}
attention_kindselective_n_heads4_seed1344/log2.txt CHANGED
@@ -1,534 +1,267 @@
1
  max_steps: 8750
2
- max_steps: 8750
3
- 0 val loss 11.2831
4
- 0 val perplexity 79466.0234
5
  0 val loss 11.2831
6
- 0 val perplexity 79466.0234
7
- 0 train 11.285278 (lr=7.0000e-08) (hash(x)=145830960)
8
- 0 train 11.285278 (lr=6.0000e-08) (hash(x)=145830960)
9
- 100 val loss 10.1647
10
- 100 val perplexity 25971.0234
11
- 100 train 10.198525 (lr=7.0700e-06) (hash(x)=144157453)
12
- 100 val loss 10.2511
13
- 100 val perplexity 28312.6055
14
- 100 train 10.282244 (lr=6.0600e-06) (hash(x)=144157453)
15
- 200 val loss 9.7081
16
- 200 val perplexity 16450.9492
17
- 200 train 9.747164 (lr=1.4070e-05) (hash(x)=146764602)
18
- 200 val loss 9.8092
19
- 200 val perplexity 18200.1836
20
- 200 train 9.849621 (lr=1.2060e-05) (hash(x)=146764602)
21
- 300 val loss 8.8063
22
- 300 val perplexity 6676.1025
23
- 300 train 8.721718 (lr=2.1070e-05) (hash(x)=140559124)
24
- 300 val loss 9.0431
25
- 300 val perplexity 8460.3369
26
- 300 train 8.977057 (lr=1.8060e-05) (hash(x)=140559124)
27
- 400 val loss 7.9303
28
- 400 val perplexity 2780.2043
29
- 400 train 8.263488 (lr=2.8070e-05) (hash(x)=166024176)
30
- 400 val loss 8.1200
31
- 400 val perplexity 3361.0332
32
- 400 train 8.433102 (lr=2.4060e-05) (hash(x)=166024176)
33
- 500 val loss 7.5507
34
- 500 val perplexity 1902.1611
35
- 500 train 7.518233 (lr=3.5000e-05) (hash(x)=161040668)
36
- 500 val loss 7.6197
37
- 500 val perplexity 2037.9108
38
- 500 train 7.589498 (lr=3.0000e-05) (hash(x)=161040668)
39
- 600 val loss 7.4388
40
- 600 val perplexity 1700.7233
41
- 600 train 7.468059 (lr=3.4989e-05) (hash(x)=148878990)
42
- 600 val loss 7.4876
43
- 600 val perplexity 1785.7812
44
- 600 train 7.517530 (lr=2.9990e-05) (hash(x)=148878990)
45
- 700 val loss 7.3485
46
- 700 val perplexity 1553.8051
47
- 700 train 7.329822 (lr=3.4954e-05) (hash(x)=150420695)
48
- 700 val loss 7.4002
49
- 700 val perplexity 1636.2662
50
- 700 train 7.380228 (lr=2.9961e-05) (hash(x)=150420695)
51
- 800 val loss 7.2750
52
- 800 val perplexity 1443.7603
53
- 800 train 7.251266 (lr=3.4897e-05) (hash(x)=151995229)
54
- 800 val loss 7.3320
55
- 800 val perplexity 1528.5052
56
- 800 train 7.302983 (lr=2.9912e-05) (hash(x)=151995229)
57
- 900 val loss 7.2145
58
- 900 val perplexity 1358.9803
59
- 900 train 6.968035 (lr=3.4818e-05) (hash(x)=139853932)
60
- 900 val loss 7.2744
61
- 900 val perplexity 1442.9172
62
- 900 train 7.033135 (lr=2.9844e-05) (hash(x)=139853932)
63
- 1000 val loss 7.1335
64
- 1000 val perplexity 1253.2953
65
- 1000 train 7.335016 (lr=3.4715e-05) (hash(x)=174104207)
66
- 1000 val loss 7.2135
67
- 1000 val perplexity 1357.5917
68
- 1000 train 7.412142 (lr=2.9756e-05) (hash(x)=174104207)
69
- 1100 val loss 7.0682
70
- 1100 val perplexity 1174.0667
71
- 1100 train 6.900183 (lr=3.4591e-05) (hash(x)=146275038)
72
- 1100 val loss 7.1555
73
- 1100 val perplexity 1281.1627
74
- 1100 train 6.990688 (lr=2.9649e-05) (hash(x)=146275038)
75
- 1200 val loss 7.0004
76
- 1200 val perplexity 1097.0662
77
- 1200 train 6.771210 (lr=3.4444e-05) (hash(x)=141403655)
78
- 1200 val loss 7.1005
79
- 1200 val perplexity 1212.6289
80
- 1200 train 6.876398 (lr=2.9523e-05) (hash(x)=141403655)
81
- 1300 val loss 6.9292
82
- 1300 val perplexity 1021.7067
83
- 1300 train 7.012178 (lr=3.4275e-05) (hash(x)=173675040)
84
- 1300 val loss 7.0356
85
- 1300 val perplexity 1136.3947
86
- 1300 train 7.122135 (lr=2.9378e-05) (hash(x)=173675040)
87
- 1400 val loss 6.8715
88
- 1400 val perplexity 964.4235
89
- 1400 train 6.844200 (lr=3.4084e-05) (hash(x)=162475906)
90
- 1400 val loss 6.9720
91
- 1400 val perplexity 1066.3026
92
- 1400 train 6.941983 (lr=2.9215e-05) (hash(x)=162475906)
93
- 1500 val loss 6.8163
94
- 1500 val perplexity 912.6173
95
- 1500 train 6.646946 (lr=3.3872e-05) (hash(x)=152785612)
96
- 1500 val loss 6.9207
97
- 1500 val perplexity 1013.0270
98
- 1500 train 6.750599 (lr=2.9033e-05) (hash(x)=152785612)
99
- 1600 val loss 6.7411
100
- 1600 val perplexity 846.5029
101
- 1600 train 6.695332 (lr=3.3638e-05) (hash(x)=151987383)
102
- 1600 val loss 6.8430
103
- 1600 val perplexity 937.3285
104
- 1600 train 6.799621 (lr=2.8833e-05) (hash(x)=151987383)
105
- 1700 val loss 6.6802
106
- 1700 val perplexity 796.5001
107
- 1700 val loss 6.7791
108
- 1700 val perplexity 879.2971
109
- 1700 train 6.662879 (lr=3.3384e-05) (hash(x)=143781605)
110
- 1700 train 6.760727 (lr=2.8615e-05) (hash(x)=143781605)
111
- 1800 val loss 6.6307
112
- 1800 val perplexity 758.0203
113
- 1800 val loss 6.7255
114
- 1800 val perplexity 833.4037
115
- 1800 train 6.614835 (lr=3.3109e-05) (hash(x)=151040203)
116
- 1800 train 6.708007 (lr=2.8379e-05) (hash(x)=151040203)
117
- 1900 val loss 6.5788
118
- 1900 val perplexity 719.6393
119
- 1900 val loss 6.6702
120
- 1900 val perplexity 788.5310
121
- 1900 train 6.482689 (lr=3.2814e-05) (hash(x)=149414572)
122
- 1900 train 6.571493 (lr=2.8127e-05) (hash(x)=149414572)
123
- 2000 val loss 6.6195
124
- 2000 val perplexity 749.5918
125
- 2000 val loss 6.5286
126
- 2000 val perplexity 684.4636
127
- 2000 train 6.576480 (lr=2.7857e-05) (hash(x)=156479674)
128
- 2000 train 6.482908 (lr=3.2500e-05) (hash(x)=156479674)
129
- 2100 val loss 6.4825
130
- 2100 val perplexity 653.5756
131
- 2100 val loss 6.5620
132
- 2100 val perplexity 707.6946
133
- 2100 train 6.345187 (lr=3.2166e-05) (hash(x)=137861481)
134
- 2100 train 6.429942 (lr=2.7571e-05) (hash(x)=137861481)
135
- 2200 val loss 6.4416
136
- 2200 val perplexity 627.3801
137
- 2200 val loss 6.5179
138
- 2200 val perplexity 677.1461
139
- 2200 train 6.499350 (lr=3.1813e-05) (hash(x)=155937443)
140
- 2200 train 6.576139 (lr=2.7269e-05) (hash(x)=155937443)
141
- 2300 val loss 6.4203
142
- 2300 val perplexity 614.1733
143
- 2300 val loss 6.4995
144
- 2300 val perplexity 664.7992
145
- 2300 train 6.101424 (lr=3.1443e-05) (hash(x)=131035715)
146
- 2300 train 6.185195 (lr=2.6951e-05) (hash(x)=131035715)
147
- 2400 val loss 6.3757
148
- 2400 val perplexity 587.4105
149
- 2400 val loss 6.4501
150
- 2400 val perplexity 632.7535
151
- 2400 train 6.374401 (lr=3.1054e-05) (hash(x)=146568981)
152
- 2400 train 6.447587 (lr=2.6618e-05) (hash(x)=146568981)
153
- 2500 val loss 6.3448
154
- 2500 val perplexity 569.5289
155
- 2500 val loss 6.4192
156
- 2500 val perplexity 613.5393
157
- 2500 train 6.317100 (lr=3.0649e-05) (hash(x)=163317586)
158
- 2500 train 6.389849 (lr=2.6270e-05) (hash(x)=163317586)
159
- 2600 val loss 6.3181
160
- 2600 val perplexity 554.5211
161
- 2600 val loss 6.3867
162
- 2600 val perplexity 593.9120
163
- 2600 train 6.141277 (lr=3.0227e-05) (hash(x)=144201060)
164
- 2600 train 6.207941 (lr=2.5909e-05) (hash(x)=144201060)
165
- 2700 val loss 6.2931
166
- 2700 val perplexity 540.8275
167
- 2700 val loss 6.3637
168
- 2700 val perplexity 580.3699
169
- 2700 train 6.193167 (lr=2.9789e-05) (hash(x)=141825701)
170
- 2700 train 6.259410 (lr=2.5533e-05) (hash(x)=141825701)
171
- 2800 val loss 6.2649
172
- 2800 val perplexity 525.7846
173
- 2800 val loss 6.3332
174
- 2800 val perplexity 562.9761
175
- 2800 train 6.384350 (lr=2.9336e-05) (hash(x)=160561627)
176
- 2800 train 6.452592 (lr=2.5145e-05) (hash(x)=160561627)
177
- 2900 val loss 6.2419
178
- 2900 val perplexity 513.8532
179
- 2900 val loss 6.3035
180
- 2900 val perplexity 546.5074
181
- 2900 train 6.189714 (lr=2.8868e-05) (hash(x)=151758176)
182
- 2900 train 6.255837 (lr=2.4744e-05) (hash(x)=151758176)
183
- 3000 val loss 6.2147
184
- 3000 val perplexity 500.0573
185
- 3000 val loss 6.2803
186
- 3000 val perplexity 533.9685
187
- 3000 train 6.226311 (lr=2.8386e-05) (hash(x)=155815751)
188
- 3000 train 6.286588 (lr=2.4331e-05) (hash(x)=155815751)
189
- 3100 val loss 6.2016
190
- 3100 val perplexity 493.5247
191
- 3100 val loss 6.2672
192
- 3100 val perplexity 527.0150
193
- 3100 train 5.858310 (lr=2.7891e-05) (hash(x)=142307043)
194
- 3100 train 5.926589 (lr=2.3906e-05) (hash(x)=142307043)
195
- 3200 val loss 6.1807
196
- 3200 val perplexity 483.3375
197
- 3200 train 6.293739 (lr=2.7383e-05) (hash(x)=156310690)
198
- 3200 val loss 6.2449
199
- 3200 val perplexity 515.3918
200
- 3200 train 6.361027 (lr=2.3471e-05) (hash(x)=156310690)
201
- 3300 val loss 6.2148
202
- 3300 val perplexity 500.1102
203
- 3300 val loss 6.1538
204
- 3300 val perplexity 470.4862
205
- 3300 train 6.216784 (lr=2.3026e-05) (hash(x)=151344506)
206
- 3300 train 6.156497 (lr=2.6864e-05) (hash(x)=151344506)
207
- 3400 val loss 6.2043
208
- 3400 val perplexity 494.8538
209
- 3400 val loss 6.1388
210
- 3400 val perplexity 463.4761
211
- 3400 train 6.240551 (lr=2.2572e-05) (hash(x)=162911881)
212
- 3400 train 6.174221 (lr=2.6333e-05) (hash(x)=162911881)
213
- 3500 val loss 6.1855
214
- 3500 val perplexity 485.6743
215
- 3500 val loss 6.1274
216
- 3500 val perplexity 458.2211
217
- 3500 train 6.123961 (lr=2.2108e-05) (hash(x)=142889971)
218
- 3500 train 6.059972 (lr=2.5793e-05) (hash(x)=142889971)
219
- 3600 val loss 6.1675
220
- 3600 val perplexity 476.9710
221
- 3600 val loss 6.1050
222
- 3600 val perplexity 448.1018
223
- 3600 train 6.160964 (lr=2.1637e-05) (hash(x)=149470354)
224
- 3600 train 6.092852 (lr=2.5243e-05) (hash(x)=149470354)
225
- 3700 val loss 6.1523
226
- 3700 val perplexity 469.7756
227
- 3700 val loss 6.0880
228
- 3700 val perplexity 440.5436
229
- 3700 train 6.096142 (lr=2.1158e-05) (hash(x)=154194821)
230
- 3700 train 6.030409 (lr=2.4684e-05) (hash(x)=154194821)
231
- 3800 val loss 6.1387
232
- 3800 val perplexity 463.4522
233
- 3800 val loss 6.0779
234
- 3800 val perplexity 436.1298
235
- 3800 train 6.069734 (lr=2.0672e-05) (hash(x)=148885848)
236
- 3800 train 6.011635 (lr=2.4117e-05) (hash(x)=148885848)
237
- 3900 val loss 6.1210
238
- 3900 val perplexity 455.3399
239
- 3900 val loss 6.0583
240
- 3900 val perplexity 427.6662
241
- 3900 train 6.032857 (lr=2.0180e-05) (hash(x)=148308484)
242
- 3900 train 5.971268 (lr=2.3543e-05) (hash(x)=148308484)
243
- 4000 val loss 6.1042
244
- 4000 val perplexity 447.7170
245
- 4000 val loss 6.0434
246
- 4000 val perplexity 421.3136
247
- 4000 train 5.941088 (lr=1.9683e-05) (hash(x)=139828564)
248
- 4000 train 5.881995 (lr=2.2963e-05) (hash(x)=139828564)
249
- 4100 val loss 6.0952
250
- 4100 val perplexity 443.7090
251
- 4100 val loss 6.0326
252
- 4100 val perplexity 416.8078
253
- 4100 train 6.002620 (lr=1.9181e-05) (hash(x)=139981997)
254
- 4100 train 5.940226 (lr=2.2378e-05) (hash(x)=139981997)
255
- 4200 val loss 6.0805
256
- 4200 val perplexity 437.2441
257
- 4200 train 6.236762 (lr=1.8675e-05) (hash(x)=150738447)
258
- 4200 val loss 6.0164
259
- 4200 val perplexity 410.0989
260
- 4200 train 6.170493 (lr=2.1788e-05) (hash(x)=150738447)
261
- 4300 val loss 6.0659
262
- 4300 val perplexity 430.9070
263
- 4300 train 5.848683 (lr=1.8166e-05) (hash(x)=142198107)
264
- 4300 val loss 6.0055
265
- 4300 val perplexity 405.6398
266
- 4300 train 5.784900 (lr=2.1194e-05) (hash(x)=142198107)
267
- 4400 val loss 6.0590
268
- 4400 val perplexity 427.9448
269
- 4400 train 5.930000 (lr=1.7655e-05) (hash(x)=142731201)
270
- 4400 val loss 5.9963
271
- 4400 val perplexity 401.9518
272
- 4400 train 5.867516 (lr=2.0598e-05) (hash(x)=142731201)
273
- 4500 val loss 6.0420
274
- 4500 val perplexity 420.7414
275
- 4500 train 6.054802 (lr=1.7142e-05) (hash(x)=154814426)
276
- 4500 val loss 5.9799
277
- 4500 val perplexity 395.4100
278
- 4500 train 5.997611 (lr=1.9999e-05) (hash(x)=154814426)
279
- 4600 val loss 6.0305
280
- 4600 val perplexity 415.9094
281
- 4600 train 5.998842 (lr=1.6629e-05) (hash(x)=155922230)
282
- 4600 val loss 5.9672
283
- 4600 val perplexity 390.4033
284
- 4600 train 5.934934 (lr=1.9400e-05) (hash(x)=155922230)
285
- 4700 val loss 6.0251
286
- 4700 val perplexity 413.6869
287
- 4700 train 5.771210 (lr=1.6114e-05) (hash(x)=139398510)
288
- 4700 val loss 5.9632
289
- 4700 val perplexity 388.8658
290
- 4700 train 5.711072 (lr=1.8800e-05) (hash(x)=139398510)
291
- 4800 val loss 6.0116
292
- 4800 val perplexity 408.1422
293
- 4800 train 6.080204 (lr=1.5601e-05) (hash(x)=140893236)
294
- 4800 val loss 5.9466
295
- 4800 val perplexity 382.4628
296
- 4800 train 6.004228 (lr=1.8201e-05) (hash(x)=140893236)
297
- 4900 val loss 5.9966
298
- 4900 val perplexity 402.0549
299
- 4900 val loss 5.9329
300
- 4900 val perplexity 377.2361
301
- 4900 train 5.900732 (lr=1.5089e-05) (hash(x)=153747830)
302
- 4900 train 5.829324 (lr=1.7604e-05) (hash(x)=153747830)
303
- 5000 val loss 5.9886
304
- 5000 val perplexity 398.8546
305
- 5000 val loss 5.9249
306
- 5000 val perplexity 374.2589
307
- 5000 train 6.216745 (lr=1.4579e-05) (hash(x)=148919005)
308
- 5000 train 6.162242 (lr=1.7009e-05) (hash(x)=148919005)
309
- 5100 val loss 5.9818
310
- 5100 val perplexity 396.1589
311
- 5100 val loss 5.9171
312
- 5100 val perplexity 371.3396
313
- 5100 train 5.771548 (lr=1.4071e-05) (hash(x)=142281936)
314
- 5100 train 5.707696 (lr=1.6417e-05) (hash(x)=142281936)
315
- 5200 val loss 5.9826
316
- 5200 val perplexity 396.4724
317
- 5200 val loss 5.9180
318
- 5200 val perplexity 371.6624
319
- 5200 train 5.790337 (lr=1.3568e-05) (hash(x)=143162650)
320
- 5200 train 5.723374 (lr=1.5829e-05) (hash(x)=143162650)
321
- 5300 val loss 5.9621
322
- 5300 val perplexity 388.4120
323
- 5300 val loss 5.8968
324
- 5300 val perplexity 363.8678
325
- 5300 train 6.020540 (lr=1.3068e-05) (hash(x)=148546849)
326
- 5300 train 5.958957 (lr=1.5246e-05) (hash(x)=148546849)
327
- 5400 val loss 5.9524
328
- 5400 val perplexity 384.6609
329
- 5400 val loss 5.8871
330
- 5400 val perplexity 360.3507
331
- 5400 train 5.812173 (lr=1.2573e-05) (hash(x)=143492259)
332
- 5400 train 5.746771 (lr=1.4669e-05) (hash(x)=143492259)
333
- 5500 val loss 5.9471
334
- 5500 val perplexity 382.6500
335
- 5500 val loss 5.8826
336
- 5500 val perplexity 358.7458
337
- 5500 train 5.701526 (lr=1.2085e-05) (hash(x)=141023941)
338
- 5500 train 5.634879 (lr=1.4099e-05) (hash(x)=141023941)
339
- 5600 val loss 5.9471
340
- 5600 val perplexity 382.6252
341
- 5600 val loss 5.8808
342
- 5600 val perplexity 358.0882
343
- 5600 train 5.817373 (lr=1.1602e-05) (hash(x)=142065021)
344
- 5600 train 5.754811 (lr=1.3536e-05) (hash(x)=142065021)
345
- 5700 val loss 5.9307
346
- 5700 val perplexity 376.4344
347
- 5700 val loss 5.8645
348
- 5700 val perplexity 352.3100
349
- 5700 train 5.791829 (lr=1.1127e-05) (hash(x)=145749913)
350
- 5700 train 5.731421 (lr=1.2981e-05) (hash(x)=145749913)
351
- 5800 val loss 5.8578
352
- 5800 val perplexity 349.9652
353
- 5800 val loss 5.9241
354
- 5800 val perplexity 373.9264
355
- 5800 train 5.537057 (lr=1.2436e-05) (hash(x)=140035522)
356
- 5800 train 5.610334 (lr=1.0659e-05) (hash(x)=140035522)
357
- 5900 val loss 5.8524
358
- 5900 val perplexity 348.0614
359
- 5900 val loss 5.9200
360
- 5900 val perplexity 372.4236
361
- 5900 train 5.614779 (lr=1.1900e-05) (hash(x)=154752726)
362
- 5900 train 5.682038 (lr=1.0200e-05) (hash(x)=154752726)
363
- 6000 val loss 5.8496
364
- 6000 val perplexity 347.1107
365
- 6000 val loss 5.9171
366
- 6000 val perplexity 371.3166
367
- 6000 train 5.874280 (lr=1.1375e-05) (hash(x)=158715824)
368
- 6000 train 5.940979 (lr=9.7500e-06) (hash(x)=158715824)
369
- 6100 val loss 5.8378
370
- 6100 val perplexity 343.0087
371
- 6100 val loss 5.9049
372
- 6100 val perplexity 366.8322
373
- 6100 train 5.851554 (lr=1.0861e-05) (hash(x)=137413820)
374
- 6100 train 5.914152 (lr=9.3098e-06) (hash(x)=137413820)
375
- 6200 val loss 5.8303
376
- 6200 val perplexity 340.4680
377
- 6200 train 5.751894 (lr=1.0360e-05) (hash(x)=151507523)
378
- 6200 val loss 5.8989
379
- 6200 val perplexity 364.6396
380
- 6200 train 5.823660 (lr=8.8800e-06) (hash(x)=151507523)
381
- 6300 val loss 5.8296
382
- 6300 val perplexity 340.2184
383
- 6300 train 5.600849 (lr=9.8715e-06) (hash(x)=147514617)
384
- 6300 val loss 5.8977
385
- 6300 val perplexity 364.1924
386
- 6300 train 5.668419 (lr=8.4613e-06) (hash(x)=147514617)
387
- 6400 val loss 5.8260
388
- 6400 val perplexity 338.9898
389
- 6400 train 5.836993 (lr=9.3966e-06) (hash(x)=151604465)
390
- 6400 val loss 5.8942
391
- 6400 val perplexity 362.9248
392
- 6400 train 5.905121 (lr=8.0542e-06) (hash(x)=151604465)
393
- 6500 val loss 5.8152
394
- 6500 val perplexity 335.3742
395
- 6500 train 5.872994 (lr=8.9359e-06) (hash(x)=144515881)
396
- 6500 val loss 5.8838
397
- 6500 val perplexity 359.1624
398
- 6500 train 5.940874 (lr=7.6594e-06) (hash(x)=144515881)
399
- 6600 val loss 5.8092
400
- 6600 val perplexity 333.3445
401
- 6600 train 5.602404 (lr=8.4903e-06) (hash(x)=136948374)
402
- 6600 val loss 5.8793
403
- 6600 val perplexity 357.5739
404
- 6600 train 5.664325 (lr=7.2774e-06) (hash(x)=136948374)
405
- 6700 val loss 5.8089
406
- 6700 val perplexity 333.2539
407
- 6700 train 5.620818 (lr=8.0602e-06) (hash(x)=146268592)
408
- 6700 val loss 5.8775
409
- 6700 val perplexity 356.8992
410
- 6700 train 5.695412 (lr=6.9087e-06) (hash(x)=146268592)
411
- 6800 val loss 5.8009
412
- 6800 val perplexity 330.5903
413
- 6800 train 5.757844 (lr=7.6463e-06) (hash(x)=152676836)
414
- 6800 val loss 5.8705
415
- 6800 val perplexity 354.4432
416
- 6800 train 5.824782 (lr=6.5540e-06) (hash(x)=152676836)
417
- 6900 val loss 5.7935
418
- 6900 val perplexity 328.1473
419
- 6900 train 5.711613 (lr=7.2493e-06) (hash(x)=134657776)
420
- 6900 val loss 5.8644
421
- 6900 val perplexity 352.2704
422
- 6900 train 5.785072 (lr=6.2137e-06) (hash(x)=134657776)
423
- 7000 val loss 5.7921
424
- 7000 val perplexity 327.6984
425
- 7000 train 5.767048 (lr=6.8697e-06) (hash(x)=166721861)
426
- 7000 val loss 5.8625
427
- 7000 val perplexity 351.6026
428
- 7000 train 5.835593 (lr=5.8883e-06) (hash(x)=166721861)
429
- 7100 val loss 5.7891
430
- 7100 val perplexity 326.7293
431
- 7100 train 5.594516 (lr=6.5080e-06) (hash(x)=135496702)
432
- 7100 val loss 5.8591
433
- 7100 val perplexity 350.4164
434
- 7100 train 5.666256 (lr=5.5783e-06) (hash(x)=135496702)
435
- 7200 val loss 5.7857
436
- 7200 val perplexity 325.5949
437
- 7200 train 5.894931 (lr=6.1648e-06) (hash(x)=155567461)
438
- 7200 val loss 5.8553
439
- 7200 val perplexity 349.0879
440
- 7200 train 5.959052 (lr=5.2841e-06) (hash(x)=155567461)
441
- 7300 val loss 5.7788
442
- 7300 val perplexity 323.3828
443
- 7300 train 5.581634 (lr=5.8405e-06) (hash(x)=142803829)
444
- 7300 val loss 5.8498
445
- 7300 val perplexity 347.1809
446
- 7300 train 5.649696 (lr=5.0062e-06) (hash(x)=142803829)
447
- 7400 val loss 5.7771
448
- 7400 val perplexity 322.8163
449
- 7400 train 5.600485 (lr=5.5357e-06) (hash(x)=145294178)
450
- 7400 val loss 5.8478
451
- 7400 val perplexity 346.4585
452
- 7400 train 5.666378 (lr=4.7449e-06) (hash(x)=145294178)
453
- 7500 val loss 5.7761
454
- 7500 val perplexity 322.5064
455
- 7500 train 5.544384 (lr=5.2508e-06) (hash(x)=150573713)
456
- 7500 val loss 5.8469
457
- 7500 val perplexity 346.1439
458
- 7500 train 5.614801 (lr=4.5007e-06) (hash(x)=150573713)
459
- 7600 val loss 5.7728
460
- 7600 val perplexity 321.4253
461
- 7600 train 5.784178 (lr=4.9862e-06) (hash(x)=142771511)
462
- 7600 val loss 5.8438
463
- 7600 val perplexity 345.0785
464
- 7600 train 5.856913 (lr=4.2739e-06) (hash(x)=142771511)
465
- 7700 val loss 5.7669
466
- 7700 val perplexity 319.5430
467
- 7700 train 5.666105 (lr=4.7423e-06) (hash(x)=143602175)
468
- 7700 val loss 5.8380
469
- 7700 val perplexity 343.1030
470
- 7700 train 5.733759 (lr=4.0648e-06) (hash(x)=143602175)
471
- 7800 val loss 5.7637
472
- 7800 val perplexity 318.5268
473
- 7800 train 5.804039 (lr=4.5194e-06) (hash(x)=152379862)
474
- 7800 val loss 5.8356
475
- 7800 val perplexity 342.2685
476
- 7800 train 5.870350 (lr=3.8738e-06) (hash(x)=152379862)
477
- 7900 val loss 5.7638
478
- 7900 val perplexity 318.5619
479
- 7900 train 5.605504 (lr=4.3179e-06) (hash(x)=146655921)
480
- 7900 val loss 5.8343
481
- 7900 val perplexity 341.8145
482
- 7900 train 5.679904 (lr=3.7010e-06) (hash(x)=146655921)
483
- 8000 val loss 5.7638
484
- 8000 val perplexity 318.5710
485
- 8000 train 5.879199 (lr=4.1380e-06) (hash(x)=148262482)
486
- 8000 val loss 5.8350
487
- 8000 val perplexity 342.0742
488
- 8000 train 5.947587 (lr=3.5468e-06) (hash(x)=148262482)
489
- 8100 val loss 5.7564
490
- 8100 val perplexity 316.1968
491
- 8100 train 5.696403 (lr=3.9800e-06) (hash(x)=147683655)
492
- 8100 val loss 5.8278
493
- 8100 val perplexity 339.6119
494
- 8100 train 5.762560 (lr=3.4114e-06) (hash(x)=147683655)
495
- 8200 val loss 5.8263
496
- 8200 val perplexity 339.1142
497
- 8200 train 5.888067 (lr=3.2950e-06) (hash(x)=164975934)
498
- 8200 val loss 5.7551
499
- 8200 val perplexity 315.8080
500
- 8200 train 5.818132 (lr=3.8442e-06) (hash(x)=164975934)
501
- 8300 val loss 5.8229
502
- 8300 val perplexity 337.9619
503
- 8300 train 5.741323 (lr=3.1977e-06) (hash(x)=145300550)
504
- 8300 val loss 5.7525
505
- 8300 val perplexity 314.9815
506
- 8300 train 5.667071 (lr=3.7307e-06) (hash(x)=145300550)
507
- 8400 val loss 5.8219
508
- 8400 val perplexity 337.6240
509
- 8400 train 5.738000 (lr=3.1197e-06) (hash(x)=150679400)
510
- 8400 val loss 5.7511
511
- 8400 val perplexity 314.5375
512
- 8400 train 5.666170 (lr=3.6397e-06) (hash(x)=150679400)
513
- 8500 val loss 5.8224
514
- 8500 val perplexity 337.7816
515
- 8500 train 5.843013 (lr=3.0611e-06) (hash(x)=164109401)
516
- 8500 val loss 5.7511
517
- 8500 val perplexity 314.5462
518
- 8500 train 5.767133 (lr=3.5713e-06) (hash(x)=164109401)
519
- 8600 val loss 5.8202
520
- 8600 val perplexity 337.0385
521
- 8600 train 5.823380 (lr=3.0220e-06) (hash(x)=161036376)
522
- 8600 val loss 5.7481
523
- 8600 val perplexity 313.5803
524
- 8600 train 5.730533 (lr=3.5257e-06) (hash(x)=161036376)
525
- 8700 val loss 5.8156
526
- 8700 val perplexity 335.5001
527
- 8700 train 5.750320 (lr=3.0024e-06) (hash(x)=153828820)
528
- 8700 val loss 5.7438
529
- 8700 val perplexity 312.2529
530
- 8700 train 5.674262 (lr=3.5029e-06) (hash(x)=153828820)
531
- 8749 val loss 5.8145
532
- 8749 val perplexity 335.1370
533
- 8749 val loss 5.7424
534
- 8749 val perplexity 311.8204
 
1
  max_steps: 8750
 
 
 
2
  0 val loss 11.2831
3
+ 0 val perplexity 79465.6484
4
+ 0 train 11.285273 (lr=9.0000e-08) (hash(x)=145830960)
5
+ 100 val loss 10.0638
6
+ 100 val perplexity 23478.2871
7
+ 100 train 10.097435 (lr=9.0900e-06) (hash(x)=144157453)
8
+ 200 val loss 9.5202
9
+ 200 val perplexity 13631.6973
10
+ 200 train 9.558424 (lr=1.8090e-05) (hash(x)=146764602)
11
+ 300 val loss 8.5081
12
+ 300 val perplexity 4954.6196
13
+ 300 train 8.403627 (lr=2.7090e-05) (hash(x)=140559124)
14
+ 400 val loss 7.7386
15
+ 400 val perplexity 2295.2202
16
+ 400 train 8.096967 (lr=3.6090e-05) (hash(x)=166024176)
17
+ 500 val loss 7.4794
18
+ 500 val perplexity 1771.2329
19
+ 500 train 7.450180 (lr=4.5000e-05) (hash(x)=161040668)
20
+ 600 val loss 7.3790
21
+ 600 val perplexity 1601.9789
22
+ 600 train 7.393307 (lr=4.4985e-05) (hash(x)=148878990)
23
+ 700 val loss 7.2895
24
+ 700 val perplexity 1464.8370
25
+ 700 train 7.270169 (lr=4.4941e-05) (hash(x)=150420695)
26
+ 800 val loss 7.1978
27
+ 800 val perplexity 1336.4874
28
+ 800 train 7.170339 (lr=4.4868e-05) (hash(x)=151995229)
29
+ 900 val loss 7.1269
30
+ 900 val perplexity 1244.9921
31
+ 900 train 6.876092 (lr=4.4766e-05) (hash(x)=139853932)
32
+ 1000 val loss 7.0380
33
+ 1000 val perplexity 1139.1506
34
+ 1000 train 7.244468 (lr=4.4634e-05) (hash(x)=174104207)
35
+ 1100 val loss 6.9615
36
+ 1100 val perplexity 1055.2466
37
+ 1100 train 6.792953 (lr=4.4474e-05) (hash(x)=146275038)
38
+ 1200 val loss 6.8973
39
+ 1200 val perplexity 989.6367
40
+ 1200 train 6.665742 (lr=4.4285e-05) (hash(x)=141403655)
41
+ 1300 val loss 6.8148
42
+ 1300 val perplexity 911.2523
43
+ 1300 train 6.898851 (lr=4.4068e-05) (hash(x)=173675040)
44
+ 1400 val loss 6.7374
45
+ 1400 val perplexity 843.3451
46
+ 1400 train 6.708657 (lr=4.3822e-05) (hash(x)=162475906)
47
+ 1500 val loss 6.6799
48
+ 1500 val perplexity 796.2518
49
+ 1500 train 6.506414 (lr=4.3549e-05) (hash(x)=152785612)
50
+ 1600 val loss 6.6006
51
+ 1600 val perplexity 735.5471
52
+ 1600 train 6.552176 (lr=4.3249e-05) (hash(x)=151987383)
53
+ 1700 val loss 6.5444
54
+ 1700 val perplexity 695.3262
55
+ 1700 train 6.532230 (lr=4.2922e-05) (hash(x)=143781605)
56
+ 1800 val loss 6.4969
57
+ 1800 val perplexity 663.1117
58
+ 1800 train 6.485873 (lr=4.2569e-05) (hash(x)=151040203)
59
+ 1900 val loss 6.4521
60
+ 1900 val perplexity 634.0513
61
+ 1900 train 6.349230 (lr=4.2190e-05) (hash(x)=149414572)
62
+ 2000 val loss 6.4051
63
+ 2000 val perplexity 604.9101
64
+ 2000 train 6.363278 (lr=4.1785e-05) (hash(x)=156479674)
65
+ 2100 val loss 6.3630
66
+ 2100 val perplexity 579.9941
67
+ 2100 train 6.231485 (lr=4.1356e-05) (hash(x)=137861481)
68
+ 2200 val loss 6.3339
69
+ 2200 val perplexity 563.3536
70
+ 2200 train 6.395198 (lr=4.0903e-05) (hash(x)=155937443)
71
+ 2300 val loss 6.3157
72
+ 2300 val perplexity 553.1810
73
+ 2300 train 5.989519 (lr=4.0426e-05) (hash(x)=131035715)
74
+ 2400 val loss 6.2738
75
+ 2400 val perplexity 530.4802
76
+ 2400 train 6.270514 (lr=3.9927e-05) (hash(x)=146568981)
77
+ 2500 val loss 6.2432
78
+ 2500 val perplexity 514.5201
79
+ 2500 train 6.221696 (lr=3.9406e-05) (hash(x)=163317586)
80
+ 2600 val loss 6.2184
81
+ 2600 val perplexity 501.8752
82
+ 2600 train 6.037910 (lr=3.8863e-05) (hash(x)=144201060)
83
+ 2700 val loss 6.1925
84
+ 2700 val perplexity 489.0673
85
+ 2700 train 6.092707 (lr=3.8300e-05) (hash(x)=141825701)
86
+ 2800 val loss 6.1621
87
+ 2800 val perplexity 474.4416
88
+ 2800 train 6.274689 (lr=3.7717e-05) (hash(x)=160561627)
89
+ 2900 val loss 6.1349
90
+ 2900 val perplexity 461.6791
91
+ 2900 train 6.078423 (lr=3.7116e-05) (hash(x)=151758176)
92
+ 3000 val loss 6.1149
93
+ 3000 val perplexity 452.5696
94
+ 3000 train 6.131371 (lr=3.6496e-05) (hash(x)=155815751)
95
+ 3100 val loss 6.1001
96
+ 3100 val perplexity 445.9056
97
+ 3100 train 5.759576 (lr=3.5860e-05) (hash(x)=142307043)
98
+ 3200 val loss 6.0838
99
+ 3200 val perplexity 438.6731
100
+ 3200 train 6.193019 (lr=3.5207e-05) (hash(x)=156310690)
101
+ 3300 val loss 6.0547
102
+ 3300 val perplexity 426.1009
103
+ 3300 train 6.055473 (lr=3.4539e-05) (hash(x)=151344506)
104
+ 3400 val loss 6.0379
105
+ 3400 val perplexity 419.0226
106
+ 3400 train 6.073301 (lr=3.3857e-05) (hash(x)=162911881)
107
+ 3500 val loss 6.0226
108
+ 3500 val perplexity 412.6454
109
+ 3500 train 5.966004 (lr=3.3162e-05) (hash(x)=142889971)
110
+ 3600 val loss 6.0021
111
+ 3600 val perplexity 404.2900
112
+ 3600 train 5.993401 (lr=3.2455e-05) (hash(x)=149470354)
113
+ 3700 val loss 5.9825
114
+ 3700 val perplexity 396.4442
115
+ 3700 train 5.922817 (lr=3.1736e-05) (hash(x)=154194821)
116
+ 3800 val loss 5.9734
117
+ 3800 val perplexity 392.8425
118
+ 3800 train 5.903098 (lr=3.1008e-05) (hash(x)=148885848)
119
+ 3900 val loss 5.9517
120
+ 3900 val perplexity 384.4179
121
+ 3900 train 5.866324 (lr=3.0270e-05) (hash(x)=148308484)
122
+ 4000 val loss 5.9353
123
+ 4000 val perplexity 378.1353
124
+ 4000 train 5.775120 (lr=2.9524e-05) (hash(x)=139828564)
125
+ 4100 val loss 5.9260
126
+ 4100 val perplexity 374.6640
127
+ 4100 train 5.837074 (lr=2.8771e-05) (hash(x)=139981997)
128
+ 4200 val loss 5.9099
129
+ 4200 val perplexity 368.6822
130
+ 4200 train 6.059356 (lr=2.8013e-05) (hash(x)=150738447)
131
+ 4300 val loss 5.8959
132
+ 4300 val perplexity 363.5316
133
+ 4300 train 5.676573 (lr=2.7250e-05) (hash(x)=142198107)
134
+ 4400 val loss 5.8850
135
+ 4400 val perplexity 359.6176
136
+ 4400 train 5.762465 (lr=2.6483e-05) (hash(x)=142731201)
137
+ 4500 val loss 5.8668
138
+ 4500 val perplexity 353.1304
139
+ 4500 train 5.892882 (lr=2.5714e-05) (hash(x)=154814426)
140
+ 4600 val loss 5.8548
141
+ 4600 val perplexity 348.9009
142
+ 4600 train 5.830213 (lr=2.4943e-05) (hash(x)=155922230)
143
+ 4700 val loss 5.8471
144
+ 4700 val perplexity 346.2455
145
+ 4700 train 5.597148 (lr=2.4172e-05) (hash(x)=139398510)
146
+ 4800 val loss 5.8393
147
+ 4800 val perplexity 343.5542
148
+ 4800 train 5.886129 (lr=2.3402e-05) (hash(x)=140893236)
149
+ 4900 val loss 5.8202
150
+ 4900 val perplexity 337.0312
151
+ 4900 train 5.712579 (lr=2.2633e-05) (hash(x)=153747830)
152
+ 5000 val loss 5.8103
153
+ 5000 val perplexity 333.7246
154
+ 5000 train 6.062975 (lr=2.1868e-05) (hash(x)=148919005)
155
+ 5100 val loss 5.8004
156
+ 5100 val perplexity 330.4405
157
+ 5100 train 5.591538 (lr=2.1107e-05) (hash(x)=142281936)
158
+ 5200 val loss 5.8013
159
+ 5200 val perplexity 330.7337
160
+ 5200 train 5.609389 (lr=2.0351e-05) (hash(x)=143162650)
161
+ 5300 val loss 5.7807
162
+ 5300 val perplexity 323.9874
163
+ 5300 train 5.845477 (lr=1.9602e-05) (hash(x)=148546849)
164
+ 5400 val loss 5.7698
165
+ 5400 val perplexity 320.4624
166
+ 5400 train 5.628488 (lr=1.8860e-05) (hash(x)=143492259)
167
+ 5500 val loss 5.7640
168
+ 5500 val perplexity 318.6333
169
+ 5500 train 5.525702 (lr=1.8127e-05) (hash(x)=141023941)
170
+ 5600 val loss 5.7623
171
+ 5600 val perplexity 318.0780
172
+ 5600 train 5.642804 (lr=1.7403e-05) (hash(x)=142065021)
173
+ 5700 val loss 5.7467
174
+ 5700 val perplexity 313.1555
175
+ 5700 train 5.629653 (lr=1.6690e-05) (hash(x)=145749913)
176
+ 5800 val loss 5.7393
177
+ 5800 val perplexity 310.8448
178
+ 5800 train 5.421354 (lr=1.5989e-05) (hash(x)=140035522)
179
+ 5900 val loss 5.7349
180
+ 5900 val perplexity 309.4912
181
+ 5900 train 5.485237 (lr=1.5300e-05) (hash(x)=154752726)
182
+ 6000 val loss 5.7325
183
+ 6000 val perplexity 308.7275
184
+ 6000 train 5.766155 (lr=1.4625e-05) (hash(x)=158715824)
185
+ 6100 val loss 5.7190
186
+ 6100 val perplexity 304.5998
187
+ 6100 train 5.739907 (lr=1.3965e-05) (hash(x)=137413820)
188
+ 6200 val loss 5.7107
189
+ 6200 val perplexity 302.0692
190
+ 6200 train 5.631349 (lr=1.3320e-05) (hash(x)=151507523)
191
+ 6300 val loss 5.7095
192
+ 6300 val perplexity 301.7145
193
+ 6300 train 5.488567 (lr=1.2692e-05) (hash(x)=147514617)
194
+ 6400 val loss 5.7060
195
+ 6400 val perplexity 300.6701
196
+ 6400 train 5.711531 (lr=1.2081e-05) (hash(x)=151604465)
197
+ 6500 val loss 5.6965
198
+ 6500 val perplexity 297.8287
199
+ 6500 train 5.751291 (lr=1.1489e-05) (hash(x)=144515881)
200
+ 6600 val loss 5.6902
201
+ 6600 val perplexity 295.9584
202
+ 6600 train 5.482191 (lr=1.0916e-05) (hash(x)=136948374)
203
+ 6700 val loss 5.6892
204
+ 6700 val perplexity 295.6615
205
+ 6700 train 5.490501 (lr=1.0363e-05) (hash(x)=146268592)
206
+ 6800 val loss 5.6796
207
+ 6800 val perplexity 292.8444
208
+ 6800 train 5.632364 (lr=9.8310e-06) (hash(x)=152676836)
209
+ 6900 val loss 5.6722
210
+ 6900 val perplexity 290.6861
211
+ 6900 train 5.596709 (lr=9.3205e-06) (hash(x)=134657776)
212
+ 7000 val loss 5.6706
213
+ 7000 val perplexity 290.2227
214
+ 7000 train 5.630913 (lr=8.8324e-06) (hash(x)=166721861)
215
+ 7100 val loss 5.6703
216
+ 7100 val perplexity 290.1279
217
+ 7100 train 5.472641 (lr=8.3674e-06) (hash(x)=135496702)
218
+ 7200 val loss 5.6653
219
+ 7200 val perplexity 288.6681
220
+ 7200 train 5.777382 (lr=7.9261e-06) (hash(x)=155567461)
221
+ 7300 val loss 5.6578
222
+ 7300 val perplexity 286.5287
223
+ 7300 train 5.460151 (lr=7.5093e-06) (hash(x)=142803829)
224
+ 7400 val loss 5.6564
225
+ 7400 val perplexity 286.1036
226
+ 7400 train 5.478210 (lr=7.1174e-06) (hash(x)=145294178)
227
+ 7500 val loss 5.6543
228
+ 7500 val perplexity 285.5166
229
+ 7500 train 5.422940 (lr=6.7511e-06) (hash(x)=150573713)
230
+ 7600 val loss 5.6515
231
+ 7600 val perplexity 284.7068
232
+ 7600 train 5.663387 (lr=6.4109e-06) (hash(x)=142771511)
233
+ 7700 val loss 5.6450
234
+ 7700 val perplexity 282.8737
235
+ 7700 train 5.550337 (lr=6.0972e-06) (hash(x)=143602175)
236
+ 7800 val loss 5.6422
237
+ 7800 val perplexity 282.0916
238
+ 7800 train 5.701168 (lr=5.8107e-06) (hash(x)=152379862)
239
+ 7900 val loss 5.6413
240
+ 7900 val perplexity 281.8354
241
+ 7900 train 5.475989 (lr=5.5515e-06) (hash(x)=146655921)
242
+ 8000 val loss 5.6420
243
+ 8000 val perplexity 282.0280
244
+ 8000 train 5.775498 (lr=5.3203e-06) (hash(x)=148262482)
245
+ 8100 val loss 5.6343
246
+ 8100 val perplexity 279.8623
247
+ 8100 train 5.575891 (lr=5.1172e-06) (hash(x)=147683655)
248
+ 8200 val loss 5.6332
249
+ 8200 val perplexity 279.5538
250
+ 8200 train 5.693173 (lr=4.9425e-06) (hash(x)=164975934)
251
+ 8300 val loss 5.6301
252
+ 8300 val perplexity 278.6893
253
+ 8300 train 5.533537 (lr=4.7966e-06) (hash(x)=145300550)
254
+ 8400 val loss 5.6282
255
+ 8400 val perplexity 278.1678
256
+ 8400 train 5.545886 (lr=4.6796e-06) (hash(x)=150679400)
257
+ 8500 val loss 5.6290
258
+ 8500 val perplexity 278.3904
259
+ 8500 train 5.625071 (lr=4.5917e-06) (hash(x)=164109401)
260
+ 8600 val loss 5.6257
261
+ 8600 val perplexity 277.4749
262
+ 8600 train 5.575358 (lr=4.5330e-06) (hash(x)=161036376)
263
+ 8700 val loss 5.6213
264
+ 8700 val perplexity 276.2414
265
+ 8700 train 5.549177 (lr=4.5037e-06) (hash(x)=153828820)
266
+ 8749 val loss 5.6201
267
+ 8749 val perplexity 275.9221
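The new log2.txt replaces the earlier file, which appears to interleave entries from two runs (each step is logged twice with different learning rates), with a single clean run at the new peak learning rate. Apart from the max_steps header, every entry has one of three shapes: `<step> val loss <value>`, `<step> val perplexity <value>`, or `<step> train <loss> (lr=<lr>) (hash(x)=<hash>)`. A small standard-library sketch for pulling the loss curves out of a file in this format (the path matches this repo's layout; everything else is illustrative):

```python
import re
from pathlib import Path

VAL_LOSS = re.compile(r"^(\d+) val loss ([\d.]+)$")
TRAIN = re.compile(r"^(\d+) train ([\d.]+) \(lr=([0-9eE+.\-]+)\)")

def parse_log(path):
    """Collect (step, value) pairs for validation loss and training loss."""
    val, train = [], []
    for line in Path(path).read_text().splitlines():
        line = line.strip()
        if m := VAL_LOSS.match(line):
            val.append((int(m.group(1)), float(m.group(2))))
        elif m := TRAIN.match(line):
            train.append((int(m.group(1)), float(m.group(2))))
    return val, train

if __name__ == "__main__":
    val, train = parse_log("attention_kindselective_n_heads4_seed1344/log2.txt")
    print(val[-1])  # e.g. (8749, 5.6201) for the log after this commit
```

The logged perplexity is simply exp(val loss); for example, exp(5.6201) ≈ 275.92, matching the final entry above.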
attention_kindselective_n_heads4_seed1344/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:05d476d1b1967d89cb078db0cb8dde81e6a201530c206f968474e1bd60409cec
+ oid sha256:559937116dc87cad8d5b0c4793cd92eae070e7ad9289ee0b597d496211e882f3
  size 92843394
attention_kindselective_n_heads4_seed1344/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0bd43931f26ce626d3345e60608b25bfbdb809636a0d94748839af3204037042
+ oid sha256:846e83e3fb29ba9606086848e1d93051cf0e43d2ce03044226aad14ec7f885ea
  size 179406214
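model_08749.pt and optimizer_08749.pt are Git LFS objects; only their sha256 pointers change in this commit, the file sizes are unchanged. A minimal sketch, again with a placeholder repo id, of how the final-step checkpoint could be downloaded and opened; the layout of the saved object depends on the training script, so the key inspection below is exploratory rather than definitive:

```python
import torch
from huggingface_hub import hf_hub_download

REPO_ID = "andrew-healey/<repo-name>"  # placeholder, repo id not shown on this page

# The .pt files are stored through Git LFS; hf_hub_download resolves the pointer
# and returns a local path to the actual ~93 MB checkpoint file.
model_path = hf_hub_download(
    repo_id=REPO_ID,
    filename="attention_kindselective_n_heads4_seed1344/model_08749.pt",
)

# What the checkpoint contains is defined by the training script, so inspect it
# before assuming a layout. On newer torch versions, torch.load may need
# weights_only=False if the checkpoint stores non-tensor objects.
state = torch.load(model_path, map_location="cpu")
if isinstance(state, dict):
    print(list(state.keys())[:10])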