andrew-healey committed (verified)
Commit 8fcc258 · 1 Parent(s): 42e8d08

Upload folder using huggingface_hub
attention_kindselective_n_heads4_seed1342/args.json CHANGED
@@ -1 +1 @@
1
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1342", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1342, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 4.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "4.5e-5_61440_4_1342", "n_embd": 256}
 
1
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1342", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1342, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5e-5_61440_4_1342", "n_embd": 256}
attention_kindselective_n_heads4_seed1342/log2.txt CHANGED
@@ -1,267 +1,522 @@
1
  max_steps: 8750
2
  0 val loss 11.2898
3
  0 val perplexity 80004.5469
4
- 0 train 11.289043 (lr=9.0000e-08) (hash(x)=150332693)
5
- 100 val loss 10.0243
6
- 100 val perplexity 22568.6875
7
- 100 train 9.978326 (lr=9.0900e-06) (hash(x)=149277319)
8
- 200 val loss 9.3785
9
- 200 val perplexity 11831.2090
10
- 200 train 9.445846 (lr=1.8090e-05) (hash(x)=155934075)
11
- 300 val loss 8.2194
12
- 300 val perplexity 3712.2971
13
- 300 train 8.254610 (lr=2.7090e-05) (hash(x)=165402628)
14
- 400 val loss 7.6163
15
- 400 val perplexity 2031.0946
16
- 400 train 7.421521 (lr=3.6090e-05) (hash(x)=139168939)
17
- 500 val loss 7.4349
18
- 500 val perplexity 1694.0548
19
- 500 train 7.357471 (lr=4.5000e-05) (hash(x)=147134338)
20
- 600 val loss 7.3305
21
- 600 val perplexity 1526.1565
22
- 600 train 7.243377 (lr=4.4985e-05) (hash(x)=159086459)
23
- 700 val loss 7.2503
24
- 700 val perplexity 1408.4863
25
- 700 train 7.288620 (lr=4.4941e-05) (hash(x)=152399099)
26
- 800 val loss 7.1363
27
- 800 val perplexity 1256.7866
28
- 800 train 7.140655 (lr=4.4868e-05) (hash(x)=156548843)
29
- 900 val loss 7.0484
30
- 900 val perplexity 1151.0243
31
- 900 train 7.021039 (lr=4.4766e-05) (hash(x)=157889911)
32
- 1000 val loss 6.9766
33
- 1000 val perplexity 1071.3145
34
- 1000 train 6.774483 (lr=4.4634e-05) (hash(x)=142943707)
35
- 1100 val loss 6.9135
36
- 1100 val perplexity 1005.7903
37
- 1100 train 6.912020 (lr=4.4474e-05) (hash(x)=148938689)
38
- 1200 val loss 6.8143
39
- 1200 val perplexity 910.7801
40
- 1200 train 6.954645 (lr=4.4285e-05) (hash(x)=161962903)
41
- 1300 val loss 6.7411
42
- 1300 val perplexity 846.4569
43
- 1300 train 6.723616 (lr=4.4068e-05) (hash(x)=145979235)
44
- 1400 val loss 6.6754
45
- 1400 val perplexity 792.6756
46
- 1400 train 6.625307 (lr=4.3822e-05) (hash(x)=159157507)
47
- 1500 val loss 6.6153
48
- 1500 val perplexity 746.3971
49
- 1500 train 6.649820 (lr=4.3549e-05) (hash(x)=145943130)
50
- 1600 val loss 6.5401
51
- 1600 val perplexity 692.3466
52
- 1600 train 6.341069 (lr=4.3249e-05) (hash(x)=143249173)
53
- 1700 val loss 6.4872
54
- 1700 val perplexity 656.6964
55
- 1700 train 6.551693 (lr=4.2922e-05) (hash(x)=173907143)
56
- 1800 val loss 6.4411
57
- 1800 val perplexity 627.1124
58
- 1800 train 6.368075 (lr=4.2569e-05) (hash(x)=138363755)
59
- 1900 val loss 6.4086
60
- 1900 val perplexity 607.0637
61
- 1900 train 6.216325 (lr=4.2190e-05) (hash(x)=146341390)
62
- 2000 val loss 6.3554
63
- 2000 val perplexity 575.6113
64
- 2000 train 6.384245 (lr=4.1785e-05) (hash(x)=154559671)
65
- 2100 val loss 6.3281
66
- 2100 val perplexity 560.0664
67
- 2100 train 6.378413 (lr=4.1356e-05) (hash(x)=155607137)
68
- 2200 val loss 6.2939
69
- 2200 val perplexity 541.2359
70
- 2200 train 6.150013 (lr=4.0903e-05) (hash(x)=153010221)
71
- 2300 val loss 6.2609
72
- 2300 val perplexity 523.7111
73
- 2300 train 6.278090 (lr=4.0426e-05) (hash(x)=160097777)
74
- 2400 val loss 6.2372
75
- 2400 val perplexity 511.4395
76
- 2400 train 6.214860 (lr=3.9927e-05) (hash(x)=135590094)
77
- 2500 val loss 6.2120
78
- 2500 val perplexity 498.6850
79
- 2500 train 6.329730 (lr=3.9406e-05) (hash(x)=147042929)
80
- 2600 val loss 6.1775
81
- 2600 val perplexity 481.7953
82
- 2600 train 6.138986 (lr=3.8863e-05) (hash(x)=163793881)
83
- 2700 val loss 6.1626
84
- 2700 val perplexity 474.6808
85
- 2700 train 6.195170 (lr=3.8300e-05) (hash(x)=153598698)
86
- 2800 val loss 6.1463
87
- 2800 val perplexity 466.9887
88
- 2800 train 5.944228 (lr=3.7717e-05) (hash(x)=133439803)
89
- 2900 val loss 6.1207
90
- 2900 val perplexity 455.1814
91
- 2900 train 6.059956 (lr=3.7116e-05) (hash(x)=148239158)
92
- 3000 val loss 6.0949
93
- 3000 val perplexity 443.6075
94
- 3000 train 6.099676 (lr=3.6496e-05) (hash(x)=150991971)
95
- 3100 val loss 6.0827
96
- 3100 val perplexity 438.2101
97
- 3100 train 6.087112 (lr=3.5860e-05) (hash(x)=157791832)
98
- 3200 val loss 6.0558
99
- 3200 val perplexity 426.5774
100
- 3200 train 6.124968 (lr=3.5207e-05) (hash(x)=163746305)
101
- 3300 val loss 6.0481
102
- 3300 val perplexity 423.3175
103
- 3300 train 5.978697 (lr=3.4539e-05) (hash(x)=155120269)
104
- 3400 val loss 6.0166
105
- 3400 val perplexity 410.1862
106
- 3400 train 5.958898 (lr=3.3857e-05) (hash(x)=152748451)
107
- 3500 val loss 6.0043
108
- 3500 val perplexity 405.1582
109
- 3500 train 5.885794 (lr=3.3162e-05) (hash(x)=147395004)
110
- 3600 val loss 5.9914
111
- 3600 val perplexity 399.9552
112
- 3600 train 6.009531 (lr=3.2455e-05) (hash(x)=157554250)
113
- 3700 val loss 5.9740
114
- 3700 val perplexity 393.0799
115
- 3700 train 5.826887 (lr=3.1736e-05) (hash(x)=147953321)
116
- 3800 val loss 5.9551
117
- 3800 val perplexity 385.7162
118
- 3800 train 5.805262 (lr=3.1008e-05) (hash(x)=146898609)
119
- 3900 val loss 5.9564
120
- 3900 val perplexity 386.2080
121
- 3900 train 5.996837 (lr=3.0270e-05) (hash(x)=150115282)
122
- 4000 val loss 5.9218
123
- 4000 val perplexity 373.0663
124
- 4000 train 5.790571 (lr=2.9524e-05) (hash(x)=142448619)
125
- 4100 val loss 5.9223
126
- 4100 val perplexity 373.2632
127
- 4100 train 5.654961 (lr=2.8771e-05) (hash(x)=141244123)
128
- 4200 val loss 5.9140
129
- 4200 val perplexity 370.1866
130
- 4200 train 5.927835 (lr=2.8013e-05) (hash(x)=165913661)
131
- 4300 val loss 5.8861
132
- 4300 val perplexity 360.0132
133
- 4300 train 5.917614 (lr=2.7250e-05) (hash(x)=155744823)
134
- 4400 val loss 5.8769
135
- 4400 val perplexity 356.6874
136
- 4400 train 5.872905 (lr=2.6483e-05) (hash(x)=160461243)
137
- 4500 val loss 5.8657
138
- 4500 val perplexity 352.7454
139
- 4500 train 5.776689 (lr=2.5714e-05) (hash(x)=156073129)
140
- 4600 val loss 5.8505
141
- 4600 val perplexity 347.4088
142
- 4600 train 5.897415 (lr=2.4943e-05) (hash(x)=152294132)
143
- 4700 val loss 5.8393
144
- 4700 val perplexity 343.5292
145
- 4700 train 5.771152 (lr=2.4172e-05) (hash(x)=140904569)
146
- 4800 val loss 5.8338
147
- 4800 val perplexity 341.6528
148
- 4800 train 5.739367 (lr=2.3402e-05) (hash(x)=156191635)
149
- 4900 val loss 5.8181
150
- 4900 val perplexity 336.3423
151
- 4900 train 5.924228 (lr=2.2633e-05) (hash(x)=145063976)
152
- 5000 val loss 5.8120
153
- 5000 val perplexity 334.2776
154
- 5000 train 5.754147 (lr=2.1868e-05) (hash(x)=160110619)
155
- 5100 val loss 5.7954
156
- 5100 val perplexity 328.7745
157
- 5100 train 5.821685 (lr=2.1107e-05) (hash(x)=156270070)
158
- 5200 val loss 5.7867
159
- 5200 val perplexity 325.9519
160
- 5200 train 5.674892 (lr=2.0351e-05) (hash(x)=138954242)
161
- 5300 val loss 5.7796
162
- 5300 val perplexity 323.6199
163
- 5300 train 5.633004 (lr=1.9602e-05) (hash(x)=146472367)
164
- 5400 val loss 5.7699
165
- 5400 val perplexity 320.5030
166
- 5400 train 5.702404 (lr=1.8860e-05) (hash(x)=146753405)
167
- 5500 val loss 5.7575
168
- 5500 val perplexity 316.5528
169
- 5500 train 5.706319 (lr=1.8127e-05) (hash(x)=147757398)
170
- 5600 val loss 5.7557
171
- 5600 val perplexity 315.9973
172
- 5600 train 5.624521 (lr=1.7403e-05) (hash(x)=151291757)
173
- 5700 val loss 5.7424
174
- 5700 val perplexity 311.8269
175
- 5700 train 5.793109 (lr=1.6690e-05) (hash(x)=156274392)
176
- 5800 val loss 5.7336
177
- 5800 val perplexity 309.0707
178
- 5800 train 5.702478 (lr=1.5989e-05) (hash(x)=152612266)
179
- 5900 val loss 5.7301
180
- 5900 val perplexity 307.9881
181
- 5900 train 5.893963 (lr=1.5300e-05) (hash(x)=148973201)
182
- 6000 val loss 5.7217
183
- 6000 val perplexity 305.4335
184
- 6000 train 5.687572 (lr=1.4625e-05) (hash(x)=158311490)
185
- 6100 val loss 5.7135
186
- 6100 val perplexity 302.9321
187
- 6100 train 5.645096 (lr=1.3965e-05) (hash(x)=147541319)
188
- 6200 val loss 5.7129
189
- 6200 val perplexity 302.7402
190
- 6200 train 5.777384 (lr=1.3320e-05) (hash(x)=154753980)
191
- 6300 val loss 5.7037
192
- 6300 val perplexity 299.9899
193
- 6300 train 5.601553 (lr=1.2692e-05) (hash(x)=140798998)
194
- 6400 val loss 5.6993
195
- 6400 val perplexity 298.6710
196
- 6400 train 5.588508 (lr=1.2081e-05) (hash(x)=149299516)
197
- 6500 val loss 5.6953
198
- 6500 val perplexity 297.4683
199
- 6500 train 5.682362 (lr=1.1489e-05) (hash(x)=148495806)
200
- 6600 val loss 5.6878
201
- 6600 val perplexity 295.2325
202
- 6600 train 5.665105 (lr=1.0916e-05) (hash(x)=153664387)
203
- 6700 val loss 5.6849
204
- 6700 val perplexity 294.3740
205
- 6700 train 5.661620 (lr=1.0363e-05) (hash(x)=153333491)
206
- 6800 val loss 5.6854
207
- 6800 val perplexity 294.5452
208
- 6800 train 5.438633 (lr=9.8310e-06) (hash(x)=150149382)
209
- 6900 val loss 5.6780
210
- 6900 val perplexity 292.3573
211
- 6900 train 5.763326 (lr=9.3205e-06) (hash(x)=152379767)
212
- 7000 val loss 5.6708
213
- 7000 val perplexity 290.2598
214
- 7000 train 5.719643 (lr=8.8324e-06) (hash(x)=150432537)
215
- 7100 val loss 5.6688
216
- 7100 val perplexity 289.6923
217
- 7100 train 5.545722 (lr=8.3674e-06) (hash(x)=148277836)
218
- 7200 val loss 5.6698
219
- 7200 val perplexity 289.9790
220
- 7200 train 5.664272 (lr=7.9261e-06) (hash(x)=168591659)
221
- 7300 val loss 5.6615
222
- 7300 val perplexity 287.5750
223
- 7300 train 5.642782 (lr=7.5093e-06) (hash(x)=152764851)
224
- 7400 val loss 5.6584
225
- 7400 val perplexity 286.6996
226
- 7400 train 5.621808 (lr=7.1174e-06) (hash(x)=142920131)
227
- 7500 val loss 5.6559
228
- 7500 val perplexity 285.9665
229
- 7500 train 5.656323 (lr=6.7511e-06) (hash(x)=146954649)
230
- 7600 val loss 5.6564
231
- 7600 val perplexity 286.1283
232
- 7600 train 5.434806 (lr=6.4109e-06) (hash(x)=144529566)
233
- 7700 val loss 5.6503
234
- 7700 val perplexity 284.3646
235
- 7700 train 5.609683 (lr=6.0972e-06) (hash(x)=142669945)
236
- 7800 val loss 5.6460
237
- 7800 val perplexity 283.1537
238
- 7800 train 5.704182 (lr=5.8107e-06) (hash(x)=161563764)
239
- 7900 val loss 5.6443
240
- 7900 val perplexity 282.6702
241
- 7900 train 5.439746 (lr=5.5515e-06) (hash(x)=141516532)
242
- 8000 val loss 5.6455
243
- 8000 val perplexity 283.0064
244
- 8000 train 5.515144 (lr=5.3203e-06) (hash(x)=152418457)
245
- 8100 val loss 5.6398
246
- 8100 val perplexity 281.4167
247
- 8100 train 5.624706 (lr=5.1172e-06) (hash(x)=161109470)
248
- 8200 val loss 5.6360
249
- 8200 val perplexity 280.3446
250
- 8200 train 5.758985 (lr=4.9425e-06) (hash(x)=154408822)
251
- 8300 val loss 5.6342
252
- 8300 val perplexity 279.8434
253
- 8300 train 5.520042 (lr=4.7966e-06) (hash(x)=146431691)
254
- 8400 val loss 5.6345
255
- 8400 val perplexity 279.9092
256
- 8400 train 5.412016 (lr=4.6796e-06) (hash(x)=150182527)
257
- 8500 val loss 5.6286
258
- 8500 val perplexity 278.2667
259
- 8500 train 5.693805 (lr=4.5917e-06) (hash(x)=154828709)
260
- 8600 val loss 5.6284
261
- 8600 val perplexity 278.2239
262
- 8600 train 5.539424 (lr=4.5330e-06) (hash(x)=154995428)
263
- 8700 val loss 5.6285
264
- 8700 val perplexity 278.2420
265
- 8700 train 5.523919 (lr=4.5037e-06) (hash(x)=142111043)
266
- 8749 val loss 5.6262
267
- 8749 val perplexity 277.6001

1
  max_steps: 8750
2
+ 200 val loss 9.4639
3
+ 200 val perplexity 12885.6191
4
+ 200 train 9.532709 (lr=1.6080e-05) (hash(x)=155934075)
5
  0 val loss 11.2898
6
  0 val perplexity 80004.5469
7
+ 300 val loss 8.3583
8
+ 300 val perplexity 4265.4746
9
+ 300 train 8.394882 (lr=2.4080e-05) (hash(x)=165402628)
10
+ 0 train 11.289051 (lr=1.0000e-07) (hash(x)=150332693)
11
+ 400 val loss 7.6790
12
+ 400 val perplexity 2162.5322
13
+ 400 train 7.493039 (lr=3.2080e-05) (hash(x)=139168939)
14
+ 100 val loss 9.9962
15
+ 100 val perplexity 21943.4395
16
+ 100 train 9.948080 (lr=1.0100e-05) (hash(x)=149277319)
17
+ 500 val loss 7.4664
18
+ 500 val perplexity 1748.2917
19
+ 500 train 7.390862 (lr=4.0000e-05) (hash(x)=147134338)
20
+ 200 val loss 9.3283
21
+ 200 val perplexity 11251.4990
22
+ 200 train 9.395099 (lr=2.0100e-05) (hash(x)=155934075)
23
+ 600 val loss 7.3681
24
+ 600 val perplexity 1584.6302
25
+ 600 train 7.280435 (lr=3.9987e-05) (hash(x)=159086459)
26
+ 300 val loss 8.1346
27
+ 300 val perplexity 3410.3855
28
+ 300 train 8.168678 (lr=3.0100e-05) (hash(x)=165402628)
29
+ 700 val loss 7.2804
30
+ 700 val perplexity 1451.5247
31
+ 700 train 7.317653 (lr=3.9948e-05) (hash(x)=152399099)
32
+ 400 val loss 7.5968
33
+ 400 val perplexity 1991.7366
34
+ 400 train 7.398672 (lr=4.0100e-05) (hash(x)=139168939)
35
+ 800 val loss 7.1671
36
+ 800 val perplexity 1296.1224
37
+ 800 train 7.168324 (lr=3.9883e-05) (hash(x)=156548843)
38
+ 500 val loss 7.4448
39
+ 500 val perplexity 1711.0024
40
+ 500 train 7.364177 (lr=5.0000e-05) (hash(x)=147134338)
41
+ 900 val loss 7.0745
42
+ 900 val perplexity 1181.4358
43
+ 900 train 7.044015 (lr=3.9792e-05) (hash(x)=157889911)
44
+ 600 val loss 7.3553
45
+ 600 val perplexity 1564.4274
46
+ 600 train 7.269327 (lr=4.9984e-05) (hash(x)=159086459)
47
+ 1000 val loss 7.0000
48
+ 1000 val perplexity 1096.6268
49
+ 1000 train 6.798794 (lr=3.9675e-05) (hash(x)=142943707)
50
+ 700 val loss 7.2784
51
+ 700 val perplexity 1448.6952
52
+ 700 train 7.316185 (lr=4.9935e-05) (hash(x)=152399099)
53
+ 1100 val loss 6.9307
54
+ 1100 val perplexity 1023.2171
55
+ 1100 train 6.932874 (lr=3.9532e-05) (hash(x)=148938689)
56
+ 800 val loss 7.1807
57
+ 800 val perplexity 1313.8818
58
+ 800 train 7.183406 (lr=4.9853e-05) (hash(x)=156548843)
59
+ 1200 val loss 6.8365
60
+ 1200 val perplexity 931.1896
61
+ 1200 train 6.969981 (lr=3.9364e-05) (hash(x)=161962903)
62
+ 900 val loss 7.1040
63
+ 900 val perplexity 1216.8074
64
+ 900 train 7.077850 (lr=4.9739e-05) (hash(x)=157889911)
65
+ 1300 val loss 6.7581
66
+ 1300 val perplexity 861.0339
67
+ 1300 train 6.738302 (lr=3.9171e-05) (hash(x)=145979235)
68
+ 1000 val loss 7.0485
69
+ 1000 val perplexity 1151.1632
70
+ 1000 train 6.848722 (lr=4.9593e-05) (hash(x)=142943707)
71
+ 1400 val loss 6.6876
72
+ 1400 val perplexity 802.4104
73
+ 1400 train 6.635297 (lr=3.8953e-05) (hash(x)=159157507)
74
+ 1100 val loss 6.9970
75
+ 1100 val perplexity 1093.3792
76
+ 1100 train 7.001183 (lr=4.9415e-05) (hash(x)=148938689)
77
+ 1500 val loss 6.6290
78
+ 1500 val perplexity 756.6931
79
+ 1500 train 6.665385 (lr=3.8711e-05) (hash(x)=145943130)
80
+ 1200 val loss 6.9444
81
+ 1200 val perplexity 1037.3114
82
+ 1200 train 7.073627 (lr=4.9205e-05) (hash(x)=161962903)
83
+ 1600 val loss 6.5599
84
+ 1600 val perplexity 706.1689
85
+ 1600 train 6.357224 (lr=3.8444e-05) (hash(x)=143249173)
86
+ 1300 val loss 6.8886
87
+ 1300 val perplexity 981.0255
88
+ 1300 train 6.864804 (lr=4.8964e-05) (hash(x)=145979235)
89
+ 1700 val loss 6.5038
90
+ 1700 val perplexity 667.6825
91
+ 1700 train 6.566358 (lr=3.8153e-05) (hash(x)=173907143)
92
+ 1400 val loss 6.8403
93
+ 1400 val perplexity 934.7651
94
+ 1400 train 6.786602 (lr=4.8691e-05) (hash(x)=159157507)
95
+ 1800 val loss 6.4599
96
+ 1800 val perplexity 638.9742
97
+ 1800 train 6.385739 (lr=3.7839e-05) (hash(x)=138363755)
98
+ 1500 val loss 6.8001
99
+ 1500 val perplexity 897.9742
100
+ 1500 train 6.838896 (lr=4.8388e-05) (hash(x)=145943130)
101
+ 1900 val loss 6.4176
102
+ 1900 val perplexity 612.5451
103
+ 1900 train 6.228965 (lr=3.7502e-05) (hash(x)=146341390)
104
+ 1600 val loss 6.7494
105
+ 1600 val perplexity 853.5507
106
+ 1600 train 6.545481 (lr=4.8055e-05) (hash(x)=143249173)
107
+ 2000 val loss 6.3698
108
+ 2000 val perplexity 583.9255
109
+ 2000 train 6.398791 (lr=3.7143e-05) (hash(x)=154559671)
110
+ 1700 val loss 6.7104
111
+ 1700 val perplexity 820.8583
112
+ 1700 train 6.782251 (lr=4.7691e-05) (hash(x)=173907143)
113
+ 2100 val loss 6.3420
114
+ 2100 val perplexity 567.9370
115
+ 2100 train 6.400441 (lr=3.6761e-05) (hash(x)=155607137)
116
+ 1800 val loss 6.6697
117
+ 1800 val perplexity 788.1686
118
+ 1800 train 6.595098 (lr=4.7299e-05) (hash(x)=138363755)
119
+ 2200 val loss 6.3107
120
+ 2200 val perplexity 550.4556
121
+ 2200 train 6.170771 (lr=3.6358e-05) (hash(x)=153010221)
122
+ 1900 val loss 6.6298
123
+ 1900 val perplexity 757.3068
124
+ 1900 train 6.441909 (lr=4.6878e-05) (hash(x)=146341390)
125
+ 2300 val loss 6.2734
126
+ 2300 val perplexity 530.2939
127
+ 2300 train 6.293130 (lr=3.5935e-05) (hash(x)=160097777)
128
+ 2000 val loss 6.5908
129
+ 2000 val perplexity 728.3906
130
+ 2000 train 6.615541 (lr=4.6428e-05) (hash(x)=154559671)
131
+ 2400 val loss 6.2462
132
+ 2400 val perplexity 516.0425
133
+ 2400 train 6.216858 (lr=3.5491e-05) (hash(x)=135590094)
134
+ 2100 val loss 6.5619
135
+ 2100 val perplexity 707.5947
136
+ 2100 train 6.621873 (lr=4.5951e-05) (hash(x)=155607137)
137
+ 2500 val loss 6.2243
138
+ 2500 val perplexity 504.8625
139
+ 2500 train 6.341967 (lr=3.5027e-05) (hash(x)=147042929)
140
+ 2200 val loss 6.5254
141
+ 2200 val perplexity 682.2401
142
+ 2200 train 6.376645 (lr=4.5448e-05) (hash(x)=153010221)
143
+ 2600 val loss 6.1893
144
+ 2600 val perplexity 487.4810
145
+ 2600 train 6.154806 (lr=3.4545e-05) (hash(x)=163793881)
146
+ 2300 val loss 6.5038
147
+ 2300 val perplexity 667.6698
148
+ 2300 train 6.525729 (lr=4.4918e-05) (hash(x)=160097777)
149
+ 2700 val loss 6.1648
150
+ 2700 val perplexity 475.7272
151
+ 2700 train 6.199413 (lr=3.4044e-05) (hash(x)=153598698)
152
+ 2400 val loss 6.4631
153
+ 2400 val perplexity 641.0137
154
+ 2400 train 6.417774 (lr=4.4363e-05) (hash(x)=135590094)
155
+ 2800 val loss 6.1510
156
+ 2800 val perplexity 469.1915
157
+ 2800 train 5.944895 (lr=3.3526e-05) (hash(x)=133439803)
158
+ 2500 val loss 6.4394
159
+ 2500 val perplexity 626.0601
160
+ 2500 train 6.565438 (lr=4.3784e-05) (hash(x)=147042929)
161
+ 2900 val loss 6.1239
162
+ 2900 val perplexity 456.6519
163
+ 2900 train 6.061853 (lr=3.2992e-05) (hash(x)=148239158)
164
+ 2600 val loss 6.4139
165
+ 2600 val perplexity 610.2821
166
+ 2600 train 6.378555 (lr=4.3181e-05) (hash(x)=163793881)
167
+ 3000 val loss 6.1035
168
+ 3000 val perplexity 447.3994
169
+ 3000 train 6.109071 (lr=3.2441e-05) (hash(x)=150991971)
170
+ 2700 val loss 6.3924
171
+ 2700 val perplexity 597.3093
172
+ 2700 train 6.419984 (lr=4.2555e-05) (hash(x)=153598698)
173
+ 3100 val loss 6.0881
174
+ 3100 val perplexity 440.5812
175
+ 3100 train 6.093895 (lr=3.1875e-05) (hash(x)=157791832)
176
+ 2800 val loss 6.3788
177
+ 2800 val perplexity 589.2058
178
+ 2800 train 6.179019 (lr=4.1908e-05) (hash(x)=133439803)
179
+ 3200 val loss 6.0635
180
+ 3200 val perplexity 429.8743
181
+ 3200 train 6.134553 (lr=3.1295e-05) (hash(x)=163746305)
182
+ 2900 val loss 6.3535
183
+ 2900 val perplexity 574.4886
184
+ 2900 train 6.295610 (lr=4.1240e-05) (hash(x)=148239158)
185
+ 3300 val loss 6.0547
186
+ 3300 val perplexity 426.1033
187
+ 3300 train 5.977380 (lr=3.0702e-05) (hash(x)=155120269)
188
+ 3000 val loss 6.3357
189
+ 3000 val perplexity 564.3859
190
+ 3000 train 6.326377 (lr=4.0551e-05) (hash(x)=150991971)
191
+ 3400 val loss 6.0275
192
+ 3400 val perplexity 414.6939
193
+ 3400 train 5.970883 (lr=3.0095e-05) (hash(x)=152748451)
194
+ 3100 val loss 6.3179
195
+ 3100 val perplexity 554.4232
196
+ 3100 train 6.331873 (lr=3.9844e-05) (hash(x)=157791832)
197
+ 3500 val loss 6.0154
198
+ 3500 val perplexity 409.6898
199
+ 3500 train 5.895640 (lr=2.9477e-05) (hash(x)=147395004)
200
+ 3200 val loss 6.2993
201
+ 3200 val perplexity 544.2070
202
+ 3200 train 6.374971 (lr=3.9119e-05) (hash(x)=163746305)
203
+ 3600 val loss 6.0009
204
+ 3600 val perplexity 403.7821
205
+ 3600 train 6.022380 (lr=2.8849e-05) (hash(x)=157554250)
206
+ 3300 val loss 6.2950
207
+ 3300 val perplexity 541.8779
208
+ 3300 train 6.231050 (lr=3.8377e-05) (hash(x)=155120269)
209
+ 3700 val loss 5.9809
210
+ 3700 val perplexity 395.8124
211
+ 3700 train 5.834892 (lr=2.8210e-05) (hash(x)=147953321)
212
+ 3400 val loss 6.2608
213
+ 3400 val perplexity 523.6501
214
+ 3400 train 6.201460 (lr=3.7619e-05) (hash(x)=152748451)
215
+ 3800 val loss 5.9648
216
+ 3800 val perplexity 389.4606
217
+ 3800 train 5.812018 (lr=2.7562e-05) (hash(x)=146898609)
218
+ 3500 val loss 6.2453
219
+ 3500 val perplexity 515.5919
220
+ 3500 train 6.119865 (lr=3.6847e-05) (hash(x)=147395004)
221
+ 3900 val loss 5.9597
222
+ 3900 val perplexity 387.4993
223
+ 3900 train 6.004293 (lr=2.6907e-05) (hash(x)=150115282)
224
+ 3600 val loss 6.2349
225
+ 3600 val perplexity 510.2739
226
+ 3600 train 6.257258 (lr=3.6061e-05) (hash(x)=157554250)
227
+ 4000 val loss 5.9349
228
+ 4000 val perplexity 377.9908
229
+ 4000 train 5.803768 (lr=2.6244e-05) (hash(x)=142448619)
230
+ 3700 val loss 6.2165
231
+ 3700 val perplexity 500.9630
232
+ 3700 train 6.068434 (lr=3.5263e-05) (hash(x)=147953321)
233
+ 4100 val loss 5.9360
234
+ 4100 val perplexity 378.4156
235
+ 4100 train 5.667750 (lr=2.5575e-05) (hash(x)=141244123)
236
+ 3800 val loss 6.2068
237
+ 3800 val perplexity 496.1251
238
+ 3800 train 6.068558 (lr=3.4453e-05) (hash(x)=146898609)
239
+ 4200 val loss 5.9272
240
+ 4200 val perplexity 375.1158
241
+ 4200 train 5.949145 (lr=2.4900e-05) (hash(x)=165913661)
242
+ 3900 val loss 6.2001
243
+ 3900 val perplexity 492.8126
244
+ 3900 train 6.240398 (lr=3.3633e-05) (hash(x)=150115282)
245
+ 4300 val loss 5.9009
246
+ 4300 val perplexity 365.3578
247
+ 4300 train 5.933804 (lr=2.4222e-05) (hash(x)=155744823)
248
+ 4000 val loss 6.1741
249
+ 4000 val perplexity 480.1424
250
+ 4000 train 6.023779 (lr=3.2805e-05) (hash(x)=142448619)
251
+ 4400 val loss 5.8893
252
+ 4400 val perplexity 361.1411
253
+ 4400 train 5.882122 (lr=2.3540e-05) (hash(x)=160461243)
254
+ 4100 val loss 6.1704
255
+ 4100 val perplexity 478.3836
256
+ 4100 train 5.888917 (lr=3.1968e-05) (hash(x)=141244123)
257
+ 4500 val loss 5.8792
258
+ 4500 val perplexity 357.5369
259
+ 4500 train 5.789831 (lr=2.2856e-05) (hash(x)=156073129)
260
+ 4200 val loss 6.1580
261
+ 4200 val perplexity 472.5033
262
+ 4200 train 6.195307 (lr=3.1126e-05) (hash(x)=165913661)
263
+ 4600 val loss 5.8617
264
+ 4600 val perplexity 351.3350
265
+ 4600 train 5.907257 (lr=2.2171e-05) (hash(x)=152294132)
266
+ 4300 val loss 6.1397
267
+ 4300 val perplexity 463.9040
268
+ 4300 train 6.172644 (lr=3.0277e-05) (hash(x)=155744823)
269
+ 4700 val loss 5.8537
270
+ 4700 val perplexity 348.5111
271
+ 4700 train 5.785102 (lr=2.1486e-05) (hash(x)=140904569)
272
+ 4400 val loss 6.1511
273
+ 4400 val perplexity 469.2470
274
+ 4400 train 6.134822 (lr=2.9425e-05) (hash(x)=160461243)
275
+ 4800 val loss 5.8460
276
+ 4800 val perplexity 345.8560
277
+ 4800 train 5.754941 (lr=2.0801e-05) (hash(x)=156191635)
278
+ 4500 val loss 6.1273
279
+ 4500 val perplexity 458.1988
280
+ 4500 train 6.053799 (lr=2.8571e-05) (hash(x)=156073129)
281
+ 4900 val loss 5.8350
282
+ 4900 val perplexity 342.0713
283
+ 4900 train 5.941958 (lr=2.0118e-05) (hash(x)=145063976)
284
+ 4600 val loss 6.1165
285
+ 4600 val perplexity 453.2633
286
+ 4600 train 6.166283 (lr=2.7714e-05) (hash(x)=152294132)
287
+ 5000 val loss 5.8263
288
+ 5000 val perplexity 339.0927
289
+ 5000 train 5.771696 (lr=1.9438e-05) (hash(x)=160110619)
290
+ 4700 val loss 6.1035
291
+ 4700 val perplexity 447.4263
292
+ 4700 train 6.038318 (lr=2.6857e-05) (hash(x)=140904569)
293
+ 5100 val loss 5.8118
294
+ 5100 val perplexity 334.2325
295
+ 5100 train 5.841002 (lr=1.8762e-05) (hash(x)=156270070)
296
+ 4800 val loss 6.0977
297
+ 4800 val perplexity 444.8199
298
+ 4800 train 6.001534 (lr=2.6002e-05) (hash(x)=156191635)
299
+ 5200 val loss 5.8029
300
+ 5200 val perplexity 331.2700
301
+ 5200 train 5.687089 (lr=1.8090e-05) (hash(x)=138954242)
302
+ 4900 val loss 6.0911
303
+ 4900 val perplexity 441.9067
304
+ 4900 train 6.159082 (lr=2.5148e-05) (hash(x)=145063976)
305
+ 5300 val loss 5.7946
306
+ 5300 val perplexity 328.5219
307
+ 5300 train 5.646440 (lr=1.7424e-05) (hash(x)=146472367)
308
+ 5000 val loss 6.0757
309
+ 5000 val perplexity 435.1693
310
+ 5000 train 6.022645 (lr=2.4298e-05) (hash(x)=160110619)
311
+ 5400 val loss 5.7864
312
+ 5400 val perplexity 325.8291
313
+ 5400 train 5.721862 (lr=1.6765e-05) (hash(x)=146753405)
314
+ 5100 val loss 6.0679
315
+ 5100 val perplexity 431.7900
316
+ 5100 train 6.077213 (lr=2.3452e-05) (hash(x)=156270070)
317
+ 5500 val loss 5.7769
318
+ 5500 val perplexity 322.7703
319
+ 5500 train 5.724141 (lr=1.6113e-05) (hash(x)=147757398)
320
+ 5200 val loss 6.0523
321
+ 5200 val perplexity 425.0701
322
+ 5200 train 5.917948 (lr=2.2613e-05) (hash(x)=138954242)
323
+ 5600 val loss 5.7728
324
+ 5600 val perplexity 321.4291
325
+ 5600 train 5.645650 (lr=1.5469e-05) (hash(x)=151291757)
326
+ 5300 val loss 6.0469
327
+ 5300 val perplexity 422.7817
328
+ 5300 train 5.894984 (lr=2.1780e-05) (hash(x)=146472367)
329
+ 5700 val loss 5.7594
330
+ 5700 val perplexity 317.1720
331
+ 5700 train 5.807491 (lr=1.4836e-05) (hash(x)=156274392)
332
+ 5400 val loss 6.0328
333
+ 5400 val perplexity 416.8636
334
+ 5400 train 5.969725 (lr=2.0956e-05) (hash(x)=146753405)
335
+ 5800 val loss 5.7541
336
+ 5800 val perplexity 315.4712
337
+ 5800 train 5.722164 (lr=1.4212e-05) (hash(x)=152612266)
338
+ 5500 val loss 6.0239
339
+ 5500 val perplexity 413.1928
340
+ 5500 train 5.972818 (lr=2.0141e-05) (hash(x)=147757398)
341
+ 5900 val loss 5.7477
342
+ 5900 val perplexity 313.4535
343
+ 5900 train 5.910150 (lr=1.3600e-05) (hash(x)=148973201)
344
+ 5600 val loss 6.0236
345
+ 5600 val perplexity 413.0589
346
+ 5600 train 5.907842 (lr=1.9337e-05) (hash(x)=151291757)
347
+ 6000 val loss 5.7391
348
+ 6000 val perplexity 310.7723
349
+ 6000 train 5.699615 (lr=1.3000e-05) (hash(x)=158311490)
350
+ 5700 val loss 6.0070
351
+ 5700 val perplexity 406.2433
352
+ 5700 train 6.048605 (lr=1.8545e-05) (hash(x)=156274392)
353
+ 6100 val loss 5.7323
354
+ 6100 val perplexity 308.6890
355
+ 6100 train 5.659322 (lr=1.2413e-05) (hash(x)=147541319)
356
+ 5800 val loss 5.9992
357
+ 5800 val perplexity 403.1204
358
+ 5800 train 5.955314 (lr=1.7765e-05) (hash(x)=152612266)
359
+ 6200 val loss 5.7315
360
+ 6200 val perplexity 308.4253
361
+ 6200 train 5.799207 (lr=1.1840e-05) (hash(x)=154753980)
362
+ 5900 val loss 5.9945
363
+ 5900 val perplexity 401.2339
364
+ 5900 train 6.142016 (lr=1.7000e-05) (hash(x)=148973201)
365
+ 6300 val loss 5.7249
366
+ 6300 val perplexity 306.4074
367
+ 6300 train 5.620091 (lr=1.1282e-05) (hash(x)=140798998)
368
+ 6000 val loss 5.9841
369
+ 6000 val perplexity 397.0659
370
+ 6000 train 5.956523 (lr=1.6250e-05) (hash(x)=158311490)
371
+ 6400 val loss 5.7182
372
+ 6400 val perplexity 304.3530
373
+ 6400 train 5.600909 (lr=1.0739e-05) (hash(x)=149299516)
374
+ 6100 val loss 5.9760
375
+ 6100 val perplexity 393.8625
376
+ 6100 train 5.886071 (lr=1.5516e-05) (hash(x)=147541319)
377
+ 6500 val loss 5.7138
378
+ 6500 val perplexity 303.0228
379
+ 6500 train 5.701873 (lr=1.0213e-05) (hash(x)=148495806)
380
+ 6200 val loss 5.9756
381
+ 6200 val perplexity 393.7010
382
+ 6200 train 6.034393 (lr=1.4800e-05) (hash(x)=154753980)
383
+ 6600 val loss 5.7058
384
+ 6600 val perplexity 300.6016
385
+ 6600 train 5.683437 (lr=9.7032e-06) (hash(x)=153664387)
386
+ 6300 val loss 5.9669
387
+ 6300 val perplexity 390.2875
388
+ 6300 train 5.861510 (lr=1.4102e-05) (hash(x)=140798998)
389
+ 6700 val loss 5.7030
390
+ 6700 val perplexity 299.7645
391
+ 6700 train 5.687989 (lr=9.2116e-06) (hash(x)=153333491)
392
+ 6400 val loss 5.9614
393
+ 6400 val perplexity 388.1428
394
+ 6400 train 5.835191 (lr=1.3424e-05) (hash(x)=149299516)
395
+ 6800 val loss 5.7039
396
+ 6800 val perplexity 300.0225
397
+ 6800 train 5.457414 (lr=8.7387e-06) (hash(x)=150149382)
398
+ 6500 val loss 5.9534
399
+ 6500 val perplexity 385.0675
400
+ 6500 train 5.958037 (lr=1.2766e-05) (hash(x)=148495806)
401
+ 6900 val loss 5.6986
402
+ 6900 val perplexity 298.4626
403
+ 6900 train 5.785464 (lr=8.2849e-06) (hash(x)=152379767)
404
+ 6600 val loss 5.9466
405
+ 6600 val perplexity 382.4535
406
+ 6600 train 5.915279 (lr=1.2129e-05) (hash(x)=153664387)
407
+ 7000 val loss 5.6892
408
+ 7000 val perplexity 295.6606
409
+ 7000 train 5.736051 (lr=7.8510e-06) (hash(x)=150432537)
410
+ 6700 val loss 5.9436
411
+ 6700 val perplexity 381.3058
412
+ 6700 train 5.918467 (lr=1.1515e-05) (hash(x)=153333491)
413
+ 7100 val loss 5.6892
414
+ 7100 val perplexity 295.6718
415
+ 7100 train 5.562969 (lr=7.4377e-06) (hash(x)=148277836)
416
+ 6800 val loss 5.9436
417
+ 6800 val perplexity 381.3205
418
+ 6800 train 5.689354 (lr=1.0923e-05) (hash(x)=150149382)
419
+ 7200 val loss 5.6873
420
+ 7200 val perplexity 295.0999
421
+ 7200 train 5.689114 (lr=7.0455e-06) (hash(x)=168591659)
422
+ 6900 val loss 5.9350
423
+ 6900 val perplexity 378.0223
424
+ 6900 train 6.004079 (lr=1.0356e-05) (hash(x)=152379767)
425
+ 7300 val loss 5.6811
426
+ 7300 val perplexity 293.2657
427
+ 7300 train 5.662244 (lr=6.6749e-06) (hash(x)=152764851)
428
+ 7000 val loss 5.9268
429
+ 7000 val perplexity 374.9409
430
+ 7000 train 5.972569 (lr=9.8138e-06) (hash(x)=150432537)
431
+ 7400 val loss 5.6773
432
+ 7400 val perplexity 292.1537
433
+ 7400 train 5.639318 (lr=6.3266e-06) (hash(x)=142920131)
434
+ 7100 val loss 5.9259
435
+ 7100 val perplexity 374.5984
436
+ 7100 train 5.799407 (lr=9.2971e-06) (hash(x)=148277836)
437
+ 7500 val loss 5.6755
438
+ 7500 val perplexity 291.6258
439
+ 7500 train 5.671338 (lr=6.0010e-06) (hash(x)=146954649)
440
+ 7200 val loss 5.9228
441
+ 7200 val perplexity 373.4558
442
+ 7200 train 5.928644 (lr=8.8068e-06) (hash(x)=168591659)
443
+ 7600 val loss 5.6754
444
+ 7600 val perplexity 291.5921
445
+ 7600 train 5.457974 (lr=5.6986e-06) (hash(x)=144529566)
446
+ 7300 val loss 5.9162
447
+ 7300 val perplexity 371.0097
448
+ 7300 train 5.890861 (lr=8.3436e-06) (hash(x)=152764851)
449
+ 7700 val loss 5.6700
450
+ 7700 val perplexity 290.0484
451
+ 7700 train 5.628642 (lr=5.4198e-06) (hash(x)=142669945)
452
+ 7400 val loss 5.9115
453
+ 7400 val perplexity 369.2667
454
+ 7400 train 5.861132 (lr=7.9082e-06) (hash(x)=142920131)
455
+ 7800 val loss 5.6662
456
+ 7800 val perplexity 288.9460
457
+ 7800 train 5.725795 (lr=5.1650e-06) (hash(x)=161563764)
458
+ 7500 val loss 5.9086
459
+ 7500 val perplexity 368.1959
460
+ 7500 train 5.903454 (lr=7.5012e-06) (hash(x)=146954649)
461
+ 7900 val loss 5.6640
462
+ 7900 val perplexity 288.2950
463
+ 7900 train 5.460674 (lr=4.9347e-06) (hash(x)=141516532)
464
+ 7600 val loss 5.9086
465
+ 7600 val perplexity 368.1859
466
+ 7600 train 5.701641 (lr=7.1232e-06) (hash(x)=144529566)
467
+ 8000 val loss 5.6656
468
+ 8000 val perplexity 288.7531
469
+ 8000 train 5.532639 (lr=4.7291e-06) (hash(x)=152418457)
470
+ 7700 val loss 5.9026
471
+ 7700 val perplexity 365.9743
472
+ 7700 train 5.854231 (lr=6.7747e-06) (hash(x)=142669945)
473
+ 8100 val loss 5.6601
474
+ 8100 val perplexity 287.1883
475
+ 8100 train 5.657063 (lr=4.5486e-06) (hash(x)=161109470)
476
+ 7800 val loss 5.8985
477
+ 7800 val perplexity 364.4813
478
+ 7800 train 5.976606 (lr=6.4563e-06) (hash(x)=161563764)
479
+ 8200 val loss 5.6564
480
+ 8200 val perplexity 286.1299
481
+ 8200 train 5.783814 (lr=4.3933e-06) (hash(x)=154408822)
482
+ 7900 val loss 5.8950
483
+ 7900 val perplexity 363.2101
484
+ 7900 train 5.693729 (lr=6.1684e-06) (hash(x)=141516532)
485
+ 8300 val loss 5.6549
486
+ 8300 val perplexity 285.7017
487
+ 8300 train 5.537910 (lr=4.2636e-06) (hash(x)=146431691)
488
+ 8000 val loss 5.8972
489
+ 8000 val perplexity 364.0090
490
+ 8000 train 5.762141 (lr=5.9114e-06) (hash(x)=152418457)
491
+ 8400 val loss 5.6552
492
+ 8400 val perplexity 285.7867
493
+ 8400 train 5.427840 (lr=4.1596e-06) (hash(x)=150182527)
494
+ 8100 val loss 5.8903
495
+ 8100 val perplexity 361.5109
496
+ 8100 train 5.879162 (lr=5.6857e-06) (hash(x)=161109470)
497
+ 8500 val loss 5.6491
498
+ 8500 val perplexity 284.0453
499
+ 8500 train 5.715513 (lr=4.0815e-06) (hash(x)=154828709)
500
+ 8200 val loss 5.8859
501
+ 8200 val perplexity 359.9176
502
+ 8200 train 6.011838 (lr=5.4917e-06) (hash(x)=154408822)
503
+ 8600 val loss 5.6487
504
+ 8600 val perplexity 283.9250
505
+ 8600 train 5.561393 (lr=4.0294e-06) (hash(x)=154995428)
506
+ 8300 val loss 5.8841
507
+ 8300 val perplexity 359.2776
508
+ 8300 train 5.761460 (lr=5.3295e-06) (hash(x)=146431691)
509
+ 8700 val loss 5.6484
510
+ 8700 val perplexity 283.8460
511
+ 8700 train 5.544937 (lr=4.0033e-06) (hash(x)=142111043)
512
+ 8400 val loss 5.8840
513
+ 8400 val perplexity 359.2402
514
+ 8400 train 5.656861 (lr=5.1995e-06) (hash(x)=150182527)
515
+ 8749 val loss 5.6464
516
+ 8749 val perplexity 283.2623
517
+ 8500 val loss 5.8779
518
+ 8500 val perplexity 357.0568
519
+ 8500 train 5.951786 (lr=5.1019e-06) (hash(x)=154828709)
520
+ 8600 val loss 5.8766
521
+ 8600 val perplexity 356.5812
522
+ 8600 train 5.774024 (lr=5.0367e-06) (hash(x)=154995428)
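The updated log2.txt is roughly twice as long because it apparently interleaves entries from two runs writing to the same file (note the duplicate steps, with train lines peaking at lr=4.0000e-05 and lr=5.0000e-05 at step 500), so steps do not increase monotonically. Each entry has one of two shapes, "<step> val loss|perplexity <value>" or "<step> train <loss> (lr=<lr>) (hash(x)=<hash>)", and val perplexity is exp(val loss), e.g. exp(5.6262) ≈ 277.60. A minimal parsing sketch under those assumptions (the regex is inferred from the lines above, not taken from the repo):

import math
import re

# Pattern inferred from the log lines above (an assumption, not repo code).
ENTRY = re.compile(
    r"^(?P<step>\d+) (?:"
    r"val (?P<metric>loss|perplexity) (?P<value>[0-9.]+)"
    r"|train (?P<loss>[0-9.]+) \(lr=(?P<lr>[0-9.e+-]+)\) \(hash\(x\)=(?P<h>\d+)\))$"
)

def parse_log(path):
    """Yield (step, field, value) records from one log2.txt-style file."""
    with open(path) as f:
        for line in f:
            m = ENTRY.match(line.strip())
            if m is None:
                continue  # skips headers such as "max_steps: 8750" and blanks
            step = int(m["step"])
            if m["metric"] is not None:
                yield step, "val_" + m["metric"], float(m["value"])
            else:
                yield step, "train_loss", float(m["loss"])
                yield step, "lr", float(m["lr"])

# Sanity check of the loss/perplexity relationship visible above.
assert abs(math.exp(5.6262) - 277.6001) < 0.05

Because two schedules share the file, anything plotted from it should be de-interleaved first; only the train lines carry the lr value needed to tell the runs apart.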
attention_kindselective_n_heads4_seed1342/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:3b784ac0421732561a9feaba30a9e9fef056d1e13bb74eb961fa766d7412a975
3
  size 92843394
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:05f48028ce6ff170e45a1bc92fd2cb7055795a96c8a187c1a44aac35d2b8e75b
3
  size 92843394
attention_kindselective_n_heads4_seed1342/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a42419c28aa5e439c9bce28733a36c3eb5bf07c6395ea15a1c7f729048403a90
3
  size 179406214
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f37ffc5be7cc22cb042d56756e571065c8a63e66b7db829970eb6c191299e319
3
  size 179406214
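The model and optimizer diffs only touch Git LFS pointer files: each gets a new sha256 oid while the size is unchanged (92843394 bytes for the model, 179406214 for the optimizer), consistent with a retrained checkpoint of the same architecture. Since an LFS oid is just the sha256 digest of the file contents, a downloaded checkpoint can be checked against its pointer with a sketch like this (lfs_oid is a hypothetical helper, not part of the repo):

import hashlib

def lfs_oid(path, chunk_size=1 << 20):
    """Return the 'sha256:<hex>' oid Git LFS records for a file."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        # Stream in 1 MiB chunks so large checkpoints don't load into memory.
        while chunk := f.read(chunk_size):
            digest.update(chunk)
    return "sha256:" + digest.hexdigest()

# After downloading the new checkpoint, this should print the oid recorded
# in the pointer diff above:
# print(lfs_oid("attention_kindselective_n_heads4_seed1342/model_08749.pt"))
# sha256:05f48028ce6ff170e45a1bc92fd2cb7055795a96c8a187c1a44aac35d2b8e75b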