andrew-healey committed
Commit a3dd22a · verified · 1 parent: b3db500

Upload folder using huggingface_hub

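The commit message indicates the run folder was pushed with huggingface_hub. A minimal sketch of how such an upload is typically done — the repo id is a placeholder (not stated in this commit), while the local folder path matches the log_dir recorded in args.json:

```python
from huggingface_hub import HfApi

api = HfApi()  # uses the token from `huggingface-cli login` by default

# repo_id is a placeholder; folder_path follows the log_dir in args.json.
api.upload_folder(
    repo_id="andrew-healey/<repo-name>",
    repo_type="model",
    folder_path="wider_is_better_9/attention_kindselective_n_heads4_seed1341",
    path_in_repo="attention_kindselective_n_heads4_seed1341",
    commit_message="Upload folder using huggingface_hub",
)
```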
attention_kindselective_n_heads4_seed1341/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 4.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "4.5e-5_61440_4_1341", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5e-5_61440_4_1341", "n_embd": 256}
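Only two fields differ between the two revisions of args.json: max_lr moves from 4.5e-05 to 5e-05, and the derived key changes from "4.5e-5_61440_4_1341" to "5e-5_61440_4_1341". A small sketch for spotting such differences, assuming local copies of the two revisions (the file names below are placeholders):

```python
import json

# Hypothetical local copies of the old and new args.json revisions.
with open("args_old.json") as f_old, open("args_new.json") as f_new:
    old, new = json.load(f_old), json.load(f_new)

# Print every hyperparameter whose value changed between revisions.
for k in sorted(old.keys() | new.keys()):
    if old.get(k) != new.get(k):
        print(f"{k}: {old.get(k)!r} -> {new.get(k)!r}")
# For this commit, only max_lr and key should be reported.
```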
attention_kindselective_n_heads4_seed1341/log2.txt CHANGED
@@ -1,267 +1,522 @@
1
  max_steps: 8750
2
  0 val loss 11.3095
3
- 0 val perplexity 81594.3281
4
- 0 train 11.313040 (lr=9.0000e-08) (hash(x)=145079536)
5
- 100 val loss 10.0761
6
- 100 val perplexity 23767.8672
7
- 100 train 10.071070 (lr=9.0900e-06) (hash(x)=155800595)
8
- 200 val loss 9.5264
9
- 200 val perplexity 13717.7168
10
- 200 train 9.479525 (lr=1.8090e-05) (hash(x)=145606733)
11
- 300 val loss 8.3244
12
- 300 val perplexity 4123.3882
13
- 300 train 8.313211 (lr=2.7090e-05) (hash(x)=150367139)
14
- 400 val loss 7.6483
15
- 400 val perplexity 2096.9778
16
- 400 train 7.941428 (lr=3.6090e-05) (hash(x)=155747374)
17
- 500 val loss 7.4758
18
- 500 val perplexity 1764.8981
19
- 500 train 7.367132 (lr=4.5000e-05) (hash(x)=140604760)
20
- 600 val loss 7.3790
21
- 600 val perplexity 1602.0277
22
- 600 train 7.323732 (lr=4.4985e-05) (hash(x)=148404734)
23
- 700 val loss 7.2870
24
- 700 val perplexity 1461.2491
25
- 700 train 7.240127 (lr=4.4941e-05) (hash(x)=148115934)
26
- 800 val loss 7.1677
27
- 800 val perplexity 1296.9070
28
- 800 train 7.044980 (lr=4.4868e-05) (hash(x)=137464699)
29
- 900 val loss 7.0626
30
- 900 val perplexity 1167.4529
31
- 900 train 6.872476 (lr=4.4766e-05) (hash(x)=143886042)
32
- 1000 val loss 6.9679
33
- 1000 val perplexity 1061.9860
34
- 1000 train 6.971719 (lr=4.4634e-05) (hash(x)=163799796)
35
- 1100 val loss 6.8610
36
- 1100 val perplexity 954.3155
37
- 1100 train 6.825153 (lr=4.4474e-05) (hash(x)=144592844)
38
- 1200 val loss 6.7707
39
- 1200 val perplexity 871.9357
40
- 1200 train 7.097885 (lr=4.4285e-05) (hash(x)=204706354)
41
- 1300 val loss 6.6947
42
- 1300 val perplexity 808.1379
43
- 1300 train 6.631471 (lr=4.4068e-05) (hash(x)=150862210)
44
- 1400 val loss 6.6223
45
- 1400 val perplexity 751.7076
46
- 1400 train 6.619408 (lr=4.3822e-05) (hash(x)=147766811)
47
- 1500 val loss 6.5493
48
- 1500 val perplexity 698.7467
49
- 1500 train 6.367006 (lr=4.3549e-05) (hash(x)=135925327)
50
- 1600 val loss 6.5088
51
- 1600 val perplexity 671.0128
52
- 1600 train 6.479077 (lr=4.3249e-05) (hash(x)=160440642)
53
- 1700 val loss 6.4531
54
- 1700 val perplexity 634.6756
55
- 1700 train 6.575705 (lr=4.2922e-05) (hash(x)=151184106)
56
- 1800 val loss 6.4062
57
- 1800 val perplexity 605.5698
58
- 1800 train 6.302565 (lr=4.2569e-05) (hash(x)=148100580)
59
- 1900 val loss 6.3862
60
- 1900 val perplexity 593.6020
61
- 1900 train 6.214580 (lr=4.2190e-05) (hash(x)=149434659)
62
- 2000 val loss 6.3288
63
- 2000 val perplexity 560.5059
64
- 2000 train 6.230272 (lr=4.1785e-05) (hash(x)=152285486)
65
- 2100 val loss 6.2902
66
- 2100 val perplexity 539.2587
67
- 2100 train 6.033951 (lr=4.1356e-05) (hash(x)=144294295)
68
- 2200 val loss 6.2656
69
- 2200 val perplexity 526.1706
70
- 2200 train 6.274797 (lr=4.0903e-05) (hash(x)=175030215)
71
- 2300 val loss 6.2425
72
- 2300 val perplexity 514.1522
73
- 2300 train 6.053737 (lr=4.0426e-05) (hash(x)=150831428)
74
- 2400 val loss 6.2011
75
- 2400 val perplexity 493.2859
76
- 2400 train 6.386537 (lr=3.9927e-05) (hash(x)=140808297)
77
- 2500 val loss 6.1761
78
- 2500 val perplexity 481.1316
79
- 2500 train 6.153784 (lr=3.9406e-05) (hash(x)=153160275)
80
- 2600 val loss 6.1569
81
- 2600 val perplexity 471.9454
82
- 2600 train 5.944077 (lr=3.8863e-05) (hash(x)=133990623)
83
- 2700 val loss 6.1428
84
- 2700 val perplexity 465.3676
85
- 2700 train 6.110357 (lr=3.8300e-05) (hash(x)=142860944)
86
- 2800 val loss 6.1104
87
- 2800 val perplexity 450.5392
88
- 2800 train 6.122779 (lr=3.7717e-05) (hash(x)=137959511)
89
- 2900 val loss 6.0858
90
- 2900 val perplexity 439.5509
91
- 2900 train 6.040658 (lr=3.7116e-05) (hash(x)=147009873)
92
- 3000 val loss 6.0703
93
- 3000 val perplexity 432.8103
94
- 3000 train 5.927878 (lr=3.6496e-05) (hash(x)=158264841)
95
- 3100 val loss 6.0638
96
- 3100 val perplexity 430.0071
97
- 3100 train 5.902818 (lr=3.5860e-05) (hash(x)=139232251)
98
- 3200 val loss 6.0297
99
- 3200 val perplexity 415.6103
100
- 3200 train 6.069080 (lr=3.5207e-05) (hash(x)=153436104)
101
- 3300 val loss 6.0157
102
- 3300 val perplexity 409.8088
103
- 3300 train 5.843759 (lr=3.4539e-05) (hash(x)=149681831)
104
- 3400 val loss 6.0050
105
- 3400 val perplexity 405.4450
106
- 3400 train 5.885643 (lr=3.3857e-05) (hash(x)=168425516)
107
- 3500 val loss 5.9896
108
- 3500 val perplexity 399.2447
109
- 3500 train 6.019372 (lr=3.3162e-05) (hash(x)=163104338)
110
- 3600 val loss 5.9641
111
- 3600 val perplexity 389.2151
112
- 3600 train 5.938704 (lr=3.2455e-05) (hash(x)=165109772)
113
- 3700 val loss 5.9637
114
- 3700 val perplexity 389.0497
115
- 3700 train 5.694476 (lr=3.1736e-05) (hash(x)=153420306)
116
- 3800 val loss 5.9347
117
- 3800 val perplexity 377.9158
118
- 3800 train 5.948569 (lr=3.1008e-05) (hash(x)=160168863)
119
- 3900 val loss 5.9212
120
- 3900 val perplexity 372.8728
121
- 3900 train 5.840442 (lr=3.0270e-05) (hash(x)=153906073)
122
- 4000 val loss 5.9103
123
- 4000 val perplexity 368.8176
124
- 4000 train 5.857364 (lr=2.9524e-05) (hash(x)=151055067)
125
- 4100 val loss 5.8939
126
- 4100 val perplexity 362.8057
127
- 4100 train 5.873942 (lr=2.8771e-05) (hash(x)=149629830)
128
- 4200 val loss 5.8796
129
- 4200 val perplexity 357.6752
130
- 4200 train 5.760632 (lr=2.8013e-05) (hash(x)=143101381)
131
- 4300 val loss 5.8715
132
- 4300 val perplexity 354.7955
133
- 4300 train 5.774401 (lr=2.7250e-05) (hash(x)=149712044)
134
- 4400 val loss 5.8542
135
- 4400 val perplexity 348.6813
136
- 4400 train 5.778264 (lr=2.6483e-05) (hash(x)=153446449)
137
- 4500 val loss 5.8414
138
- 4500 val perplexity 344.2694
139
- 4500 train 5.791447 (lr=2.5714e-05) (hash(x)=146086947)
140
- 4600 val loss 5.8349
141
- 4600 val perplexity 342.0447
142
- 4600 train 5.677424 (lr=2.4943e-05) (hash(x)=153800173)
143
- 4700 val loss 5.8201
144
- 4700 val perplexity 336.9922
145
- 4700 train 5.893148 (lr=2.4172e-05) (hash(x)=155962726)
146
- 4800 val loss 5.8126
147
- 4800 val perplexity 334.4903
148
- 4800 train 6.055249 (lr=2.3402e-05) (hash(x)=142045616)
149
- 4900 val loss 5.8005
150
- 4900 val perplexity 330.4518
151
- 4900 train 5.765253 (lr=2.2633e-05) (hash(x)=143418248)
152
- 5000 val loss 5.7905
153
- 5000 val perplexity 327.1776
154
- 5000 train 5.720255 (lr=2.1868e-05) (hash(x)=145789790)
155
- 5100 val loss 5.7847
156
- 5100 val perplexity 325.2880
157
- 5100 train 5.768248 (lr=2.1107e-05) (hash(x)=137795633)
158
- 5200 val loss 5.7723
159
- 5200 val perplexity 321.2600
160
- 5200 train 5.932185 (lr=2.0351e-05) (hash(x)=148907132)
161
- 5300 val loss 5.7665
162
- 5300 val perplexity 319.4049
163
- 5300 train 5.643368 (lr=1.9602e-05) (hash(x)=152343580)
164
- 5400 val loss 5.7569
165
- 5400 val perplexity 316.3512
166
- 5400 train 5.610413 (lr=1.8860e-05) (hash(x)=148578264)
167
- 5500 val loss 5.7440
168
- 5500 val perplexity 312.2961
169
- 5500 train 5.908919 (lr=1.8127e-05) (hash(x)=145635833)
170
- 5600 val loss 5.7393
171
- 5600 val perplexity 310.8486
172
- 5600 train 5.729499 (lr=1.7403e-05) (hash(x)=156337844)
173
- 5700 val loss 5.7345
174
- 5700 val perplexity 309.3647
175
- 5700 train 5.609031 (lr=1.6690e-05) (hash(x)=147168506)
176
- 5800 val loss 5.7259
177
- 5800 val perplexity 306.7088
178
- 5800 train 5.782136 (lr=1.5989e-05) (hash(x)=159566920)
179
- 5900 val loss 5.7173
180
- 5900 val perplexity 304.0780
181
- 5900 train 5.780312 (lr=1.5300e-05) (hash(x)=158273929)
182
- 6000 val loss 5.7163
183
- 6000 val perplexity 303.7896
184
- 6000 train 5.688311 (lr=1.4625e-05) (hash(x)=156649749)
185
- 6100 val loss 5.7060
186
- 6100 val perplexity 300.6730
187
- 6100 train 5.621117 (lr=1.3965e-05) (hash(x)=146812388)
188
- 6200 val loss 5.7017
189
- 6200 val perplexity 299.3790
190
- 6200 train 5.531676 (lr=1.3320e-05) (hash(x)=143522146)
191
- 6300 val loss 5.6966
192
- 6300 val perplexity 297.8462
193
- 6300 train 5.516797 (lr=1.2692e-05) (hash(x)=150124474)
194
- 6400 val loss 5.6903
195
- 6400 val perplexity 295.9842
196
- 6400 train 5.728277 (lr=1.2081e-05) (hash(x)=141242117)
197
- 6500 val loss 5.6830
198
- 6500 val perplexity 293.8160
199
- 6500 train 5.561332 (lr=1.1489e-05) (hash(x)=143529762)
200
- 6600 val loss 5.6778
201
- 6600 val perplexity 292.3132
202
- 6600 train 5.466468 (lr=1.0916e-05) (hash(x)=136948374)
203
- 6700 val loss 5.6787
204
- 6700 val perplexity 292.5690
205
- 6700 train 5.479150 (lr=1.0363e-05) (hash(x)=146268592)
206
- 6800 val loss 5.6684
207
- 6800 val perplexity 289.5688
208
- 6800 train 5.621566 (lr=9.8310e-06) (hash(x)=152676836)
209
- 6900 val loss 5.6620
210
- 6900 val perplexity 287.7147
211
- 6900 train 5.583413 (lr=9.3205e-06) (hash(x)=134657776)
212
- 7000 val loss 5.6593
213
- 7000 val perplexity 286.9359
214
- 7000 train 5.614666 (lr=8.8324e-06) (hash(x)=166721861)
215
- 7100 val loss 5.6580
216
- 7100 val perplexity 286.5740
217
- 7100 train 5.455072 (lr=8.3674e-06) (hash(x)=135496702)
218
- 7200 val loss 5.6546
219
- 7200 val perplexity 285.6103
220
- 7200 train 5.764223 (lr=7.9261e-06) (hash(x)=155567461)
221
- 7300 val loss 5.6465
222
- 7300 val perplexity 283.3002
223
- 7300 train 5.449112 (lr=7.5093e-06) (hash(x)=142803829)
224
- 7400 val loss 5.6460
225
- 7400 val perplexity 283.1502
226
- 7400 train 5.468495 (lr=7.1174e-06) (hash(x)=145294178)
227
- 7500 val loss 5.6438
228
- 7500 val perplexity 282.5349
229
- 7500 train 5.400206 (lr=6.7511e-06) (hash(x)=150573713)
230
- 7600 val loss 5.6417
231
- 7600 val perplexity 281.9352
232
- 7600 train 5.657176 (lr=6.4109e-06) (hash(x)=142771511)
233
- 7700 val loss 5.6357
234
- 7700 val perplexity 280.2563
235
- 7700 train 5.538167 (lr=6.0972e-06) (hash(x)=143602175)
236
- 7800 val loss 5.6325
237
- 7800 val perplexity 279.3481
238
- 7800 train 5.691221 (lr=5.8107e-06) (hash(x)=152379862)
239
- 7900 val loss 5.6315
240
- 7900 val perplexity 279.0784
241
- 7900 train 5.453447 (lr=5.5515e-06) (hash(x)=146655921)
242
- 8000 val loss 5.6326
243
- 8000 val perplexity 279.3739
244
- 8000 train 5.770980 (lr=5.3203e-06) (hash(x)=148262482)
245
- 8100 val loss 5.6243
246
- 8100 val perplexity 277.0847
247
- 8100 train 5.568313 (lr=5.1172e-06) (hash(x)=147683655)
248
- 8200 val loss 5.6249
249
- 8200 val perplexity 277.2312
250
- 8200 train 5.739700 (lr=4.9425e-06) (hash(x)=157312987)
251
- 8300 val loss 5.6217
252
- 8300 val perplexity 276.3641
253
- 8300 train 5.581598 (lr=4.7966e-06) (hash(x)=141107543)
254
- 8400 val loss 5.6180
255
- 8400 val perplexity 275.3497
256
- 8400 train 5.610967 (lr=4.6796e-06) (hash(x)=141323024)
257
- 8500 val loss 5.6163
258
- 8500 val perplexity 274.8697
259
- 8500 train 5.587502 (lr=4.5917e-06) (hash(x)=150696521)
260
- 8600 val loss 5.6160
261
- 8600 val perplexity 274.7957
262
- 8600 train 5.565991 (lr=4.5330e-06) (hash(x)=162288191)
263
- 8700 val loss 5.6119
264
- 8700 val perplexity 273.6724
265
- 8700 train 5.479275 (lr=4.5037e-06) (hash(x)=152860941)
266
- 8749 val loss 5.6102
267
- 8749 val perplexity 273.1960
1
  max_steps: 8750
2
+ 100 val loss 10.1171
3
+ 100 val perplexity 24761.9980
4
+ 100 train 10.112064 (lr=8.0800e-06) (hash(x)=155800595)
5
  0 val loss 11.3095
6
+ 0 val perplexity 81594.2500
7
+ 200 val loss 9.6651
8
+ 200 val perplexity 15758.2021
9
+ 200 train 9.620489 (lr=1.6080e-05) (hash(x)=145606733)
10
+ 0 train 11.313038 (lr=1.0000e-07) (hash(x)=145079536)
11
+ 300 val loss 8.5370
12
+ 300 val perplexity 5099.7910
13
+ 300 train 8.528650 (lr=2.4080e-05) (hash(x)=150367139)
14
+ 100 val loss 10.0532
15
+ 100 val perplexity 23230.9727
16
+ 100 train 10.047434 (lr=1.0100e-05) (hash(x)=155800595)
17
+ 400 val loss 7.7461
18
+ 400 val perplexity 2312.6008
19
+ 400 train 8.032513 (lr=3.2080e-05) (hash(x)=155747374)
20
+ 200 val loss 9.4102
21
+ 200 val perplexity 12211.7793
22
+ 200 train 9.361097 (lr=2.0100e-05) (hash(x)=145606733)
23
+ 500 val loss 7.5205
24
+ 500 val perplexity 1845.5077
25
+ 500 train 7.412711 (lr=4.0000e-05) (hash(x)=140604760)
26
+ 300 val loss 8.0795
27
+ 300 val perplexity 3227.5703
28
+ 300 train 8.047569 (lr=3.0100e-05) (hash(x)=150367139)
29
+ 600 val loss 7.4285
30
+ 600 val perplexity 1683.2040
31
+ 600 train 7.377125 (lr=3.9987e-05) (hash(x)=148404734)
32
+ 400 val loss 7.5665
33
+ 400 val perplexity 1932.2872
34
+ 400 train 7.873068 (lr=4.0100e-05) (hash(x)=155747374)
35
+ 700 val loss 7.3551
36
+ 700 val perplexity 1564.1499
37
+ 700 train 7.307829 (lr=3.9948e-05) (hash(x)=148115934)
38
+ 500 val loss 7.4186
39
+ 500 val perplexity 1666.7257
40
+ 500 train 7.307360 (lr=5.0000e-05) (hash(x)=140604760)
41
+ 800 val loss 7.2645
42
+ 800 val perplexity 1428.6420
43
+ 800 train 7.149124 (lr=3.9883e-05) (hash(x)=137464699)
44
+ 600 val loss 7.3078
45
+ 600 val perplexity 1491.8918
46
+ 600 train 7.244214 (lr=4.9984e-05) (hash(x)=148404734)
47
+ 900 val loss 7.1849
48
+ 900 val perplexity 1319.3766
49
+ 900 train 6.996758 (lr=3.9792e-05) (hash(x)=143886042)
50
+ 700 val loss 7.2002
51
+ 700 val perplexity 1339.6649
52
+ 700 train 7.151448 (lr=4.9935e-05) (hash(x)=148115934)
53
+ 1000 val loss 7.1057
54
+ 1000 val perplexity 1218.9403
55
+ 1000 train 7.108614 (lr=3.9675e-05) (hash(x)=163799796)
56
+ 800 val loss 7.0893
57
+ 800 val perplexity 1199.0146
58
+ 800 train 6.967818 (lr=4.9853e-05) (hash(x)=137464699)
59
+ 1100 val loss 7.0070
60
+ 1100 val perplexity 1104.3334
61
+ 1100 train 6.960635 (lr=3.9532e-05) (hash(x)=144592844)
62
+ 900 val loss 6.9989
63
+ 900 val perplexity 1095.4117
64
+ 900 train 6.811148 (lr=4.9739e-05) (hash(x)=143886042)
65
+ 1200 val loss 6.9284
66
+ 1200 val perplexity 1020.8506
67
+ 1200 train 7.269214 (lr=3.9364e-05) (hash(x)=204706354)
68
+ 1000 val loss 6.9037
69
+ 1000 val perplexity 995.9300
70
+ 1000 train 6.910965 (lr=4.9593e-05) (hash(x)=163799796)
71
+ 1300 val loss 6.8478
72
+ 1300 val perplexity 941.8114
73
+ 1300 train 6.778065 (lr=3.9171e-05) (hash(x)=150862210)
74
+ 1100 val loss 6.7992
75
+ 1100 val perplexity 897.1482
76
+ 1100 train 6.766705 (lr=4.9415e-05) (hash(x)=144592844)
77
+ 1400 val loss 6.7591
78
+ 1400 val perplexity 861.8267
79
+ 1400 train 6.746733 (lr=3.8953e-05) (hash(x)=147766811)
80
+ 1200 val loss 6.7084
81
+ 1200 val perplexity 819.2203
82
+ 1200 train 7.037606 (lr=4.9205e-05) (hash(x)=204706354)
83
+ 1500 val loss 6.6851
84
+ 1500 val perplexity 800.3522
85
+ 1500 train 6.497079 (lr=3.8711e-05) (hash(x)=135925327)
86
+ 1300 val loss 6.6284
87
+ 1300 val perplexity 756.2941
88
+ 1300 train 6.567288 (lr=4.8964e-05) (hash(x)=150862210)
89
+ 1600 val loss 6.6306
90
+ 1600 val perplexity 757.9744
91
+ 1600 train 6.597507 (lr=3.8444e-05) (hash(x)=160440642)
92
+ 1400 val loss 6.5553
93
+ 1400 val perplexity 702.9320
94
+ 1400 train 6.554690 (lr=4.8691e-05) (hash(x)=147766811)
95
+ 1700 val loss 6.5611
96
+ 1700 val perplexity 707.0446
97
+ 1700 train 6.665172 (lr=3.8153e-05) (hash(x)=151184106)
98
+ 1500 val loss 6.4862
99
+ 1500 val perplexity 655.9997
100
+ 1500 train 6.305926 (lr=4.8388e-05) (hash(x)=135925327)
101
+ 1800 val loss 6.5172
102
+ 1800 val perplexity 676.6623
103
+ 1800 train 6.413347 (lr=3.7839e-05) (hash(x)=148100580)
104
+ 1600 val loss 6.4483
105
+ 1600 val perplexity 631.6206
106
+ 1600 train 6.418903 (lr=4.8055e-05) (hash(x)=160440642)
107
+ 1900 val loss 6.4792
108
+ 1900 val perplexity 651.4436
109
+ 1900 train 6.312384 (lr=3.7502e-05) (hash(x)=149434659)
110
+ 1700 val loss 6.3908
111
+ 1700 val perplexity 596.3445
112
+ 1700 train 6.514476 (lr=4.7691e-05) (hash(x)=151184106)
113
+ 2000 val loss 6.4197
114
+ 2000 val perplexity 613.8058
115
+ 2000 train 6.322248 (lr=3.7143e-05) (hash(x)=152285486)
116
+ 1800 val loss 6.3551
117
+ 1800 val perplexity 575.4153
118
+ 1800 train 6.255002 (lr=4.7299e-05) (hash(x)=148100580)
119
+ 2100 val loss 6.3787
120
+ 2100 val perplexity 589.1384
121
+ 2100 train 6.119319 (lr=3.6761e-05) (hash(x)=144294295)
122
+ 1900 val loss 6.3350
123
+ 1900 val perplexity 563.9974
124
+ 1900 train 6.163347 (lr=4.6878e-05) (hash(x)=149434659)
125
+ 2200 val loss 6.3501
126
+ 2200 val perplexity 572.5634
127
+ 2200 train 6.368137 (lr=3.6358e-05) (hash(x)=175030215)
128
+ 2000 val loss 6.2730
129
+ 2000 val perplexity 530.0711
130
+ 2000 train 6.177517 (lr=4.6428e-05) (hash(x)=152285486)
131
+ 2300 val loss 6.3275
132
+ 2300 val perplexity 559.7407
133
+ 2300 train 6.127307 (lr=3.5935e-05) (hash(x)=150831428)
134
+ 2100 val loss 6.2385
135
+ 2100 val perplexity 512.0662
136
+ 2100 train 5.980282 (lr=4.5951e-05) (hash(x)=144294295)
137
+ 2400 val loss 6.2802
138
+ 2400 val perplexity 533.9189
139
+ 2400 train 6.456465 (lr=3.5491e-05) (hash(x)=140808297)
140
+ 2200 val loss 6.2135
141
+ 2200 val perplexity 499.4246
142
+ 2200 train 6.216037 (lr=4.5448e-05) (hash(x)=175030215)
143
+ 2500 val loss 6.2531
144
+ 2500 val perplexity 519.6355
145
+ 2500 train 6.235676 (lr=3.5027e-05) (hash(x)=153160275)
146
+ 2300 val loss 6.1925
147
+ 2300 val perplexity 489.0615
148
+ 2300 train 6.003661 (lr=4.4918e-05) (hash(x)=150831428)
149
+ 2600 val loss 6.2302
150
+ 2600 val perplexity 507.8468
151
+ 2600 train 6.007129 (lr=3.4545e-05) (hash(x)=133990623)
152
+ 2400 val loss 6.1525
153
+ 2400 val perplexity 469.8813
154
+ 2400 train 6.337286 (lr=4.4363e-05) (hash(x)=140808297)
155
+ 2700 val loss 6.2128
156
+ 2700 val perplexity 499.0825
157
+ 2700 train 6.184642 (lr=3.4044e-05) (hash(x)=142860944)
158
+ 2500 val loss 6.1242
159
+ 2500 val perplexity 456.7643
160
+ 2500 train 6.100560 (lr=4.3784e-05) (hash(x)=153160275)
161
+ 2800 val loss 6.1823
162
+ 2800 val perplexity 484.1247
163
+ 2800 train 6.199511 (lr=3.3526e-05) (hash(x)=137959511)
164
+ 2600 val loss 6.1124
165
+ 2600 val perplexity 451.4389
166
+ 2600 train 5.900442 (lr=4.3181e-05) (hash(x)=133990623)
167
+ 2900 val loss 6.1564
168
+ 2900 val perplexity 471.7105
169
+ 2900 train 6.107897 (lr=3.2992e-05) (hash(x)=147009873)
170
+ 2700 val loss 6.0887
171
+ 2700 val perplexity 440.8575
172
+ 2700 train 6.055900 (lr=4.2555e-05) (hash(x)=142860944)
173
+ 3000 val loss 6.1408
174
+ 3000 val perplexity 464.4283
175
+ 3000 train 5.998592 (lr=3.2441e-05) (hash(x)=158264841)
176
+ 2800 val loss 6.0603
177
+ 2800 val perplexity 428.4908
178
+ 2800 train 6.074576 (lr=4.1908e-05) (hash(x)=137959511)
179
+ 3100 val loss 6.1262
180
+ 3100 val perplexity 457.6978
181
+ 3100 train 5.968262 (lr=3.1875e-05) (hash(x)=139232251)
182
+ 2900 val loss 6.0398
183
+ 2900 val perplexity 419.8025
184
+ 2900 train 5.998567 (lr=4.1240e-05) (hash(x)=147009873)
185
+ 3200 val loss 6.0981
186
+ 3200 val perplexity 444.9918
187
+ 3200 train 6.139701 (lr=3.1295e-05) (hash(x)=153436104)
188
+ 3000 val loss 6.0207
189
+ 3000 val perplexity 411.8819
190
+ 3000 train 5.875710 (lr=4.0551e-05) (hash(x)=158264841)
191
+ 3300 val loss 6.0828
192
+ 3300 val perplexity 438.2633
193
+ 3300 train 5.901386 (lr=3.0702e-05) (hash(x)=149681831)
194
+ 3100 val loss 6.0151
195
+ 3100 val perplexity 409.5502
196
+ 3100 train 5.848797 (lr=3.9844e-05) (hash(x)=139232251)
197
+ 3400 val loss 6.0727
198
+ 3400 val perplexity 433.8536
199
+ 3400 train 5.962709 (lr=3.0095e-05) (hash(x)=168425516)
200
+ 3200 val loss 5.9804
201
+ 3200 val perplexity 395.6065
202
+ 3200 train 6.026180 (lr=3.9119e-05) (hash(x)=153436104)
203
+ 3500 val loss 6.0482
204
+ 3500 val perplexity 423.3637
205
+ 3500 train 6.077012 (lr=2.9477e-05) (hash(x)=163104338)
206
+ 3300 val loss 5.9644
207
+ 3300 val perplexity 389.3310
208
+ 3300 train 5.787261 (lr=3.8377e-05) (hash(x)=149681831)
209
+ 3600 val loss 6.0315
210
+ 3600 val perplexity 416.3579
211
+ 3600 train 6.009035 (lr=2.8849e-05) (hash(x)=165109772)
212
+ 3400 val loss 5.9549
213
+ 3400 val perplexity 385.6530
214
+ 3400 train 5.827873 (lr=3.7619e-05) (hash(x)=168425516)
215
+ 3700 val loss 6.0278
216
+ 3700 val perplexity 414.8033
217
+ 3700 train 5.760067 (lr=2.8210e-05) (hash(x)=153420306)
218
+ 3500 val loss 5.9309
219
+ 3500 val perplexity 376.5024
220
+ 3500 train 5.961608 (lr=3.6847e-05) (hash(x)=163104338)
221
+ 3800 val loss 5.9994
222
+ 3800 val perplexity 403.2057
223
+ 3800 train 6.023686 (lr=2.7562e-05) (hash(x)=160168863)
224
+ 3600 val loss 5.9163
225
+ 3600 val perplexity 371.0494
226
+ 3600 train 5.887290 (lr=3.6061e-05) (hash(x)=165109772)
227
+ 3900 val loss 5.9951
228
+ 3900 val perplexity 401.4536
229
+ 3900 train 5.910920 (lr=2.6907e-05) (hash(x)=153906073)
230
+ 3700 val loss 5.9126
231
+ 3700 val perplexity 369.6673
232
+ 3700 train 5.644318 (lr=3.5263e-05) (hash(x)=153420306)
233
+ 4000 val loss 5.9782
234
+ 4000 val perplexity 394.7390
235
+ 4000 train 5.915583 (lr=2.6244e-05) (hash(x)=151055067)
236
+ 3800 val loss 5.8868
237
+ 3800 val perplexity 360.2454
238
+ 3800 train 5.901347 (lr=3.4453e-05) (hash(x)=160168863)
239
+ 4100 val loss 5.9631
240
+ 4100 val perplexity 388.8174
241
+ 4100 train 5.942003 (lr=2.5575e-05) (hash(x)=149629830)
242
+ 3900 val loss 5.8704
243
+ 3900 val perplexity 354.3925
244
+ 3900 train 5.782591 (lr=3.3633e-05) (hash(x)=153906073)
245
+ 4200 val loss 5.9547
246
+ 4200 val perplexity 385.5438
247
+ 4200 train 5.834734 (lr=2.4900e-05) (hash(x)=143101381)
248
+ 4000 val loss 5.8594
249
+ 4000 val perplexity 350.5174
250
+ 4000 train 5.806424 (lr=3.2805e-05) (hash(x)=151055067)
251
+ 4300 val loss 5.9447
252
+ 4300 val perplexity 381.7111
253
+ 4300 train 5.853470 (lr=2.4222e-05) (hash(x)=149712044)
254
+ 4100 val loss 5.8430
255
+ 4100 val perplexity 344.7963
256
+ 4100 train 5.818872 (lr=3.1968e-05) (hash(x)=149629830)
257
+ 4400 val loss 5.9280
258
+ 4400 val perplexity 375.4156
259
+ 4400 train 5.852243 (lr=2.3540e-05) (hash(x)=153446449)
260
+ 4200 val loss 5.8300
261
+ 4200 val perplexity 340.3713
262
+ 4200 train 5.713513 (lr=3.1126e-05) (hash(x)=143101381)
263
+ 4500 val loss 5.9134
264
+ 4500 val perplexity 369.9484
265
+ 4500 train 5.863609 (lr=2.2856e-05) (hash(x)=146086947)
266
+ 4300 val loss 5.8179
267
+ 4300 val perplexity 336.2538
268
+ 4300 train 5.726902 (lr=3.0277e-05) (hash(x)=149712044)
269
+ 4600 val loss 5.9079
270
+ 4600 val perplexity 367.9209
271
+ 4600 train 5.746026 (lr=2.2171e-05) (hash(x)=153800173)
272
+ 4400 val loss 5.8030
273
+ 4400 val perplexity 331.2757
274
+ 4400 train 5.729753 (lr=2.9425e-05) (hash(x)=153446449)
275
+ 4700 val loss 5.8936
276
+ 4700 val perplexity 362.7248
277
+ 4700 train 5.950695 (lr=2.1486e-05) (hash(x)=155962726)
278
+ 4500 val loss 5.7892
279
+ 4500 val perplexity 326.7505
280
+ 4500 train 5.736334 (lr=2.8571e-05) (hash(x)=146086947)
281
+ 4800 val loss 5.8870
282
+ 4800 val perplexity 360.3289
283
+ 4800 train 6.127524 (lr=2.0801e-05) (hash(x)=142045616)
284
+ 4600 val loss 5.7816
285
+ 4600 val perplexity 324.2762
286
+ 4600 train 5.622052 (lr=2.7714e-05) (hash(x)=153800173)
287
+ 4900 val loss 5.8733
288
+ 4900 val perplexity 355.4223
289
+ 4900 train 5.846272 (lr=2.0118e-05) (hash(x)=143418248)
290
+ 4700 val loss 5.7701
291
+ 4700 val perplexity 320.5825
292
+ 4700 train 5.849279 (lr=2.6857e-05) (hash(x)=155962726)
293
+ 5000 val loss 5.8674
294
+ 5000 val perplexity 353.3279
295
+ 5000 train 5.789097 (lr=1.9438e-05) (hash(x)=145789790)
296
+ 4800 val loss 5.7609
297
+ 4800 val perplexity 317.6197
298
+ 4800 train 6.008756 (lr=2.6002e-05) (hash(x)=142045616)
299
+ 5100 val loss 5.8621
300
+ 5100 val perplexity 351.4626
301
+ 5100 train 5.837859 (lr=1.8762e-05) (hash(x)=137795633)
302
+ 4900 val loss 5.7479
303
+ 4900 val perplexity 313.5262
304
+ 4900 train 5.718339 (lr=2.5148e-05) (hash(x)=143418248)
305
+ 5200 val loss 5.8508
306
+ 5200 val perplexity 347.5151
307
+ 5200 train 6.012800 (lr=1.8090e-05) (hash(x)=148907132)
308
+ 5000 val loss 5.7359
309
+ 5000 val perplexity 309.7813
310
+ 5000 train 5.662224 (lr=2.4298e-05) (hash(x)=145789790)
311
+ 5300 val loss 5.8430
312
+ 5300 val perplexity 344.8073
313
+ 5300 train 5.720342 (lr=1.7424e-05) (hash(x)=152343580)
314
+ 5100 val loss 5.7307
315
+ 5100 val perplexity 308.1837
316
+ 5100 train 5.712019 (lr=2.3452e-05) (hash(x)=137795633)
317
+ 5400 val loss 5.8335
318
+ 5400 val perplexity 341.5411
319
+ 5400 train 5.687907 (lr=1.6765e-05) (hash(x)=148578264)
320
+ 5200 val loss 5.7172
321
+ 5200 val perplexity 304.0673
322
+ 5200 train 5.874541 (lr=2.2613e-05) (hash(x)=148907132)
323
+ 5500 val loss 5.8229
324
+ 5500 val perplexity 337.9455
325
+ 5500 train 5.987973 (lr=1.6113e-05) (hash(x)=145635833)
326
+ 5300 val loss 5.7104
327
+ 5300 val perplexity 301.9879
328
+ 5300 train 5.584001 (lr=2.1780e-05) (hash(x)=152343580)
329
+ 5600 val loss 5.8184
330
+ 5600 val perplexity 336.4306
331
+ 5600 train 5.804915 (lr=1.5469e-05) (hash(x)=156337844)
332
+ 5400 val loss 5.7041
333
+ 5400 val perplexity 300.1102
334
+ 5400 train 5.555911 (lr=2.0956e-05) (hash(x)=148578264)
335
+ 5700 val loss 5.8127
336
+ 5700 val perplexity 334.5068
337
+ 5700 train 5.692535 (lr=1.4836e-05) (hash(x)=147168506)
338
+ 5500 val loss 5.6908
339
+ 5500 val perplexity 296.1326
340
+ 5500 train 5.854535 (lr=2.0141e-05) (hash(x)=145635833)
341
+ 5800 val loss 5.8009
342
+ 5800 val perplexity 330.6078
343
+ 5800 train 5.861600 (lr=1.4212e-05) (hash(x)=159566920)
344
+ 5600 val loss 5.6859
345
+ 5600 val perplexity 294.6682
346
+ 5600 train 5.674375 (lr=1.9337e-05) (hash(x)=156337844)
347
+ 5900 val loss 5.7958
348
+ 5900 val perplexity 328.9098
349
+ 5900 train 5.869177 (lr=1.3600e-05) (hash(x)=158273929)
350
+ 5700 val loss 5.6814
351
+ 5700 val perplexity 293.3595
352
+ 5700 train 5.554213 (lr=1.8545e-05) (hash(x)=147168506)
353
+ 6000 val loss 5.7954
354
+ 6000 val perplexity 328.7870
355
+ 6000 train 5.764714 (lr=1.3000e-05) (hash(x)=156649749)
356
+ 5800 val loss 5.6695
357
+ 5800 val perplexity 289.8786
358
+ 5800 train 5.722130 (lr=1.7765e-05) (hash(x)=159566920)
359
+ 6100 val loss 5.7833
360
+ 6100 val perplexity 324.8376
361
+ 6100 train 5.699610 (lr=1.2413e-05) (hash(x)=146812388)
362
+ 5900 val loss 5.6619
363
+ 5900 val perplexity 287.7089
364
+ 5900 train 5.723249 (lr=1.7000e-05) (hash(x)=158273929)
365
+ 6200 val loss 5.7795
366
+ 6200 val perplexity 323.6072
367
+ 6200 train 5.603285 (lr=1.1840e-05) (hash(x)=143522146)
368
+ 6000 val loss 5.6624
369
+ 6000 val perplexity 287.8420
370
+ 6000 train 5.632993 (lr=1.6250e-05) (hash(x)=156649749)
371
+ 6300 val loss 5.7735
372
+ 6300 val perplexity 321.6751
373
+ 6300 train 5.599552 (lr=1.1282e-05) (hash(x)=150124474)
374
+ 6100 val loss 5.6500
375
+ 6100 val perplexity 284.3049
376
+ 6100 train 5.567279 (lr=1.5516e-05) (hash(x)=146812388)
377
+ 6400 val loss 5.7706
378
+ 6400 val perplexity 320.7302
379
+ 6400 train 5.803510 (lr=1.0739e-05) (hash(x)=141242117)
380
+ 6200 val loss 5.6470
381
+ 6200 val perplexity 283.4369
382
+ 6200 train 5.477498 (lr=1.4800e-05) (hash(x)=143522146)
383
+ 6500 val loss 5.7612
384
+ 6500 val perplexity 317.7372
385
+ 6500 train 5.646392 (lr=1.0213e-05) (hash(x)=143529762)
386
+ 6300 val loss 5.6428
387
+ 6300 val perplexity 282.2395
388
+ 6300 train 5.460387 (lr=1.4102e-05) (hash(x)=150124474)
389
+ 6600 val loss 5.7580
390
+ 6600 val perplexity 316.7123
391
+ 6600 train 5.543950 (lr=9.7032e-06) (hash(x)=136948374)
392
+ 6400 val loss 5.6350
393
+ 6400 val perplexity 280.0507
394
+ 6400 train 5.671438 (lr=1.3424e-05) (hash(x)=141242117)
395
+ 6700 val loss 5.7555
396
+ 6700 val perplexity 315.9332
397
+ 6700 train 5.561884 (lr=9.2116e-06) (hash(x)=146268592)
398
+ 6500 val loss 5.6280
399
+ 6500 val perplexity 278.1046
400
+ 6500 train 5.501300 (lr=1.2766e-05) (hash(x)=143529762)
401
+ 6800 val loss 5.7477
402
+ 6800 val perplexity 313.4810
403
+ 6800 train 5.701334 (lr=8.7387e-06) (hash(x)=152676836)
404
+ 6600 val loss 5.6238
405
+ 6600 val perplexity 276.9468
406
+ 6600 train 5.410007 (lr=1.2129e-05) (hash(x)=136948374)
407
+ 6900 val loss 5.7414
408
+ 6900 val perplexity 311.5153
409
+ 6900 train 5.658071 (lr=8.2849e-06) (hash(x)=134657776)
410
+ 6700 val loss 5.6229
411
+ 6700 val perplexity 276.6846
412
+ 6700 train 5.419105 (lr=1.1515e-05) (hash(x)=146268592)
413
+ 7000 val loss 5.7382
414
+ 7000 val perplexity 310.4917
415
+ 7000 train 5.696815 (lr=7.8510e-06) (hash(x)=166721861)
416
+ 6800 val loss 5.6152
417
+ 6800 val perplexity 274.5602
418
+ 6800 train 5.570204 (lr=1.0923e-05) (hash(x)=152676836)
419
+ 7100 val loss 5.7366
420
+ 7100 val perplexity 310.0130
421
+ 7100 train 5.535275 (lr=7.4377e-06) (hash(x)=135496702)
422
+ 6900 val loss 5.6066
423
+ 6900 val perplexity 272.2095
424
+ 6900 train 5.530090 (lr=1.0356e-05) (hash(x)=134657776)
425
+ 7200 val loss 5.7336
426
+ 7200 val perplexity 309.0950
427
+ 7200 train 5.841577 (lr=7.0455e-06) (hash(x)=155567461)
428
+ 7000 val loss 5.6050
429
+ 7000 val perplexity 271.7800
430
+ 7000 train 5.559492 (lr=9.8138e-06) (hash(x)=166721861)
431
+ 7300 val loss 5.7267
432
+ 7300 val perplexity 306.9496
433
+ 7300 train 5.524444 (lr=6.6749e-06) (hash(x)=142803829)
434
+ 7100 val loss 5.6054
435
+ 7100 val perplexity 271.8886
436
+ 7100 train 5.398874 (lr=9.2971e-06) (hash(x)=135496702)
437
+ 7400 val loss 5.7253
438
+ 7400 val perplexity 306.5243
439
+ 7400 train 5.550263 (lr=6.3266e-06) (hash(x)=145294178)
440
+ 7200 val loss 5.5987
441
+ 7200 val perplexity 270.0845
442
+ 7200 train 5.709658 (lr=8.8068e-06) (hash(x)=155567461)
443
+ 7500 val loss 5.7234
444
+ 7500 val perplexity 305.9535
445
+ 7500 train 5.481024 (lr=6.0010e-06) (hash(x)=150573713)
446
+ 7300 val loss 5.5917
447
+ 7300 val perplexity 268.2025
448
+ 7300 train 5.395301 (lr=8.3436e-06) (hash(x)=142803829)
449
+ 7600 val loss 5.7208
450
+ 7600 val perplexity 305.1357
451
+ 7600 train 5.742909 (lr=5.6986e-06) (hash(x)=142771511)
452
+ 7400 val loss 5.5920
453
+ 7400 val perplexity 268.2589
454
+ 7400 train 5.410421 (lr=7.9082e-06) (hash(x)=145294178)
455
+ 7700 val loss 5.7145
456
+ 7700 val perplexity 303.2360
457
+ 7700 train 5.618597 (lr=5.4198e-06) (hash(x)=143602175)
458
+ 7500 val loss 5.5897
459
+ 7500 val perplexity 267.6545
460
+ 7500 train 5.347271 (lr=7.5012e-06) (hash(x)=150573713)
461
+ 7800 val loss 5.7126
462
+ 7800 val perplexity 302.6505
463
+ 7800 train 5.756348 (lr=5.1650e-06) (hash(x)=152379862)
464
+ 7600 val loss 5.5874
465
+ 7600 val perplexity 267.0536
466
+ 7600 train 5.605257 (lr=7.1232e-06) (hash(x)=142771511)
467
+ 7900 val loss 5.7111
468
+ 7900 val perplexity 302.1945
469
+ 7900 train 5.537019 (lr=4.9347e-06) (hash(x)=146655921)
470
+ 7700 val loss 5.5809
471
+ 7700 val perplexity 265.2994
472
+ 7700 train 5.483287 (lr=6.7747e-06) (hash(x)=143602175)
473
+ 8000 val loss 5.7129
474
+ 8000 val perplexity 302.7426
475
+ 8000 train 5.840605 (lr=4.7291e-06) (hash(x)=148262482)
476
+ 7800 val loss 5.5780
477
+ 7800 val perplexity 264.5419
478
+ 7800 train 5.642337 (lr=6.4563e-06) (hash(x)=152379862)
479
+ 8100 val loss 5.7041
480
+ 8100 val perplexity 300.1092
481
+ 8100 train 5.641214 (lr=4.5486e-06) (hash(x)=147683655)
482
+ 7900 val loss 5.5774
483
+ 7900 val perplexity 264.3962
484
+ 7900 train 5.391592 (lr=6.1684e-06) (hash(x)=146655921)
485
+ 8200 val loss 5.7048
486
+ 8200 val perplexity 300.2959
487
+ 8200 train 5.807380 (lr=4.3933e-06) (hash(x)=157312987)
488
+ 8000 val loss 5.5782
489
+ 8000 val perplexity 264.5894
490
+ 8000 train 5.719192 (lr=5.9114e-06) (hash(x)=148262482)
491
+ 8300 val loss 5.7019
492
+ 8300 val perplexity 299.4422
493
+ 8300 train 5.664444 (lr=4.2636e-06) (hash(x)=141107543)
494
+ 8100 val loss 5.5697
495
+ 8100 val perplexity 262.3572
496
+ 8100 train 5.517353 (lr=5.6857e-06) (hash(x)=147683655)
497
+ 8400 val loss 5.6976
498
+ 8400 val perplexity 298.1539
499
+ 8400 train 5.683732 (lr=4.1596e-06) (hash(x)=141323024)
500
+ 8200 val loss 5.5705
501
+ 8200 val perplexity 262.5577
502
+ 8200 train 5.688238 (lr=5.4917e-06) (hash(x)=157312987)
503
+ 8500 val loss 5.6963
504
+ 8500 val perplexity 297.7502
505
+ 8500 train 5.670836 (lr=4.0815e-06) (hash(x)=150696521)
506
+ 8300 val loss 5.5666
507
+ 8300 val perplexity 261.5423
508
+ 8300 train 5.523445 (lr=5.3295e-06) (hash(x)=141107543)
509
+ 8600 val loss 5.6959
510
+ 8600 val perplexity 297.6495
511
+ 8600 train 5.643767 (lr=4.0294e-06) (hash(x)=162288191)
512
+ 8400 val loss 5.5627
513
+ 8400 val perplexity 260.5137
514
+ 8400 train 5.558888 (lr=5.1995e-06) (hash(x)=141323024)
515
+ 8700 val loss 5.6922
516
+ 8700 val perplexity 296.5599
517
+ 8700 train 5.560731 (lr=4.0033e-06) (hash(x)=152860941)
518
+ 8500 val loss 5.5615
519
+ 8500 val perplexity 260.2245
520
+ 8500 train 5.526323 (lr=5.1019e-06) (hash(x)=150696521)
521
+ 8749 val loss 5.6907
522
+ 8749 val perplexity 296.0869
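Each log entry follows a fixed pattern ("<step> val loss <x>", "<step> val perplexity <x>", "<step> train <loss> (lr=<lr>) (hash(x)=<h>)"), and the updated log interleaves entries from more than one run, so step numbers repeat. A minimal sketch, assuming a local copy of log2.txt, for pulling out the validation-loss entries:

```python
import re

# Matches lines of the form "<step> val loss <value>".
VAL_LOSS = re.compile(r"^(\d+) val loss ([\d.]+)$")

def read_val_losses(path):
    """Return (step, val_loss) pairs in file order; duplicate steps are kept."""
    points = []
    with open(path) as f:
        for line in f:
            m = VAL_LOSS.match(line.strip())
            if m:
                points.append((int(m.group(1)), float(m.group(2))))
    return points

# For the updated log above, the final entry is (8749, 5.6907).
print(read_val_losses("log2.txt")[-1])
```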
attention_kindselective_n_heads4_seed1341/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7b19bcdcc795560643bed6a5252e945b59403b36d9fb80a6c7c1978ec18d4c4d
+ oid sha256:60f9795517c86381d63141ace51ce5da55dbc8de0aa3e2ee26b0ffd6d3c88332
  size 92843394
attention_kindselective_n_heads4_seed1341/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0642c7ecdf284e5d06d2b300e8988d6e95c5e528eeaf54c124ecd155f33aaac9
+ oid sha256:43538ac4619bfa4754892879de2c96b45a186312926324b7e0fa80cc11d61f8b
  size 179406214
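The .pt files are stored as Git LFS objects, so the pointers above record only the sha256 and size (about 93 MB for the model checkpoint, 179 MB for the optimizer state). A sketch for fetching and inspecting the model file — the repo id is a placeholder, and treating the file as a standard torch-serialized checkpoint is an assumption:

```python
import torch
from huggingface_hub import hf_hub_download

# repo_id is a placeholder; the filename matches the path shown in this commit.
path = hf_hub_download(
    repo_id="andrew-healey/<repo-name>",
    filename="attention_kindselective_n_heads4_seed1341/model_08749.pt",
)

# Assumes a torch-serialized checkpoint; list its top-level keys if it is a dict.
ckpt = torch.load(path, map_location="cpu")
print(type(ckpt), list(ckpt.keys()) if isinstance(ckpt, dict) else None)
```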