andrew-healey committed 2de23b2 (verified) · 1 parent: 2ad8755

Upload folder using huggingface_hub

attention_kindselective_n_heads2_seed1341/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads2_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "0.5e-4_30720_2_1341", "n_embd": 128}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_4/attention_kindselective_n_heads2_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 2, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 10000, "warmup_steps": 200, "group": "wider_is_better_4", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 30720, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 3e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "3e-5_30720_2_1341", "n_embd": 128}
attention_kindselective_n_heads2_seed1341/log2.txt CHANGED
@@ -1,303 +1,303 @@
  max_steps: 10000
  0 val loss 11.7722
  0 val perplexity 129595.2031
- 0 train 11.774795 (lr=2.5000e-07) (hash(x)=145079536)
- 100 val loss 10.1967
- 100 val perplexity 26815.2441
- 100 train 10.143024 (lr=2.5250e-05) (hash(x)=154745873)
- 200 val loss 8.4253
- 200 val perplexity 4561.1460
- 200 train 8.489046 (lr=5.0000e-05) (hash(x)=155800595)
- 300 val loss 7.7144
- 300 val perplexity 2240.3523
- 300 train 7.665938 (lr=4.9988e-05) (hash(x)=148595389)
- 400 val loss 7.5699
- 400 val perplexity 1938.9187
- 400 train 7.476192 (lr=4.9954e-05) (hash(x)=145606733)
- 500 val loss 7.5245
- 500 val perplexity 1852.8209
- 500 train 7.344064 (lr=4.9896e-05) (hash(x)=138221231)
- 600 val loss 7.4760
- 600 val perplexity 1765.1185
- 600 train 7.476169 (lr=4.9815e-05) (hash(x)=150367139)
- 700 val loss 7.4224
- 700 val perplexity 1672.9674
- 700 train 7.559739 (lr=4.9712e-05) (hash(x)=155579314)
- 800 val loss 7.3439
- 800 val perplexity 1546.7035
- 800 train 7.421778 (lr=4.9585e-05) (hash(x)=155747374)
- 900 val loss 7.2824
- 900 val perplexity 1454.4935
- 900 train 7.492918 (lr=4.9436e-05) (hash(x)=159334575)
- 1000 val loss 7.2183
- 1000 val perplexity 1364.1555
- 1000 train 7.090861 (lr=4.9264e-05) (hash(x)=140604760)
- 1100 val loss 7.1651
- 1100 val perplexity 1293.5281
- 1100 train 6.987471 (lr=4.9070e-05) (hash(x)=146208052)
- 1200 val loss 7.0891
- 1200 val perplexity 1198.8540
- 1200 train 6.984112 (lr=4.8854e-05) (hash(x)=148404734)
- 1300 val loss 7.0370
- 1300 val perplexity 1137.9492
- 1300 train 6.965640 (lr=4.8616e-05) (hash(x)=155681970)
- 1400 val loss 6.9815
- 1400 val perplexity 1076.5845
- 1400 train 6.933797 (lr=4.8356e-05) (hash(x)=148115934)
- 1500 val loss 6.9014
- 1500 val perplexity 993.6962
- 1500 train 7.060465 (lr=4.8074e-05) (hash(x)=157074034)
- 1600 val loss 6.8488
- 1600 val perplexity 942.7806
- 1600 train 6.613491 (lr=4.7772e-05) (hash(x)=137464699)
- 1700 val loss 6.7931
- 1700 val perplexity 891.7070
- 1700 train 6.929006 (lr=4.7448e-05) (hash(x)=166955614)
- 1800 val loss 6.7475
- 1800 val perplexity 851.9409
- 1800 train 6.565822 (lr=4.7105e-05) (hash(x)=143886042)
- 1900 val loss 6.7145
- 1900 val perplexity 824.3010
- 1900 train 7.249403 (lr=4.6741e-05) (hash(x)=193610391)
- 2000 val loss 6.6721
- 2000 val perplexity 790.0560
- 2000 train 6.862562 (lr=4.6357e-05) (hash(x)=163799796)
- 2100 val loss 6.6205
- 2100 val perplexity 750.3120
- 2100 train 6.607678 (lr=4.5954e-05) (hash(x)=154107339)
- 2200 val loss 6.5971
- 2200 val perplexity 732.9810
- 2200 train 6.600620 (lr=4.5532e-05) (hash(x)=144592844)
- 2300 val loss 6.5494
- 2300 val perplexity 698.8473
- 2300 train 6.613638 (lr=4.5091e-05) (hash(x)=154535861)
- 2400 val loss 6.5211
- 2400 val perplexity 679.3499
- 2400 train 7.256778 (lr=4.4633e-05) (hash(x)=204706354)
- 2500 val loss 6.5063
- 2500 val perplexity 669.3500
- 2500 train 6.299736 (lr=4.4156e-05) (hash(x)=143783202)
- 2600 val loss 6.4746
- 2600 val perplexity 648.4813
- 2600 train 6.395596 (lr=4.3663e-05) (hash(x)=150862210)
- 2700 val loss 6.4500
- 2700 val perplexity 632.7221
- 2700 train 6.488371 (lr=4.3153e-05) (hash(x)=150753426)
- 2800 val loss 6.4271
- 2800 val perplexity 618.3502
- 2800 train 6.448690 (lr=4.2627e-05) (hash(x)=147766811)
- 2900 val loss 6.4014
- 2900 val perplexity 602.6843
- 2900 train 6.334109 (lr=4.2085e-05) (hash(x)=151177814)
- 3000 val loss 6.3806
- 3000 val perplexity 590.3071
- 3000 train 6.256360 (lr=4.1529e-05) (hash(x)=135925327)
- 3100 val loss 6.3712
- 3100 val perplexity 584.7452
- 3100 train 6.015045 (lr=4.0957e-05) (hash(x)=124842353)
- 3200 val loss 6.3610
- 3200 val perplexity 578.8417
- 3200 train 6.456865 (lr=4.0373e-05) (hash(x)=160440642)
- 3300 val loss 6.3370
- 3300 val perplexity 565.1111
- 3300 train 6.252707 (lr=3.9775e-05) (hash(x)=143017131)
- 3400 val loss 6.3147
- 3400 val perplexity 552.6184
- 3400 train 6.692260 (lr=3.9164e-05) (hash(x)=148143474)
- 3500 val loss 6.3072
- 3500 val perplexity 548.5033
- 3500 train 6.128360 (lr=3.8541e-05) (hash(x)=148732639)
- 3600 val loss 6.2925
- 3600 val perplexity 540.4887
- 3600 train 6.286038 (lr=3.7907e-05) (hash(x)=146333803)
- 3700 val loss 6.2816
- 3700 val perplexity 534.6521
- 3700 train 6.090304 (lr=3.7262e-05) (hash(x)=143611445)
- 3800 val loss 6.2777
- 3800 val perplexity 532.5809
- 3800 train 6.061575 (lr=3.6608e-05) (hash(x)=141272115)
- 3900 val loss 6.2569
- 3900 val perplexity 521.6227
- 3900 train 6.195599 (lr=3.5944e-05) (hash(x)=149161848)
- 4000 val loss 6.2324
- 4000 val perplexity 508.9707
- 4000 train 6.309767 (lr=3.5271e-05) (hash(x)=152036955)
- 4100 val loss 6.2213
- 4100 val perplexity 503.3548
- 4100 train 6.268515 (lr=3.4590e-05) (hash(x)=153260968)
- 4200 val loss 6.2097
- 4200 val perplexity 497.5497
- 4200 train 6.101007 (lr=3.3902e-05) (hash(x)=147629478)
- 4300 val loss 6.1937
- 4300 val perplexity 489.6672
- 4300 train 6.167351 (lr=3.3207e-05) (hash(x)=161701080)
- 4400 val loss 6.1911
- 4400 val perplexity 488.3745
- 4400 train 6.231541 (lr=3.2507e-05) (hash(x)=155290524)
- 4500 val loss 6.1868
- 4500 val perplexity 486.3091
- 4500 train 5.971059 (lr=3.1801e-05) (hash(x)=144726593)
- 4600 val loss 6.1760
- 4600 val perplexity 481.0795
- 4600 train 6.129531 (lr=3.1091e-05) (hash(x)=149517202)
- 4700 val loss 6.1587
- 4700 val perplexity 472.8268
- 4700 train 6.162160 (lr=3.0377e-05) (hash(x)=139403008)
- 4800 val loss 6.1452
- 4800 val perplexity 466.4960
- 4800 train 6.247012 (lr=2.9661e-05) (hash(x)=153387238)
- 4900 val loss 6.1427
- 4900 val perplexity 465.2972
- 4900 train 5.897235 (lr=2.8942e-05) (hash(x)=139881569)
- 5000 val loss 6.1239
- 5000 val perplexity 456.6504
- 5000 train 6.141126 (lr=2.8221e-05) (hash(x)=152602580)
- 5100 val loss 6.1204
- 5100 val perplexity 455.0395
- 5100 train 6.160268 (lr=2.7500e-05) (hash(x)=150598460)
- 5200 val loss 6.1112
- 5200 val perplexity 450.8787
- 5200 train 5.984895 (lr=2.6779e-05) (hash(x)=147660496)
- 5300 val loss 6.1046
- 5300 val perplexity 447.9008
- 5300 train 5.845158 (lr=2.6058e-05) (hash(x)=147693848)
- 5400 val loss 6.1049
- 5400 val perplexity 448.0625
- 5400 train 5.862473 (lr=2.5339e-05) (hash(x)=136048145)
- 5500 val loss 6.0878
- 5500 val perplexity 440.4329
- 5500 train 6.121768 (lr=2.4623e-05) (hash(x)=140313123)
- 5600 val loss 6.0770
- 5600 val perplexity 435.7415
- 5600 train 6.093797 (lr=2.3909e-05) (hash(x)=156997993)
- 5700 val loss 6.0683
- 5700 val perplexity 431.9543
- 5700 train 6.087464 (lr=2.3199e-05) (hash(x)=143513880)
- 5800 val loss 6.0614
- 5800 val perplexity 428.9725
- 5800 train 6.172104 (lr=2.2493e-05) (hash(x)=153736628)
- 5900 val loss 6.0547
- 5900 val perplexity 426.1208
- 5900 train 6.190841 (lr=2.1793e-05) (hash(x)=166385417)
- 6000 val loss 6.0458
- 6000 val perplexity 422.3309
- 6000 train 6.277092 (lr=2.1098e-05) (hash(x)=153216517)
- 6100 val loss 6.0446
- 6100 val perplexity 421.8097
- 6100 train 5.932088 (lr=2.0410e-05) (hash(x)=145387545)
- 6200 val loss 6.0460
- 6200 val perplexity 422.4171
- 6200 train 6.175024 (lr=1.9729e-05) (hash(x)=156306460)
- 6300 val loss 6.0292
- 6300 val perplexity 415.3701
- 6300 train 5.960823 (lr=1.9056e-05) (hash(x)=148610447)
- 6400 val loss 6.0213
- 6400 val perplexity 412.0996
- 6400 train 6.243478 (lr=1.8392e-05) (hash(x)=158343535)
- 6500 val loss 6.0134
- 6500 val perplexity 408.8782
- 6500 train 6.185011 (lr=1.7738e-05) (hash(x)=157742557)
- 6600 val loss 6.0124
- 6600 val perplexity 408.4781
- 6600 train 5.903574 (lr=1.7093e-05) (hash(x)=150457308)
- 6700 val loss 6.0066
- 6700 val perplexity 406.1186
- 6700 train 5.866361 (lr=1.6459e-05) (hash(x)=142426205)
- 6800 val loss 6.0069
- 6800 val perplexity 406.2187
- 6800 train 5.806967 (lr=1.5836e-05) (hash(x)=151522525)
- 6900 val loss 5.9981
- 6900 val perplexity 402.6668
- 6900 train 5.947786 (lr=1.5225e-05) (hash(x)=152029630)
- 7000 val loss 5.9889
- 7000 val perplexity 398.9860
- 7000 train 5.835184 (lr=1.4627e-05) (hash(x)=142978954)
- 7100 val loss 5.9839
- 7100 val perplexity 396.9924
- 7100 train 5.866302 (lr=1.4043e-05) (hash(x)=148064359)
- 7200 val loss 5.9817
- 7200 val perplexity 396.0956
- 7200 train 5.878535 (lr=1.3471e-05) (hash(x)=155771845)
- 7300 val loss 5.9780
- 7300 val perplexity 394.6485
- 7300 train 5.788899 (lr=1.2915e-05) (hash(x)=141639844)
- 7400 val loss 5.9782
- 7400 val perplexity 394.7098
- 7400 train 6.030018 (lr=1.2373e-05) (hash(x)=153466021)
- 7500 val loss 5.9703
- 7500 val perplexity 391.6344
- 7500 train 5.951066 (lr=1.1847e-05) (hash(x)=148843587)
- 7600 val loss 5.9636
- 7600 val perplexity 388.9968
- 7600 train 6.093093 (lr=1.1337e-05) (hash(x)=152053933)
- 7700 val loss 5.9594
- 7700 val perplexity 387.3708
- 7700 train 5.819657 (lr=1.0844e-05) (hash(x)=141564003)
- 7800 val loss 5.9565
- 7800 val perplexity 386.2691
- 7800 train 5.836637 (lr=1.0367e-05) (hash(x)=148617843)
- 7900 val loss 5.9547
- 7900 val perplexity 385.5467
- 7900 train 5.925571 (lr=9.9088e-06) (hash(x)=146417668)
- 8000 val loss 5.9517
- 8000 val perplexity 384.4099
- 8000 train 5.756671 (lr=9.4682e-06) (hash(x)=150165488)
- 8100 val loss 5.9492
- 8100 val perplexity 383.4286
- 8100 train 5.928113 (lr=9.0461e-06) (hash(x)=144818855)
- 8200 val loss 5.9425
- 8200 val perplexity 380.8962
- 8200 train 6.034739 (lr=8.6430e-06) (hash(x)=143074954)
- 8300 val loss 5.9406
- 8300 val perplexity 380.1790
- 8300 train 5.819485 (lr=8.2593e-06) (hash(x)=145172494)
- 8400 val loss 5.9388
- 8400 val perplexity 379.4785
- 8400 train 5.967767 (lr=7.8953e-06) (hash(x)=152284393)
- 8500 val loss 5.9353
- 8500 val perplexity 378.1628
- 8500 train 5.779359 (lr=7.5515e-06) (hash(x)=147044705)
- 8600 val loss 5.9345
- 8600 val perplexity 377.8589
- 8600 train 5.811585 (lr=7.2282e-06) (hash(x)=147762608)
- 8700 val loss 5.9323
- 8700 val perplexity 377.0360
- 8700 train 5.942141 (lr=6.9257e-06) (hash(x)=141994567)
- 8800 val loss 5.9272
- 8800 val perplexity 375.1202
- 8800 train 6.021777 (lr=6.6444e-06) (hash(x)=152552896)
- 8900 val loss 5.9233
- 8900 val perplexity 373.6460
- 8900 train 5.762463 (lr=6.3845e-06) (hash(x)=139714454)
- 9000 val loss 5.9218
- 9000 val perplexity 373.0761
- 9000 train 5.954716 (lr=6.1462e-06) (hash(x)=152274419)
- 9100 val loss 5.9213
- 9100 val perplexity 372.9054
- 9100 train 6.007237 (lr=5.9300e-06) (hash(x)=160536885)
- 9200 val loss 5.9212
- 9200 val perplexity 372.8654
- 9200 train 5.734900 (lr=5.7359e-06) (hash(x)=138386899)
- 9300 val loss 5.9195
- 9300 val perplexity 372.2228
- 9300 train 5.970142 (lr=5.5641e-06) (hash(x)=149957457)
- 9400 val loss 5.9159
- 9400 val perplexity 370.8707
- 9400 train 5.931352 (lr=5.4149e-06) (hash(x)=161671548)
- 9500 val loss 5.9132
- 9500 val perplexity 369.8911
- 9500 train 6.040483 (lr=5.2884e-06) (hash(x)=171857400)
- 9600 val loss 5.9124
- 9600 val perplexity 369.6096
- 9600 train 5.686675 (lr=5.1847e-06) (hash(x)=135378471)
- 9700 val loss 5.9109
- 9700 val perplexity 369.0223
- 9700 train 5.833959 (lr=5.1040e-06) (hash(x)=149514591)
- 9800 val loss 5.9099
- 9800 val perplexity 368.6873
- 9800 train 5.888211 (lr=5.0462e-06) (hash(x)=152679072)
- 9900 val loss 5.9073
- 9900 val perplexity 367.6994
- 9900 train 5.844470 (lr=5.0116e-06) (hash(x)=150415193)
- 9999 val loss 5.9055
- 9999 val perplexity 367.0529
+ 0 train 11.774795 (lr=1.5000e-07) (hash(x)=145079536)
+ 100 val loss 10.3638
+ 100 val perplexity 31690.6855
+ 100 train 10.310390 (lr=1.5150e-05) (hash(x)=154745873)
+ 200 val loss 9.4344
+ 200 val perplexity 12511.1768
+ 200 train 9.417608 (lr=3.0000e-05) (hash(x)=155800595)
+ 300 val loss 8.1776
+ 300 val perplexity 3560.4011
+ 300 train 8.142024 (lr=2.9993e-05) (hash(x)=148595389)
+ 400 val loss 7.7316
+ 400 val perplexity 2279.2727
+ 400 train 7.647736 (lr=2.9972e-05) (hash(x)=145606733)
+ 500 val loss 7.5773
+ 500 val perplexity 1953.3274
+ 500 train 7.406966 (lr=2.9938e-05) (hash(x)=138221231)
+ 600 val loss 7.5203
+ 600 val perplexity 1845.0748
+ 600 train 7.525246 (lr=2.9889e-05) (hash(x)=150367139)
+ 700 val loss 7.4772
+ 700 val perplexity 1767.2173
+ 700 train 7.618254 (lr=2.9827e-05) (hash(x)=155579314)
+ 800 val loss 7.4219
+ 800 val perplexity 1672.1436
+ 800 train 7.494927 (lr=2.9751e-05) (hash(x)=155747374)
+ 900 val loss 7.3563
+ 900 val perplexity 1566.0425
+ 900 train 7.568688 (lr=2.9662e-05) (hash(x)=159334575)
+ 1000 val loss 7.2908
+ 1000 val perplexity 1466.7249
+ 1000 train 7.166207 (lr=2.9558e-05) (hash(x)=140604760)
+ 1100 val loss 7.2271
+ 1100 val perplexity 1376.2406
+ 1100 train 7.053119 (lr=2.9442e-05) (hash(x)=146208052)
+ 1200 val loss 7.1591
+ 1200 val perplexity 1285.7166
+ 1200 train 7.065541 (lr=2.9312e-05) (hash(x)=148404734)
+ 1300 val loss 7.0993
+ 1300 val perplexity 1211.0946
+ 1300 train 7.030000 (lr=2.9169e-05) (hash(x)=155681970)
+ 1400 val loss 7.0369
+ 1400 val perplexity 1137.8608
+ 1400 train 6.990728 (lr=2.9013e-05) (hash(x)=148115934)
+ 1500 val loss 6.9659
+ 1500 val perplexity 1059.9108
+ 1500 train 7.128124 (lr=2.8845e-05) (hash(x)=157074034)
+ 1600 val loss 6.9095
+ 1600 val perplexity 1001.7750
+ 1600 train 6.662536 (lr=2.8663e-05) (hash(x)=137464699)
+ 1700 val loss 6.8588
+ 1700 val perplexity 952.2664
+ 1700 train 7.000303 (lr=2.8469e-05) (hash(x)=166955614)
+ 1800 val loss 6.8125
+ 1800 val perplexity 909.1738
+ 1800 train 6.636445 (lr=2.8263e-05) (hash(x)=143886042)
+ 1900 val loss 6.7785
+ 1900 val perplexity 878.7321
+ 1900 train 7.344856 (lr=2.8044e-05) (hash(x)=193610391)
+ 2000 val loss 6.7289
+ 2000 val perplexity 836.2564
+ 2000 train 6.923176 (lr=2.7814e-05) (hash(x)=163799796)
+ 2100 val loss 6.6765
+ 2100 val perplexity 793.5648
+ 2100 train 6.666200 (lr=2.7572e-05) (hash(x)=154107339)
+ 2200 val loss 6.6449
+ 2200 val perplexity 768.8541
+ 2200 train 6.642457 (lr=2.7319e-05) (hash(x)=144592844)
+ 2300 val loss 6.6128
+ 2300 val perplexity 744.5838
+ 2300 train 6.680394 (lr=2.7055e-05) (hash(x)=154535861)
+ 2400 val loss 6.5805
+ 2400 val perplexity 720.9152
+ 2400 train 7.345472 (lr=2.6780e-05) (hash(x)=204706354)
+ 2500 val loss 6.5576
+ 2500 val perplexity 704.6126
+ 2500 train 6.345363 (lr=2.6494e-05) (hash(x)=143783202)
+ 2600 val loss 6.5345
+ 2600 val perplexity 688.5145
+ 2600 train 6.449786 (lr=2.6198e-05) (hash(x)=150862210)
+ 2700 val loss 6.5102
+ 2700 val perplexity 671.9762
+ 2700 train 6.551790 (lr=2.5892e-05) (hash(x)=150753426)
+ 2800 val loss 6.4929
+ 2800 val perplexity 660.4471
+ 2800 train 6.511770 (lr=2.5576e-05) (hash(x)=147766811)
+ 2900 val loss 6.4571
+ 2900 val perplexity 637.2186
+ 2900 train 6.394937 (lr=2.5251e-05) (hash(x)=151177814)
+ 3000 val loss 6.4394
+ 3000 val perplexity 626.0252
+ 3000 train 6.307624 (lr=2.4917e-05) (hash(x)=135925327)
+ 3100 val loss 6.4255
+ 3100 val perplexity 617.3714
+ 3100 train 6.066052 (lr=2.4574e-05) (hash(x)=124842353)
+ 3200 val loss 6.4126
+ 3200 val perplexity 609.4990
+ 3200 train 6.509407 (lr=2.4224e-05) (hash(x)=160440642)
+ 3300 val loss 6.3849
+ 3300 val perplexity 592.8026
+ 3300 train 6.300091 (lr=2.3865e-05) (hash(x)=143017131)
+ 3400 val loss 6.3705
+ 3400 val perplexity 584.3425
+ 3400 train 6.720880 (lr=2.3498e-05) (hash(x)=148143474)
+ 3500 val loss 6.3546
+ 3500 val perplexity 575.1470
+ 3500 train 6.168210 (lr=2.3125e-05) (hash(x)=148732639)
+ 3600 val loss 6.3427
+ 3600 val perplexity 568.3472
+ 3600 train 6.334900 (lr=2.2744e-05) (hash(x)=146333803)
+ 3700 val loss 6.3290
+ 3700 val perplexity 560.5775
+ 3700 train 6.129919 (lr=2.2357e-05) (hash(x)=143611445)
+ 3800 val loss 6.3270
+ 3800 val perplexity 559.4955
+ 3800 train 6.108684 (lr=2.1965e-05) (hash(x)=141272115)
+ 3900 val loss 6.3116
+ 3900 val perplexity 550.9077
+ 3900 train 6.246768 (lr=2.1566e-05) (hash(x)=149161848)
+ 4000 val loss 6.2831
+ 4000 val perplexity 535.4652
+ 4000 train 6.355687 (lr=2.1162e-05) (hash(x)=152036955)
+ 4100 val loss 6.2690
+ 4100 val perplexity 527.9527
+ 4100 train 6.317615 (lr=2.0754e-05) (hash(x)=153260968)
+ 4200 val loss 6.2611
+ 4200 val perplexity 523.7897
+ 4200 train 6.157646 (lr=2.0341e-05) (hash(x)=147629478)
+ 4300 val loss 6.2524
+ 4300 val perplexity 519.2458
+ 4300 train 6.222133 (lr=1.9924e-05) (hash(x)=161701080)
+ 4400 val loss 6.2445
+ 4400 val perplexity 515.1727
+ 4400 train 6.297623 (lr=1.9504e-05) (hash(x)=155290524)
+ 4500 val loss 6.2363
+ 4500 val perplexity 510.9671
+ 4500 train 6.020789 (lr=1.9081e-05) (hash(x)=144726593)
+ 4600 val loss 6.2316
+ 4600 val perplexity 508.5655
+ 4600 train 6.176429 (lr=1.8655e-05) (hash(x)=149517202)
+ 4700 val loss 6.2135
+ 4700 val perplexity 499.4560
+ 4700 train 6.223347 (lr=1.8226e-05) (hash(x)=139403008)
+ 4800 val loss 6.1999
+ 4800 val perplexity 492.6792
+ 4800 train 6.289749 (lr=1.7796e-05) (hash(x)=153387238)
+ 4900 val loss 6.1991
+ 4900 val perplexity 492.2868
+ 4900 train 5.946225 (lr=1.7365e-05) (hash(x)=139881569)
+ 5000 val loss 6.1858
+ 5000 val perplexity 485.8128
+ 5000 train 6.200039 (lr=1.6933e-05) (hash(x)=152602580)
+ 5100 val loss 6.1775
+ 5100 val perplexity 481.7774
+ 5100 train 6.222368 (lr=1.6500e-05) (hash(x)=150598460)
+ 5200 val loss 6.1737
+ 5200 val perplexity 479.9440
+ 5200 train 6.049888 (lr=1.6067e-05) (hash(x)=147660496)
+ 5300 val loss 6.1657
+ 5300 val perplexity 476.1298
+ 5300 train 5.907603 (lr=1.5635e-05) (hash(x)=147693848)
+ 5400 val loss 6.1717
+ 5400 val perplexity 478.9965
+ 5400 train 5.929591 (lr=1.5204e-05) (hash(x)=136048145)
+ 5500 val loss 6.1501
+ 5500 val perplexity 468.7782
+ 5500 train 6.171415 (lr=1.4774e-05) (hash(x)=140313123)
+ 5600 val loss 6.1436
+ 5600 val perplexity 465.7047
+ 5600 train 6.169950 (lr=1.4345e-05) (hash(x)=156997993)
+ 5700 val loss 6.1348
+ 5700 val perplexity 461.6267
+ 5700 train 6.146784 (lr=1.3919e-05) (hash(x)=143513880)
+ 5800 val loss 6.1298
+ 5800 val perplexity 459.3479
+ 5800 train 6.255326 (lr=1.3496e-05) (hash(x)=153736628)
+ 5900 val loss 6.1278
+ 5900 val perplexity 458.4455
+ 5900 train 6.270812 (lr=1.3076e-05) (hash(x)=166385417)
+ 6000 val loss 6.1195
+ 6000 val perplexity 454.6567
+ 6000 train 6.350556 (lr=1.2659e-05) (hash(x)=153216517)
+ 6100 val loss 6.1183
+ 6100 val perplexity 454.0780
+ 6100 train 6.004011 (lr=1.2246e-05) (hash(x)=145387545)
+ 6200 val loss 6.1155
+ 6200 val perplexity 452.8155
+ 6200 train 6.235756 (lr=1.1838e-05) (hash(x)=156306460)
+ 6300 val loss 6.1022
+ 6300 val perplexity 446.8536
+ 6300 train 6.034924 (lr=1.1434e-05) (hash(x)=148610447)
+ 6400 val loss 6.0974
+ 6400 val perplexity 444.7107
+ 6400 train 6.314558 (lr=1.1035e-05) (hash(x)=158343535)
+ 6500 val loss 6.0914
+ 6500 val perplexity 442.0321
+ 6500 train 6.262345 (lr=1.0643e-05) (hash(x)=157742557)
+ 6600 val loss 6.0901
+ 6600 val perplexity 441.4777
+ 6600 train 5.989834 (lr=1.0256e-05) (hash(x)=150457308)
+ 6700 val loss 6.0879
+ 6700 val perplexity 440.4988
+ 6700 train 5.953698 (lr=9.8753e-06) (hash(x)=142426205)
+ 6800 val loss 6.0852
+ 6800 val perplexity 439.3283
+ 6800 train 5.884865 (lr=9.5017e-06) (hash(x)=151522525)
+ 6900 val loss 6.0779
+ 6900 val perplexity 436.1267
+ 6900 train 6.016484 (lr=9.1353e-06) (hash(x)=152029630)
+ 7000 val loss 6.0745
+ 7000 val perplexity 434.6117
+ 7000 train 5.922952 (lr=8.7764e-06) (hash(x)=142978954)
+ 7100 val loss 6.0687
+ 7100 val perplexity 432.1179
+ 7100 train 5.947993 (lr=8.4255e-06) (hash(x)=148064359)
+ 7200 val loss 6.0660
+ 7200 val perplexity 430.9639
+ 7200 train 5.966048 (lr=8.0829e-06) (hash(x)=155771845)
+ 7300 val loss 6.0634
+ 7300 val perplexity 429.8407
+ 7300 train 5.861534 (lr=7.7489e-06) (hash(x)=141639844)
+ 7400 val loss 6.0639
+ 7400 val perplexity 430.0664
+ 7400 train 6.113844 (lr=7.4239e-06) (hash(x)=153466021)
+ 7500 val loss 6.0577
+ 7500 val perplexity 427.3977
+ 7500 train 6.058067 (lr=7.1083e-06) (hash(x)=148843587)
+ 7600 val loss 6.0520
+ 7600 val perplexity 424.9706
+ 7600 train 6.189840 (lr=6.8023e-06) (hash(x)=152053933)
+ 7700 val loss 6.0507
+ 7700 val perplexity 424.4133
+ 7700 train 5.898217 (lr=6.5062e-06) (hash(x)=141564003)
+ 7800 val loss 6.0465
+ 7800 val perplexity 422.6144
+ 7800 train 5.920815 (lr=6.2205e-06) (hash(x)=148617843)
+ 7900 val loss 6.0456
+ 7900 val perplexity 422.2512
+ 7900 train 6.015367 (lr=5.9453e-06) (hash(x)=146417668)
+ 8000 val loss 6.0450
+ 8000 val perplexity 421.9865
+ 8000 train 5.850304 (lr=5.6809e-06) (hash(x)=150165488)
+ 8100 val loss 6.0408
+ 8100 val perplexity 420.2381
+ 8100 train 6.013474 (lr=5.4277e-06) (hash(x)=144818855)
+ 8200 val loss 6.0372
+ 8200 val perplexity 418.7222
+ 8200 train 6.119427 (lr=5.1858e-06) (hash(x)=143074954)
+ 8300 val loss 6.0340
+ 8300 val perplexity 417.3623
+ 8300 train 5.923309 (lr=4.9556e-06) (hash(x)=145172494)
+ 8400 val loss 6.0337
+ 8400 val perplexity 417.2652
+ 8400 train 6.065529 (lr=4.7372e-06) (hash(x)=152284393)
+ 8500 val loss 6.0314
+ 8500 val perplexity 416.2838
+ 8500 train 5.882653 (lr=4.5309e-06) (hash(x)=147044705)
+ 8600 val loss 6.0320
+ 8600 val perplexity 416.5469
+ 8600 train 5.929805 (lr=4.3369e-06) (hash(x)=147762608)
+ 8700 val loss 6.0279
+ 8700 val perplexity 414.8306
+ 8700 train 6.042439 (lr=4.1554e-06) (hash(x)=141994567)
+ 8800 val loss 6.0255
+ 8800 val perplexity 413.8477
+ 8800 train 6.112911 (lr=3.9866e-06) (hash(x)=152552896)
+ 8900 val loss 6.0231
+ 8900 val perplexity 412.8680
+ 8900 train 5.859866 (lr=3.8307e-06) (hash(x)=139714454)
+ 9000 val loss 6.0218
+ 9000 val perplexity 412.3357
+ 9000 train 6.052674 (lr=3.6877e-06) (hash(x)=152274419)
+ 9100 val loss 6.0214
+ 9100 val perplexity 412.1412
+ 9100 train 6.103989 (lr=3.5580e-06) (hash(x)=160536885)
+ 9200 val loss 6.0210
+ 9200 val perplexity 411.9943
+ 9200 train 5.841023 (lr=3.4415e-06) (hash(x)=138386899)
+ 9300 val loss 6.0192
+ 9300 val perplexity 411.2347
+ 9300 train 6.074156 (lr=3.3385e-06) (hash(x)=149957457)
+ 9400 val loss 6.0161
+ 9400 val perplexity 409.9883
+ 9400 train 6.065225 (lr=3.2490e-06) (hash(x)=161671548)
+ 9500 val loss 6.0157
+ 9500 val perplexity 409.8131
+ 9500 train 6.159393 (lr=3.1730e-06) (hash(x)=171857400)
+ 9600 val loss 6.0129
+ 9600 val perplexity 408.6819
+ 9600 train 5.782595 (lr=3.1108e-06) (hash(x)=135378471)
+ 9700 val loss 6.0124
+ 9700 val perplexity 408.4582
+ 9700 train 5.940501 (lr=3.0624e-06) (hash(x)=149514591)
+ 9800 val loss 6.0114
+ 9800 val perplexity 408.0481
+ 9800 train 5.993114 (lr=3.0277e-06) (hash(x)=152679072)
+ 9900 val loss 6.0094
+ 9900 val perplexity 407.2200
+ 9900 train 5.938741 (lr=3.0069e-06) (hash(x)=150415193)
+ 9999 val loss 6.0089
+ 9999 val perplexity 407.0517
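Both runs log the same schedule shape, just scaled by max_lr: with warmup_steps=200 and max_steps=10000 from args.json, the logged lr values match a linear warmup followed by cosine decay to a floor of max_lr/10 (the floor is inferred from the logs, not stated in args.json). Note also that each "val perplexity" is simply exp of the "val loss", e.g. exp(6.0089) ≈ 407.05. A minimal sketch of the inferred schedule:

import math

def lr_at(step, max_lr, warmup_steps=200, max_steps=10000):
    # Inferred from the logs above: linear warmup to max_lr, then
    # cosine decay to a floor of max_lr / 10 (assumption, not from args.json).
    min_lr = max_lr / 10
    if step < warmup_steps:
        return max_lr * (step + 1) / warmup_steps
    decay_ratio = (step - warmup_steps) / (max_steps - warmup_steps)
    coeff = 0.5 * (1.0 + math.cos(math.pi * decay_ratio))  # goes 1 -> 0
    return min_lr + coeff * (max_lr - min_lr)

# Spot-checks against the new log (max_lr = 3e-05):
for step in (0, 200, 5100, 9900):
    print(step, f"{lr_at(step, 3e-5):.4e}")
# 0 -> 1.5000e-07, 200 -> 3.0000e-05, 5100 -> 1.6500e-05, 9900 -> 3.0069e-06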
attention_kindselective_n_heads2_seed1341/model_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:74ba334bd0101aaadc01e26184cae068161abfa0ae4d46c807c3442a5ced88c6
+ oid sha256:61917b2061d550c01c6a13d915f7eddbd256ab6e650148187f75cdce6cacbc7b
  size 38587970
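This and the remaining checkpoint diffs below are Git LFS pointer files, not the weights themselves: retraining with the new learning rate changes each file's sha256 oid, while the byte size stays identical because the tensors keep the same shapes. A small sketch for verifying a downloaded checkpoint against the oid in its pointer (the local path below is hypothetical):

import hashlib

def lfs_oid(path, chunk_size=1 << 20):
    # The LFS pointer's oid is the sha256 of the actual file contents,
    # so hashing the downloaded file should reproduce it exactly.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while chunk := f.read(chunk_size):
            h.update(chunk)
    return h.hexdigest()

# Should print the "+ oid" value above for the post-commit checkpoint.
print(lfs_oid("attention_kindselective_n_heads2_seed1341/model_02500.pt"))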
attention_kindselective_n_heads2_seed1341/model_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:b16d132361577f9fe65063258b176bda59304031f30e5218be876604b731f064
+ oid sha256:4cf822d46768dc68a2e287a6477f6a972e531f4ac2b6bee3659c053eddf6833b
  size 38587970
attention_kindselective_n_heads2_seed1341/model_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ad2fa889095cb76098814eb3719349a2a88f7883160df398bb236eae2e0dd419
+ oid sha256:0d74f40d28c5bcaeacec475043051481eb3c1cac5909bf8266749547ca031e5c
  size 38587970
attention_kindselective_n_heads2_seed1341/model_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:7a9877f2d1585c73ca2a2d1529be8d7c23b4402dfb2488a72c5b2e4f1e7f289f
+ oid sha256:92515d22afc83810717ffa953962bfbf259bbbb4453961f59c3ddb677c9b5e0e
  size 38587970
attention_kindselective_n_heads2_seed1341/optimizer_02500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c592f311f13824dc70dda18105f0323da847beb468ad3662628d16c3f1f38bdf
+ oid sha256:0691eaba89da72b0b9e028c1b0f83917e0f6c2436c82b31b0d7e187f615e83e6
  size 70895430
attention_kindselective_n_heads2_seed1341/optimizer_05000.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bf007fb4171da723c27811bcb2b34a91e5f5b47f68dfe98b97dd74e61c5ec415
+ oid sha256:302fd87b96897189d938f9764920b673f8b54755f7112d2a04192fea75f999e4
  size 70895430
attention_kindselective_n_heads2_seed1341/optimizer_07500.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:da0337e3b4da4c98a4a70a4614120ce958de075a8f71d2460b983614a4d736a7
+ oid sha256:3bf07f379520069eceb45d10e1bc257b623074ee5e9d9c913d6d34a09b07f476
  size 70895430
attention_kindselective_n_heads2_seed1341/optimizer_09999.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:849ec8ad2b98bcb82237741dbdd657d3a1463e7ec0077e7ae9bca8af3a98c953
+ oid sha256:03daeb14bf6ca8dcc9eded806955bb32da689c8c248d32e57fe4b5e2d430be28
  size 70895430