andrew-healey committed
Commit b2c54e6 (verified) · Parent: 4b2d673

Upload folder using huggingface_hub
attention_kindselective_n_heads4_seed1341/args.json CHANGED
@@ -1 +1 @@
- {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 5.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "5.5e-5_61440_4_1341", "n_embd": 256}
+ {"hellaswag": true, "attention_kind": "selective", "log_dir": "wider_is_better_9/attention_kindselective_n_heads4_seed1341", "resume_checkpoint": null, "resume_optimizer": false, "add_a_head": false, "add_head_to_start": true, "new_head_init": "normal", "n_heads": 4, "protect_bos_token": true, "prevent_from_masking_myself": true, "max_steps": 8750, "warmup_steps": 500, "group": "wider_is_better_9", "use_wandb": true, "kill_self_after_run": false, "random_seed": 1341, "memory_penalty_epsilon": 0.1, "selection_head_linear_combo": "none", "selection_head_linear_combo_scale": 1.0, "protection_kind": "none", "leaky_relu_alpha": null, "leaky_relu_bias": null, "use_compile": true, "use_mini_model": false, "upload_to_hf": true, "seq_len": 256, "batch_size": 120, "total_batch_size": 61440, "protection_head_scaling_factor": 1.0, "protection_head_bias": 0.0, "n_sliced_masks": null, "n_latent_masks": null, "mask_layernorm": false, "residual_attention_masks": false, "compute_base_shapes": false, "base_shapes_savefile": null, "mup": true, "disable_selection": false, "mup_enable_coord_check_logging": false, "max_lr": 4.5e-05, "decay_lr": true, "readout_zero_init": false, "query_zero_init": false, "l1_loss": false, "debugpy": false, "key": "4.5e-5_61440_4_1341", "n_embd": 256}
attention_kindselective_n_heads4_seed1341/log2.txt CHANGED
@@ -1,267 +1,267 @@
  max_steps: 8750
  0 val loss 11.3095
- 0 val perplexity 81594.2500
- 0 train 11.313040 (lr=1.1000e-07) (hash(x)=145079536)
- 100 val loss 10.0285
- 100 val perplexity 22663.5449
- 100 train 10.022821 (lr=1.1110e-05) (hash(x)=155800595)
- 200 val loss 9.3391
- 200 val perplexity 11373.7100
- 200 train 9.288088 (lr=2.2110e-05) (hash(x)=145606733)
- 300 val loss 8.0597
- 300 val perplexity 3164.1838
- 300 train 8.040195 (lr=3.3110e-05) (hash(x)=150367139)
- 400 val loss 7.5562
- 400 val perplexity 1912.4974
- 400 train 7.863613 (lr=4.4110e-05) (hash(x)=155747374)
- 500 val loss 7.4240
- 500 val perplexity 1675.6675
- 500 train 7.315066 (lr=5.5000e-05) (hash(x)=140604760)
- 600 val loss 7.3587
- 600 val perplexity 1569.7710
- 600 train 7.302488 (lr=5.4982e-05) (hash(x)=148404734)
- 700 val loss 7.3047
- 700 val perplexity 1487.2728
- 700 train 7.262489 (lr=5.4928e-05) (hash(x)=148115934)
- 800 val loss 7.2321
- 800 val perplexity 1383.0587
- 800 train 7.115292 (lr=5.4839e-05) (hash(x)=137464699)
- 900 val loss 7.1679
- 900 val perplexity 1297.1080
- 900 train 6.982425 (lr=5.4713e-05) (hash(x)=143886042)
- 1000 val loss 7.1077
- 1000 val perplexity 1221.3217
- 1000 train 7.107786 (lr=5.4553e-05) (hash(x)=163799796)
- 1100 val loss 7.0422
- 1100 val perplexity 1143.8781
- 1100 train 6.998779 (lr=5.4357e-05) (hash(x)=144592844)
- 1200 val loss 6.9882
- 1200 val perplexity 1083.7599
- 1200 train 7.327436 (lr=5.4126e-05) (hash(x)=204706354)
- 1300 val loss 6.9446
- 1300 val perplexity 1037.5281
- 1300 train 6.878752 (lr=5.3860e-05) (hash(x)=150862210)
- 1400 val loss 6.8865
- 1400 val perplexity 978.9376
- 1400 train 6.871954 (lr=5.3561e-05) (hash(x)=147766811)
- 1500 val loss 6.8288
- 1500 val perplexity 924.0453
- 1500 train 6.639542 (lr=5.3227e-05) (hash(x)=135925327)
- 1600 val loss 6.7740
- 1600 val perplexity 874.8393
- 1600 train 6.746453 (lr=5.2860e-05) (hash(x)=160440642)
- 1700 val loss 6.7164
- 1700 val perplexity 825.8420
- 1700 train 6.815380 (lr=5.2461e-05) (hash(x)=151184106)
- 1800 val loss 6.6585
- 1800 val perplexity 779.3540
- 1800 train 6.557573 (lr=5.2029e-05) (hash(x)=148100580)
- 1900 val loss 6.6249
- 1900 val perplexity 753.6406
- 1900 train 6.457758 (lr=5.1565e-05) (hash(x)=149434659)
- 2000 val loss 6.5724
- 2000 val perplexity 715.0803
- 2000 train 6.471416 (lr=5.1071e-05) (hash(x)=152285486)
- 2100 val loss 6.5395
- 2100 val perplexity 691.9631
- 2100 train 6.288988 (lr=5.0547e-05) (hash(x)=144294295)
- 2200 val loss 6.5062
- 2200 val perplexity 669.3031
- 2200 train 6.534094 (lr=4.9993e-05) (hash(x)=175030215)
- 2300 val loss 6.4785
- 2300 val perplexity 651.0085
- 2300 train 6.280591 (lr=4.9410e-05) (hash(x)=150831428)
- 2400 val loss 6.4330
- 2400 val perplexity 622.0258
- 2400 train 6.595540 (lr=4.8800e-05) (hash(x)=140808297)
- 2500 val loss 6.4295
- 2500 val perplexity 619.8776
- 2500 train 6.419309 (lr=4.8162e-05) (hash(x)=153160275)
- 2600 val loss 6.4029
- 2600 val perplexity 603.5640
- 2600 train 6.176099 (lr=4.7499e-05) (hash(x)=133990623)
- 2700 val loss 6.3730
- 2700 val perplexity 585.8351
- 2700 train 6.347503 (lr=4.6811e-05) (hash(x)=142860944)
- 2800 val loss 6.3438
- 2800 val perplexity 568.9605
- 2800 train 6.365329 (lr=4.6099e-05) (hash(x)=137959511)
- 2900 val loss 6.3173
- 2900 val perplexity 554.1024
- 2900 train 6.263561 (lr=4.5364e-05) (hash(x)=147009873)
- 3000 val loss 6.2962
- 3000 val perplexity 542.5182
- 3000 train 6.152777 (lr=4.4606e-05) (hash(x)=158264841)
- 3100 val loss 6.2885
- 3100 val perplexity 538.3243
- 3100 train 6.133510 (lr=4.3828e-05) (hash(x)=139232251)
- 3200 val loss 6.2569
- 3200 val perplexity 521.6201
- 3200 train 6.301063 (lr=4.3031e-05) (hash(x)=153436104)
- 3300 val loss 6.2290
- 3300 val perplexity 507.2550
- 3300 train 6.049569 (lr=4.2215e-05) (hash(x)=149681831)
- 3400 val loss 6.2229
- 3400 val perplexity 504.1629
- 3400 train 6.117395 (lr=4.1381e-05) (hash(x)=168425516)
- 3500 val loss 6.1881
- 3500 val perplexity 486.9261
- 3500 train 6.212366 (lr=4.0532e-05) (hash(x)=163104338)
- 3600 val loss 6.1671
- 3600 val perplexity 476.8029
- 3600 train 6.143349 (lr=3.9667e-05) (hash(x)=165109772)
- 3700 val loss 6.1626
- 3700 val perplexity 474.6586
- 3700 train 5.890706 (lr=3.8789e-05) (hash(x)=153420306)
- 3800 val loss 6.1309
- 3800 val perplexity 459.8583
- 3800 train 6.154771 (lr=3.7898e-05) (hash(x)=160168863)
- 3900 val loss 6.1152
- 3900 val perplexity 452.6844
- 3900 train 6.032316 (lr=3.6996e-05) (hash(x)=153906073)
- 4000 val loss 6.1059
- 4000 val perplexity 448.4742
- 4000 train 6.034504 (lr=3.6085e-05) (hash(x)=151055067)
- 4100 val loss 6.0870
- 4100 val perplexity 440.1180
- 4100 train 6.061316 (lr=3.5165e-05) (hash(x)=149629830)
- 4200 val loss 6.0718
- 4200 val perplexity 433.4450
- 4200 train 5.946362 (lr=3.4238e-05) (hash(x)=143101381)
- 4300 val loss 6.0579
- 4300 val perplexity 427.4625
- 4300 train 5.960875 (lr=3.3305e-05) (hash(x)=149712044)
- 4400 val loss 6.0403
- 4400 val perplexity 419.9999
- 4400 train 5.964753 (lr=3.2368e-05) (hash(x)=153446449)
- 4500 val loss 6.0242
- 4500 val perplexity 413.3047
- 4500 train 5.972738 (lr=3.1428e-05) (hash(x)=146086947)
- 4600 val loss 6.0203
- 4600 val perplexity 411.6975
- 4600 train 5.859571 (lr=3.0486e-05) (hash(x)=153800173)
- 4700 val loss 6.0057
- 4700 val perplexity 405.7249
- 4700 train 6.063126 (lr=2.9543e-05) (hash(x)=155962726)
- 4800 val loss 5.9915
- 4800 val perplexity 400.0210
- 4800 train 6.226341 (lr=2.8602e-05) (hash(x)=142045616)
- 4900 val loss 5.9758
- 4900 val perplexity 393.7772
- 4900 train 5.946213 (lr=2.7663e-05) (hash(x)=143418248)
- 5000 val loss 5.9665
- 5000 val perplexity 390.1279
- 5000 train 5.884785 (lr=2.6728e-05) (hash(x)=145789790)
- 5100 val loss 5.9587
- 5100 val perplexity 387.1258
- 5100 train 5.927770 (lr=2.5798e-05) (hash(x)=137795633)
- 5200 val loss 5.9446
- 5200 val perplexity 381.7025
- 5200 train 6.104008 (lr=2.4874e-05) (hash(x)=148907132)
- 5300 val loss 5.9428
- 5300 val perplexity 380.9910
- 5300 train 5.824908 (lr=2.3958e-05) (hash(x)=152343580)
- 5400 val loss 5.9278
- 5400 val perplexity 375.3416
- 5400 train 5.779144 (lr=2.3051e-05) (hash(x)=148578264)
- 5500 val loss 5.9159
- 5500 val perplexity 370.8882
- 5500 train 6.081324 (lr=2.2155e-05) (hash(x)=145635833)
- 5600 val loss 5.9097
- 5600 val perplexity 368.5929
- 5600 train 5.884354 (lr=2.1271e-05) (hash(x)=156337844)
- 5700 val loss 5.9002
- 5700 val perplexity 365.1134
- 5700 train 5.783232 (lr=2.0399e-05) (hash(x)=147168506)
- 5800 val loss 5.8906
- 5800 val perplexity 361.6292
- 5800 train 5.954978 (lr=1.9542e-05) (hash(x)=159566920)
- 5900 val loss 5.8862
- 5900 val perplexity 360.0167
- 5900 train 5.948883 (lr=1.8700e-05) (hash(x)=158273929)
- 6000 val loss 5.8794
- 6000 val perplexity 357.5957
- 6000 train 5.850380 (lr=1.7875e-05) (hash(x)=156649749)
- 6100 val loss 5.8706
- 6100 val perplexity 354.4674
- 6100 train 5.786806 (lr=1.7068e-05) (hash(x)=146812388)
- 6200 val loss 5.8642
- 6200 val perplexity 352.1928
- 6200 train 5.684584 (lr=1.6280e-05) (hash(x)=143522146)
- 6300 val loss 5.8598
- 6300 val perplexity 350.6573
- 6300 train 5.697289 (lr=1.5512e-05) (hash(x)=150124474)
- 6400 val loss 5.8556
- 6400 val perplexity 349.1890
- 6400 train 5.881581 (lr=1.4766e-05) (hash(x)=141242117)
- 6500 val loss 5.8434
- 6500 val perplexity 344.9429
- 6500 train 5.724736 (lr=1.4042e-05) (hash(x)=143529762)
- 6600 val loss 5.8384
- 6600 val perplexity 343.2130
- 6600 train 5.621650 (lr=1.3342e-05) (hash(x)=136948374)
- 6700 val loss 5.8354
- 6700 val perplexity 342.2034
- 6700 train 5.649374 (lr=1.2666e-05) (hash(x)=146268592)
- 6800 val loss 5.8289
- 6800 val perplexity 339.9703
- 6800 train 5.787588 (lr=1.2016e-05) (hash(x)=152676836)
- 6900 val loss 5.8204
- 6900 val perplexity 337.0951
- 6900 train 5.736982 (lr=1.1392e-05) (hash(x)=134657776)
- 7000 val loss 5.8177
- 7000 val perplexity 336.1875
- 7000 train 5.775075 (lr=1.0795e-05) (hash(x)=166721861)
- 7100 val loss 5.8143
- 7100 val perplexity 335.0538
- 7100 train 5.619752 (lr=1.0227e-05) (hash(x)=135496702)
- 7200 val loss 5.8106
- 7200 val perplexity 333.8330
- 7200 train 5.914602 (lr=9.6875e-06) (hash(x)=155567461)
- 7300 val loss 5.8045
- 7300 val perplexity 331.7833
- 7300 train 5.601978 (lr=9.1780e-06) (hash(x)=142803829)
- 7400 val loss 5.8025
- 7400 val perplexity 331.1190
- 7400 train 5.623451 (lr=8.6990e-06) (hash(x)=145294178)
- 7500 val loss 5.7976
- 7500 val perplexity 329.5175
- 7500 train 5.558932 (lr=8.2513e-06) (hash(x)=150573713)
- 7600 val loss 5.7957
- 7600 val perplexity 328.8690
- 7600 train 5.817007 (lr=7.8355e-06) (hash(x)=142771511)
- 7700 val loss 5.7898
- 7700 val perplexity 326.9609
- 7700 train 5.689264 (lr=7.4522e-06) (hash(x)=143602175)
- 7800 val loss 5.7861
- 7800 val perplexity 325.7462
- 7800 train 5.816852 (lr=7.1019e-06) (hash(x)=152379862)
- 7900 val loss 5.7852
- 7900 val perplexity 325.4419
- 7900 train 5.620042 (lr=6.7852e-06) (hash(x)=146655921)
- 8000 val loss 5.7856
- 8000 val perplexity 325.5628
- 8000 train 5.900315 (lr=6.5025e-06) (hash(x)=148262482)
- 8100 val loss 5.7776
- 8100 val perplexity 322.9937
- 8100 train 5.713513 (lr=6.2543e-06) (hash(x)=147683655)
- 8200 val loss 5.7774
- 8200 val perplexity 322.9288
- 8200 train 5.876583 (lr=6.0408e-06) (hash(x)=157312987)
- 8300 val loss 5.7743
- 8300 val perplexity 321.9338
- 8300 train 5.736997 (lr=5.8625e-06) (hash(x)=141107543)
- 8400 val loss 5.7698
- 8400 val perplexity 320.4694
- 8400 train 5.747867 (lr=5.7195e-06) (hash(x)=141323024)
- 8500 val loss 5.7673
- 8500 val perplexity 319.6715
- 8500 train 5.737051 (lr=5.6121e-06) (hash(x)=150696521)
- 8600 val loss 5.7659
- 8600 val perplexity 319.2322
- 8600 train 5.718998 (lr=5.5404e-06) (hash(x)=162288191)
- 8700 val loss 5.7624
- 8700 val perplexity 318.1103
- 8700 train 5.633211 (lr=5.5045e-06) (hash(x)=152860941)
- 8749 val loss 5.7618
- 8749 val perplexity 317.9061
+ 0 val perplexity 81594.3281
+ 0 train 11.313040 (lr=9.0000e-08) (hash(x)=145079536)
+ 100 val loss 10.0761
+ 100 val perplexity 23767.8672
+ 100 train 10.071070 (lr=9.0900e-06) (hash(x)=155800595)
+ 200 val loss 9.5264
+ 200 val perplexity 13717.7168
+ 200 train 9.479525 (lr=1.8090e-05) (hash(x)=145606733)
+ 300 val loss 8.3244
+ 300 val perplexity 4123.3882
+ 300 train 8.313211 (lr=2.7090e-05) (hash(x)=150367139)
+ 400 val loss 7.6483
+ 400 val perplexity 2096.9778
+ 400 train 7.941428 (lr=3.6090e-05) (hash(x)=155747374)
+ 500 val loss 7.4758
+ 500 val perplexity 1764.8981
+ 500 train 7.367132 (lr=4.5000e-05) (hash(x)=140604760)
+ 600 val loss 7.3790
+ 600 val perplexity 1602.0277
+ 600 train 7.323732 (lr=4.4985e-05) (hash(x)=148404734)
+ 700 val loss 7.2870
+ 700 val perplexity 1461.2491
+ 700 train 7.240127 (lr=4.4941e-05) (hash(x)=148115934)
+ 800 val loss 7.1677
+ 800 val perplexity 1296.9070
+ 800 train 7.044980 (lr=4.4868e-05) (hash(x)=137464699)
+ 900 val loss 7.0626
+ 900 val perplexity 1167.4529
+ 900 train 6.872476 (lr=4.4766e-05) (hash(x)=143886042)
+ 1000 val loss 6.9679
+ 1000 val perplexity 1061.9860
+ 1000 train 6.971719 (lr=4.4634e-05) (hash(x)=163799796)
+ 1100 val loss 6.8610
+ 1100 val perplexity 954.3155
+ 1100 train 6.825153 (lr=4.4474e-05) (hash(x)=144592844)
+ 1200 val loss 6.7707
+ 1200 val perplexity 871.9357
+ 1200 train 7.097885 (lr=4.4285e-05) (hash(x)=204706354)
+ 1300 val loss 6.6947
+ 1300 val perplexity 808.1379
+ 1300 train 6.631471 (lr=4.4068e-05) (hash(x)=150862210)
+ 1400 val loss 6.6223
+ 1400 val perplexity 751.7076
+ 1400 train 6.619408 (lr=4.3822e-05) (hash(x)=147766811)
+ 1500 val loss 6.5493
+ 1500 val perplexity 698.7467
+ 1500 train 6.367006 (lr=4.3549e-05) (hash(x)=135925327)
+ 1600 val loss 6.5088
+ 1600 val perplexity 671.0128
+ 1600 train 6.479077 (lr=4.3249e-05) (hash(x)=160440642)
+ 1700 val loss 6.4531
+ 1700 val perplexity 634.6756
+ 1700 train 6.575705 (lr=4.2922e-05) (hash(x)=151184106)
+ 1800 val loss 6.4062
+ 1800 val perplexity 605.5698
+ 1800 train 6.302565 (lr=4.2569e-05) (hash(x)=148100580)
+ 1900 val loss 6.3862
+ 1900 val perplexity 593.6020
+ 1900 train 6.214580 (lr=4.2190e-05) (hash(x)=149434659)
+ 2000 val loss 6.3288
+ 2000 val perplexity 560.5059
+ 2000 train 6.230272 (lr=4.1785e-05) (hash(x)=152285486)
+ 2100 val loss 6.2902
+ 2100 val perplexity 539.2587
+ 2100 train 6.033951 (lr=4.1356e-05) (hash(x)=144294295)
+ 2200 val loss 6.2656
+ 2200 val perplexity 526.1706
+ 2200 train 6.274797 (lr=4.0903e-05) (hash(x)=175030215)
+ 2300 val loss 6.2425
+ 2300 val perplexity 514.1522
+ 2300 train 6.053737 (lr=4.0426e-05) (hash(x)=150831428)
+ 2400 val loss 6.2011
+ 2400 val perplexity 493.2859
+ 2400 train 6.386537 (lr=3.9927e-05) (hash(x)=140808297)
+ 2500 val loss 6.1761
+ 2500 val perplexity 481.1316
+ 2500 train 6.153784 (lr=3.9406e-05) (hash(x)=153160275)
+ 2600 val loss 6.1569
+ 2600 val perplexity 471.9454
+ 2600 train 5.944077 (lr=3.8863e-05) (hash(x)=133990623)
+ 2700 val loss 6.1428
+ 2700 val perplexity 465.3676
+ 2700 train 6.110357 (lr=3.8300e-05) (hash(x)=142860944)
+ 2800 val loss 6.1104
+ 2800 val perplexity 450.5392
+ 2800 train 6.122779 (lr=3.7717e-05) (hash(x)=137959511)
+ 2900 val loss 6.0858
+ 2900 val perplexity 439.5509
+ 2900 train 6.040658 (lr=3.7116e-05) (hash(x)=147009873)
+ 3000 val loss 6.0703
+ 3000 val perplexity 432.8103
+ 3000 train 5.927878 (lr=3.6496e-05) (hash(x)=158264841)
+ 3100 val loss 6.0638
+ 3100 val perplexity 430.0071
+ 3100 train 5.902818 (lr=3.5860e-05) (hash(x)=139232251)
+ 3200 val loss 6.0297
+ 3200 val perplexity 415.6103
+ 3200 train 6.069080 (lr=3.5207e-05) (hash(x)=153436104)
+ 3300 val loss 6.0157
+ 3300 val perplexity 409.8088
+ 3300 train 5.843759 (lr=3.4539e-05) (hash(x)=149681831)
+ 3400 val loss 6.0050
+ 3400 val perplexity 405.4450
+ 3400 train 5.885643 (lr=3.3857e-05) (hash(x)=168425516)
+ 3500 val loss 5.9896
+ 3500 val perplexity 399.2447
+ 3500 train 6.019372 (lr=3.3162e-05) (hash(x)=163104338)
+ 3600 val loss 5.9641
+ 3600 val perplexity 389.2151
+ 3600 train 5.938704 (lr=3.2455e-05) (hash(x)=165109772)
+ 3700 val loss 5.9637
+ 3700 val perplexity 389.0497
+ 3700 train 5.694476 (lr=3.1736e-05) (hash(x)=153420306)
+ 3800 val loss 5.9347
+ 3800 val perplexity 377.9158
+ 3800 train 5.948569 (lr=3.1008e-05) (hash(x)=160168863)
+ 3900 val loss 5.9212
+ 3900 val perplexity 372.8728
+ 3900 train 5.840442 (lr=3.0270e-05) (hash(x)=153906073)
+ 4000 val loss 5.9103
+ 4000 val perplexity 368.8176
+ 4000 train 5.857364 (lr=2.9524e-05) (hash(x)=151055067)
+ 4100 val loss 5.8939
+ 4100 val perplexity 362.8057
+ 4100 train 5.873942 (lr=2.8771e-05) (hash(x)=149629830)
+ 4200 val loss 5.8796
+ 4200 val perplexity 357.6752
+ 4200 train 5.760632 (lr=2.8013e-05) (hash(x)=143101381)
+ 4300 val loss 5.8715
+ 4300 val perplexity 354.7955
+ 4300 train 5.774401 (lr=2.7250e-05) (hash(x)=149712044)
+ 4400 val loss 5.8542
+ 4400 val perplexity 348.6813
+ 4400 train 5.778264 (lr=2.6483e-05) (hash(x)=153446449)
+ 4500 val loss 5.8414
+ 4500 val perplexity 344.2694
+ 4500 train 5.791447 (lr=2.5714e-05) (hash(x)=146086947)
+ 4600 val loss 5.8349
+ 4600 val perplexity 342.0447
+ 4600 train 5.677424 (lr=2.4943e-05) (hash(x)=153800173)
+ 4700 val loss 5.8201
+ 4700 val perplexity 336.9922
+ 4700 train 5.893148 (lr=2.4172e-05) (hash(x)=155962726)
+ 4800 val loss 5.8126
+ 4800 val perplexity 334.4903
+ 4800 train 6.055249 (lr=2.3402e-05) (hash(x)=142045616)
+ 4900 val loss 5.8005
+ 4900 val perplexity 330.4518
+ 4900 train 5.765253 (lr=2.2633e-05) (hash(x)=143418248)
+ 5000 val loss 5.7905
+ 5000 val perplexity 327.1776
+ 5000 train 5.720255 (lr=2.1868e-05) (hash(x)=145789790)
+ 5100 val loss 5.7847
+ 5100 val perplexity 325.2880
+ 5100 train 5.768248 (lr=2.1107e-05) (hash(x)=137795633)
+ 5200 val loss 5.7723
+ 5200 val perplexity 321.2600
+ 5200 train 5.932185 (lr=2.0351e-05) (hash(x)=148907132)
+ 5300 val loss 5.7665
+ 5300 val perplexity 319.4049
+ 5300 train 5.643368 (lr=1.9602e-05) (hash(x)=152343580)
+ 5400 val loss 5.7569
+ 5400 val perplexity 316.3512
+ 5400 train 5.610413 (lr=1.8860e-05) (hash(x)=148578264)
+ 5500 val loss 5.7440
+ 5500 val perplexity 312.2961
+ 5500 train 5.908919 (lr=1.8127e-05) (hash(x)=145635833)
+ 5600 val loss 5.7393
+ 5600 val perplexity 310.8486
+ 5600 train 5.729499 (lr=1.7403e-05) (hash(x)=156337844)
+ 5700 val loss 5.7345
+ 5700 val perplexity 309.3647
+ 5700 train 5.609031 (lr=1.6690e-05) (hash(x)=147168506)
+ 5800 val loss 5.7259
+ 5800 val perplexity 306.7088
+ 5800 train 5.782136 (lr=1.5989e-05) (hash(x)=159566920)
+ 5900 val loss 5.7173
+ 5900 val perplexity 304.0780
+ 5900 train 5.780312 (lr=1.5300e-05) (hash(x)=158273929)
+ 6000 val loss 5.7163
+ 6000 val perplexity 303.7896
+ 6000 train 5.688311 (lr=1.4625e-05) (hash(x)=156649749)
+ 6100 val loss 5.7060
+ 6100 val perplexity 300.6730
+ 6100 train 5.621117 (lr=1.3965e-05) (hash(x)=146812388)
+ 6200 val loss 5.7017
+ 6200 val perplexity 299.3790
+ 6200 train 5.531676 (lr=1.3320e-05) (hash(x)=143522146)
+ 6300 val loss 5.6966
+ 6300 val perplexity 297.8462
+ 6300 train 5.516797 (lr=1.2692e-05) (hash(x)=150124474)
+ 6400 val loss 5.6903
+ 6400 val perplexity 295.9842
+ 6400 train 5.728277 (lr=1.2081e-05) (hash(x)=141242117)
+ 6500 val loss 5.6830
+ 6500 val perplexity 293.8160
+ 6500 train 5.561332 (lr=1.1489e-05) (hash(x)=143529762)
+ 6600 val loss 5.6778
+ 6600 val perplexity 292.3132
+ 6600 train 5.466468 (lr=1.0916e-05) (hash(x)=136948374)
+ 6700 val loss 5.6787
+ 6700 val perplexity 292.5690
+ 6700 train 5.479150 (lr=1.0363e-05) (hash(x)=146268592)
+ 6800 val loss 5.6684
+ 6800 val perplexity 289.5688
+ 6800 train 5.621566 (lr=9.8310e-06) (hash(x)=152676836)
+ 6900 val loss 5.6620
+ 6900 val perplexity 287.7147
+ 6900 train 5.583413 (lr=9.3205e-06) (hash(x)=134657776)
+ 7000 val loss 5.6593
+ 7000 val perplexity 286.9359
+ 7000 train 5.614666 (lr=8.8324e-06) (hash(x)=166721861)
+ 7100 val loss 5.6580
+ 7100 val perplexity 286.5740
+ 7100 train 5.455072 (lr=8.3674e-06) (hash(x)=135496702)
+ 7200 val loss 5.6546
+ 7200 val perplexity 285.6103
+ 7200 train 5.764223 (lr=7.9261e-06) (hash(x)=155567461)
+ 7300 val loss 5.6465
+ 7300 val perplexity 283.3002
+ 7300 train 5.449112 (lr=7.5093e-06) (hash(x)=142803829)
+ 7400 val loss 5.6460
+ 7400 val perplexity 283.1502
+ 7400 train 5.468495 (lr=7.1174e-06) (hash(x)=145294178)
+ 7500 val loss 5.6438
+ 7500 val perplexity 282.5349
+ 7500 train 5.400206 (lr=6.7511e-06) (hash(x)=150573713)
+ 7600 val loss 5.6417
+ 7600 val perplexity 281.9352
+ 7600 train 5.657176 (lr=6.4109e-06) (hash(x)=142771511)
+ 7700 val loss 5.6357
+ 7700 val perplexity 280.2563
+ 7700 train 5.538167 (lr=6.0972e-06) (hash(x)=143602175)
+ 7800 val loss 5.6325
+ 7800 val perplexity 279.3481
+ 7800 train 5.691221 (lr=5.8107e-06) (hash(x)=152379862)
+ 7900 val loss 5.6315
+ 7900 val perplexity 279.0784
+ 7900 train 5.453447 (lr=5.5515e-06) (hash(x)=146655921)
+ 8000 val loss 5.6326
+ 8000 val perplexity 279.3739
+ 8000 train 5.770980 (lr=5.3203e-06) (hash(x)=148262482)
+ 8100 val loss 5.6243
+ 8100 val perplexity 277.0847
+ 8100 train 5.568313 (lr=5.1172e-06) (hash(x)=147683655)
+ 8200 val loss 5.6249
+ 8200 val perplexity 277.2312
+ 8200 train 5.739700 (lr=4.9425e-06) (hash(x)=157312987)
+ 8300 val loss 5.6217
+ 8300 val perplexity 276.3641
+ 8300 train 5.581598 (lr=4.7966e-06) (hash(x)=141107543)
+ 8400 val loss 5.6180
+ 8400 val perplexity 275.3497
+ 8400 train 5.610967 (lr=4.6796e-06) (hash(x)=141323024)
+ 8500 val loss 5.6163
+ 8500 val perplexity 274.8697
+ 8500 train 5.587502 (lr=4.5917e-06) (hash(x)=150696521)
+ 8600 val loss 5.6160
+ 8600 val perplexity 274.7957
+ 8600 train 5.565991 (lr=4.5330e-06) (hash(x)=162288191)
+ 8700 val loss 5.6119
+ 8700 val perplexity 273.6724
+ 8700 train 5.479275 (lr=4.5037e-06) (hash(x)=152860941)
+ 8749 val loss 5.6102
+ 8749 val perplexity 273.1960
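
As a sanity check on the log format, the reported val perplexity is simply exp of the val loss. A minimal sketch using the final entry of the new run (small digit differences come from the loss being printed to four decimals):

import math

# Final validation entry of the new run: loss 5.6102 -> perplexity ~273.
print(math.exp(5.6102))  # ~273.19, vs. the logged 273.1960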
attention_kindselective_n_heads4_seed1341/model_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:8b7fd67aa3ad05060e49d991c1a5ce788f8dd22e7a1852efc3ad664cfb3e9e6a
+ oid sha256:7b19bcdcc795560643bed6a5252e945b59403b36d9fb80a6c7c1978ec18d4c4d
  size 92843394
attention_kindselective_n_heads4_seed1341/optimizer_08749.pt CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e26d19de11e659cfb1c05e7f92e8c844a6fa530d7e243d5fc97a27e28b66d8ec
+ oid sha256:0642c7ecdf284e5d06d2b300e8988d6e95c5e528eeaf54c124ecd155f33aaac9
  size 179406214
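
Both checkpoint diffs change only the oid in their Git LFS pointers; the byte sizes are unchanged, consistent with retraining the same architecture at a different learning rate. A Git LFS pointer records the SHA-256 and size of the real file, so a downloaded checkpoint can be checked against it. A minimal sketch (the local path is hypothetical):

import hashlib
from pathlib import Path

# Pointer fields for model_08749.pt as of this commit.
expected_oid = "7b19bcdcc795560643bed6a5252e945b59403b36d9fb80a6c7c1978ec18d4c4d"
expected_size = 92843394

data = Path("model_08749.pt").read_bytes()  # hypothetical local download
assert len(data) == expected_size, "size mismatch"
assert hashlib.sha256(data).hexdigest() == expected_oid, "oid mismatch"
print("checkpoint matches its LFS pointer")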